code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Re-export the guardian higher-order component from this hoc index.
export { default as withGuardian } from './withGuardian'
// NOTE(review): presumably a placeholder export to keep the module non-empty — confirm before removing.
export const holder = 1
| mydearxym/mastani | src/hoc/index.js | JavaScript | apache-2.0 | 81 |
using Esri.ArcGISRuntime.Layers;
using System;
using Windows.UI.Popups;
using Windows.UI.Xaml.Controls;
namespace ArcGISRuntimeSDKDotNet_PhoneSamples.Samples
{
/// <summary>
/// Creates a GeoRssLayer based on the United States Geological Survey earthquake feed.
/// </summary>
/// <title>GeoRSS Layer</title>
/// <category>Graphics Layers</category>
public sealed partial class GeoRssLayerSample : Page
{
    public GeoRssLayerSample()
    {
        this.InitializeComponent();
    }

    /// <summary>
    /// Refreshes the GeoRSS layer and reports success or failure to the user.
    /// </summary>
    private async void OnLayerUpdateButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        try
        {
            // The 'as' cast yields null if the layer is missing or of another type;
            // fail with a clear message instead of a NullReferenceException.
            var rssLayer = MyMapView.Map.Layers["RssLayer"] as GeoRssLayer;
            if (rssLayer == null)
                throw new InvalidOperationException("Layer 'RssLayer' was not found or is not a GeoRssLayer.");

            await rssLayer.UpdateAsync();
            await new MessageDialog("Layer updated successfully", "GeoRSS Layer Sample").ShowAsync();
        }
        catch (Exception ex)
        {
            // Await the dialog so failures in ShowAsync are observed; the original
            // fire-and-forget assigned the task to an unused local and discarded it.
            await new MessageDialog(ex.Message, "GeoRSS Layer Sample").ShowAsync();
        }
    }
}
}
| Tyshark9/arcgis-runtime-samples-dotnet | src/Phone/ArcGISRuntimeSDKDotNet_PhoneSamples/Samples/GraphicsLayers/GeoRssLayerSample.xaml.cs | C# | apache-2.0 | 921 |
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module DTK::Client
  class ServiceAndComponentInfo::TransformFrom
    # Base class for per-info-type content transformers; concrete subclasses
    # (Service, Component) are instantiated through Info.create.
    class Info
      require_relative('info/service')
      require_relative('info/component')

      attr_reader :input_file_paths

      def initialize(content_dir, dtk_dsl_parse_helper)
        @content_dir = content_dir
        @dtk_dsl_info_processor = dtk_dsl_parse_helper.info_processor(info_type)
        # dynamically computed
        @input_file_paths = []
        @directory_file_paths = nil # this will be all paths in module
      end
      private :initialize

      # Factory: returns the subclass instance matching info_type.
      def self.create(info_type, content_dir, dtk_dsl_parse_helper)
        case info_type
        when :service_info then Service.new(content_dir, dtk_dsl_parse_helper)
        when :component_info then Component.new(content_dir, dtk_dsl_parse_helper)
        else
          fail Error, "Unexpected info_type '#{info_type}'"
        end
      end

      private

      # Records the path and feeds its raw content into the processor.
      def add_content!(input_files_processor, path)
        @input_file_paths << path
        input_files_processor.add_content!(path, get_raw_content?(path))
      end

      def input_files_processor(type)
        @dtk_dsl_info_processor.indexed_input_files[type] || raise_missing_type_error(type)
      end

      # Returns the single module-refs file path (or nil); fails on ambiguity.
      def module_refs_path
        matches = dsl_file_matches { |path| module_ref_input_files_processor.match?(path) }
        raise Error, "Unexpected that multiple module ref files" if matches.size > 1
        matches.first
      end

      def dsl_file_matches(&block)
        # TODO: maybe better to solve by restricting directory_file_paths to be least_nested_pos of 2
        matches = directory_file_paths.select { |path| block.call(path) }
        if matches.size > 1
          # Keep only the least deeply nested matches.
          least_nested_pos = matches.map { |match| match.split('/').size }.min
          matches.reject! { |match| match.split('/').size != least_nested_pos }
        end
        matches
      end

      def module_ref_input_files_processor
        @module_ref_input_files_processor ||= input_files_processor(:module_refs)
      end

      # Returns the file contents, or nil when path is nil or the file is absent.
      # Fixes from original: File.read instead of File.open(...).read (which
      # leaked the file handle), File.exist? instead of the deprecated
      # File.exists?, and removal of a duplicate definition of this method
      # that appeared later in the class body.
      def get_raw_content?(file_path)
        File.read(file_path) if file_path && File.exist?(file_path)
      end

      def raise_missing_type_error(type)
        raise Error, "Unexpected that no indexed_input_files of type '#{type}'"
      end

      def directory_file_paths
        @directory_file_paths ||= Dir.glob("#{@content_dir}/**/*")
      end
    end
  end
end
| dtk/dtk-client | lib/client/service_and_component_info/transform_from/info.rb | Ruby | apache-2.0 | 3,187 |
using System.Collections.Generic;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace ManualOAuth.Controllers
{
[Authorize]
public class SecureController : Controller
{
    /// <summary>
    /// Shows the authenticated user's claims as a type-to-value table.
    /// </summary>
    public IActionResult Index()
    {
        var claimsByType = new Dictionary<string, string>();

        // Indexer assignment means a later claim of the same type overwrites
        // an earlier one (last-wins), matching the original population order.
        foreach (var userClaim in User.Claims)
        {
            claimsByType[userClaim.Type] = userClaim.Value;
        }

        return View(claimsByType);
    }
}
}
| lozanotek/aspnet-security | src/Providers/ManualOAuth/Controllers/SecureController.cs | C# | apache-2.0 | 536 |
/*
Copyright 2012 -2014 Michael Remond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pac4j.saml.credentials;
import java.util.List;
import org.opensaml.saml2.core.Attribute;
import org.opensaml.saml2.core.NameID;
import org.pac4j.core.credentials.Credentials;
/**
* Credentials containing the nameId of the SAML subject and all of its attributes.
*
* @author Michael Remond
* @since 1.5.0
*/
public class Saml2Credentials extends Credentials {

    private static final long serialVersionUID = 5040516205957826527L;

    /** Identifier of the SAML subject. */
    private final NameID nameId;

    /** Attributes carried by the SAML assertion. */
    private final List<Attribute> attributes;

    public Saml2Credentials(final NameID nameId, final List<Attribute> attributes, final String clientName) {
        this.nameId = nameId;
        this.attributes = attributes;
        setClientName(clientName);
    }

    public NameID getNameId() {
        return this.nameId;
    }

    public List<Attribute> getAttributes() {
        return this.attributes;
    }

    @Override
    public String toString() {
        // Produces the same text as the original concatenation form.
        final StringBuilder text = new StringBuilder("SAMLCredential [nameId=");
        text.append(this.nameId);
        text.append(", attributes=");
        text.append(this.attributes);
        text.append("]");
        return text.toString();
    }
}
| F0REacH/pac4j-1.5.1 | pac4j-saml/src/main/java/org/pac4j/saml/credentials/Saml2Credentials.java | Java | apache-2.0 | 1,655 |
package astilectron
import (
"encoding/json"
"errors"
)
// Event names
const (
EventNameAppEventReady = "app.event.ready"
EventNameAppClose = "app.close"
EventNameAppCrash = "app.crash"
EventNameAppStop = "app.stop" // This event is equal to EventNameAppClose || EventNameAppCrash
EventNameProvisionStart = "provision.start"
EventNameProvisionDone = "provision.done"
EventNameWindowCmdBlur = "window.cmd.blur"
EventNameWindowCmdCenter = "window.cmd.center"
EventNameWindowCmdClose = "window.cmd.close"
EventNameWindowCmdCreate = "window.cmd.create"
EventNameWindowCmdDestroy = "window.cmd.destroy"
EventNameWindowCmdFocus = "window.cmd.focus"
EventNameWindowCmdHide = "window.cmd.hide"
EventNameWindowCmdMaximize = "window.cmd.maximize"
EventNameWindowCmdMessage = "window.cmd.message"
EventNameWindowCmdMinimize = "window.cmd.minimize"
EventNameWindowCmdMove = "window.cmd.move"
EventNameWindowCmdResize = "window.cmd.resize"
EventNameWindowCmdRestore = "window.cmd.restore"
EventNameWindowCmdShow = "window.cmd.show"
EventNameWindowCmdUnmaximize = "window.cmd.unmaximize"
EventNameWindowCmdWebContentsCloseDevTools = "window.cmd.web.contents.close.dev.tools"
EventNameWindowCmdWebContentsOpenDevTools = "window.cmd.web.contents.open.dev.tools"
EventNameWindowEventBlur = "window.event.blur"
EventNameWindowEventClosed = "window.event.closed"
EventNameWindowEventDidFinishLoad = "window.event.did.finish.load"
EventNameWindowEventFocus = "window.event.focus"
EventNameWindowEventHide = "window.event.hide"
EventNameWindowEventMaximize = "window.event.maximize"
EventNameWindowEventMessage = "window.event.message"
EventNameWindowEventMinimize = "window.event.minimize"
EventNameWindowEventMove = "window.event.move"
EventNameWindowEventReadyToShow = "window.event.ready.to.show"
EventNameWindowEventResize = "window.event.resize"
EventNameWindowEventRestore = "window.event.restore"
EventNameWindowEventShow = "window.event.show"
EventNameWindowEventUnmaximize = "window.event.unmaximize"
EventNameWindowEventUnresponsive = "window.event.unresponsive"
)
// Other constants
const (
	// mainTargetID addresses the app-level (non-window) event target.
	mainTargetID = "main"
)
// Event represents an event
type Event struct {
// This is the base of the event
Name string `json:"name"`
TargetID string `json:"targetID"`
// This is a list of all possible payloads.
// A choice was made not to use interfaces since it's a pain in the ass asserting each an every payload afterwards
// We use pointers so that omitempty works
Message *EventMessage `json:"message,omitempty"`
URL string `json:"url,omitempty"`
WindowOptions *WindowOptions `json:"windowOptions,omitempty"`
}
// EventMessage represents an event message payload.
//
// The payload is stored either as an arbitrary Go value (when built locally
// via newEventMessage) or as raw JSON bytes (when decoded off the wire via
// UnmarshalJSON).
type EventMessage struct {
	i interface{}
}

// newEventMessage creates a new event message wrapping the given value
func newEventMessage(i interface{}) *EventMessage {
	return &EventMessage{i: i}
}

// MarshalJSON implements the JSONMarshaler interface
func (p *EventMessage) MarshalJSON() ([]byte, error) {
	return json.Marshal(p.i)
}

// Unmarshal unmarshals the payload into the given interface.
// It only succeeds when the payload holds raw JSON bytes (i.e. the message
// came through UnmarshalJSON); otherwise it returns an error.
func (p *EventMessage) Unmarshal(i interface{}) error {
	if b, ok := p.i.([]byte); ok {
		return json.Unmarshal(b, i)
	}
	// Fixed grammar of the original message ("should []byte").
	return errors.New("event message should be []byte")
}

// UnmarshalJSON implements the JSONUnmarshaler interface by retaining the raw
// bytes for later decoding via Unmarshal
func (p *EventMessage) UnmarshalJSON(i []byte) error {
	p.i = i
	return nil
}
| robjporter/go-functions2 | gui/astilectron/event.go | GO | apache-2.0 | 4,036 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.hydromatic.optiq.jdbc;
import net.hydromatic.avatica.*;
import net.hydromatic.linq4j.Queryable;
import net.hydromatic.optiq.server.OptiqServerStatement;
/**
* Implementation of {@link java.sql.Statement}
* for the Calcite engine.
*/
public abstract class OptiqStatement
    extends AvaticaStatement
    implements OptiqServerStatement {

  OptiqStatement(
      OptiqConnectionImpl connection,
      int resultSetType,
      int resultSetConcurrency,
      int resultSetHoldability) {
    super(connection, resultSetType, resultSetConcurrency,
        resultSetHoldability);
  }

  // implement Statement

  @Override public OptiqConnectionImpl getConnection() {
    // Connection is always an OptiqConnectionImpl in this engine.
    return (OptiqConnectionImpl) connection;
  }

  /** Creates a prepare context bound to this statement's connection. */
  public OptiqConnectionImpl.ContextImpl createPrepareContext() {
    return new OptiqConnectionImpl.ContextImpl(getConnection());
  }

  /** Prepares the given queryable using the connection's prepare factory. */
  protected <T> OptiqPrepare.PrepareResult<T> prepare(Queryable<T> queryable) {
    final OptiqPrepare prepare = getConnection().prepareFactory.apply();
    return prepare.prepareQueryable(createPrepareContext(), queryable);
  }

  @Override
  protected void close_() {
    if (!closed) {
      // Mark closed first so a re-entrant close is a no-op.
      closed = true;
      final OptiqConnectionImpl connection1 = (OptiqConnectionImpl) connection;
      connection1.server.removeStatement(this);
      // Release the open result set, nulling the field before close().
      if (openResultSet != null) {
        AvaticaResultSet c = openResultSet;
        openResultSet = null;
        c.close();
      }
      // If onStatementClose throws, this method will throw an exception (later
      // converted to SQLException), but this statement still gets closed.
      connection1.getDriver().handler.onStatementClose(this);
    }
  }
}
// End OptiqStatement.java
| nvoron23/incubator-calcite | core/src/main/java/net/hydromatic/optiq/jdbc/OptiqStatement.java | Java | apache-2.0 | 2,496 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.analyzer;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import io.prestosql.Session;
import io.prestosql.SystemSessionProperties;
import io.prestosql.connector.CatalogName;
import io.prestosql.connector.informationschema.InformationSchemaConnector;
import io.prestosql.connector.system.SystemConnector;
import io.prestosql.execution.QueryManagerConfig;
import io.prestosql.execution.TaskManagerConfig;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.memory.MemoryManagerConfig;
import io.prestosql.metadata.Catalog;
import io.prestosql.metadata.CatalogManager;
import io.prestosql.metadata.InMemoryNodeManager;
import io.prestosql.metadata.InternalNodeManager;
import io.prestosql.metadata.Metadata;
import io.prestosql.metadata.QualifiedObjectName;
import io.prestosql.metadata.SessionPropertyManager;
import io.prestosql.security.AccessControl;
import io.prestosql.security.AccessControlManager;
import io.prestosql.security.AllowAllAccessControl;
import io.prestosql.spi.connector.ColumnMetadata;
import io.prestosql.spi.connector.Connector;
import io.prestosql.spi.connector.ConnectorMetadata;
import io.prestosql.spi.connector.ConnectorTableMetadata;
import io.prestosql.spi.connector.ConnectorTransactionHandle;
import io.prestosql.spi.connector.ConnectorViewDefinition;
import io.prestosql.spi.connector.SchemaTableName;
import io.prestosql.spi.session.PropertyMetadata;
import io.prestosql.spi.transaction.IsolationLevel;
import io.prestosql.spi.type.ArrayType;
import io.prestosql.sql.parser.SqlParser;
import io.prestosql.sql.tree.Statement;
import io.prestosql.testing.TestingMetadata;
import io.prestosql.testing.assertions.PrestoExceptionAssert;
import io.prestosql.transaction.TransactionManager;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import static io.prestosql.connector.CatalogName.createInformationSchemaCatalogName;
import static io.prestosql.connector.CatalogName.createSystemTablesCatalogName;
import static io.prestosql.metadata.MetadataManager.createTestMetadataManager;
import static io.prestosql.operator.scalar.ApplyFunction.APPLY_FUNCTION;
import static io.prestosql.spi.StandardErrorCode.AMBIGUOUS_NAME;
import static io.prestosql.spi.StandardErrorCode.CATALOG_NOT_FOUND;
import static io.prestosql.spi.StandardErrorCode.COLUMN_NOT_FOUND;
import static io.prestosql.spi.StandardErrorCode.COLUMN_TYPE_UNKNOWN;
import static io.prestosql.spi.StandardErrorCode.DUPLICATE_COLUMN_NAME;
import static io.prestosql.spi.StandardErrorCode.DUPLICATE_NAMED_QUERY;
import static io.prestosql.spi.StandardErrorCode.DUPLICATE_PROPERTY;
import static io.prestosql.spi.StandardErrorCode.EXPRESSION_NOT_AGGREGATE;
import static io.prestosql.spi.StandardErrorCode.EXPRESSION_NOT_CONSTANT;
import static io.prestosql.spi.StandardErrorCode.EXPRESSION_NOT_IN_DISTINCT;
import static io.prestosql.spi.StandardErrorCode.EXPRESSION_NOT_SCALAR;
import static io.prestosql.spi.StandardErrorCode.FUNCTION_NOT_AGGREGATE;
import static io.prestosql.spi.StandardErrorCode.INVALID_ARGUMENTS;
import static io.prestosql.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE;
import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.prestosql.spi.StandardErrorCode.INVALID_LITERAL;
import static io.prestosql.spi.StandardErrorCode.INVALID_PARAMETER_USAGE;
import static io.prestosql.spi.StandardErrorCode.INVALID_VIEW;
import static io.prestosql.spi.StandardErrorCode.INVALID_WINDOW_FRAME;
import static io.prestosql.spi.StandardErrorCode.MISMATCHED_COLUMN_ALIASES;
import static io.prestosql.spi.StandardErrorCode.MISSING_CATALOG_NAME;
import static io.prestosql.spi.StandardErrorCode.MISSING_COLUMN_NAME;
import static io.prestosql.spi.StandardErrorCode.MISSING_GROUP_BY;
import static io.prestosql.spi.StandardErrorCode.MISSING_ORDER_BY;
import static io.prestosql.spi.StandardErrorCode.MISSING_OVER;
import static io.prestosql.spi.StandardErrorCode.MISSING_SCHEMA_NAME;
import static io.prestosql.spi.StandardErrorCode.NESTED_AGGREGATION;
import static io.prestosql.spi.StandardErrorCode.NESTED_WINDOW;
import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.prestosql.spi.StandardErrorCode.NUMERIC_VALUE_OUT_OF_RANGE;
import static io.prestosql.spi.StandardErrorCode.SCHEMA_NOT_FOUND;
import static io.prestosql.spi.StandardErrorCode.SYNTAX_ERROR;
import static io.prestosql.spi.StandardErrorCode.TABLE_NOT_FOUND;
import static io.prestosql.spi.StandardErrorCode.TOO_MANY_ARGUMENTS;
import static io.prestosql.spi.StandardErrorCode.TOO_MANY_GROUPING_SETS;
import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH;
import static io.prestosql.spi.StandardErrorCode.VIEW_IS_RECURSIVE;
import static io.prestosql.spi.StandardErrorCode.VIEW_IS_STALE;
import static io.prestosql.spi.connector.ConnectorViewDefinition.ViewColumn;
import static io.prestosql.spi.session.PropertyMetadata.integerProperty;
import static io.prestosql.spi.session.PropertyMetadata.stringProperty;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.TypeSignature.parseTypeSignature;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
import static io.prestosql.testing.assertions.PrestoExceptionAssert.assertPrestoExceptionThrownBy;
import static io.prestosql.transaction.InMemoryTransactionManager.createTestTransactionManager;
import static io.prestosql.transaction.TransactionBuilder.transaction;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.Collections.nCopies;
@Test(singleThreaded = true)
public class TestAnalyzer
{
private static final String TPCH_CATALOG = "tpch";
private static final CatalogName TPCH_CATALOG_NAME = new CatalogName(TPCH_CATALOG);
private static final String SECOND_CATALOG = "c2";
private static final CatalogName SECOND_CATALOG_NAME = new CatalogName(SECOND_CATALOG);
private static final String THIRD_CATALOG = "c3";
private static final CatalogName THIRD_CATALOG_NAME = new CatalogName(THIRD_CATALOG);
private static final Session SETUP_SESSION = testSessionBuilder()
.setCatalog("c1")
.setSchema("s1")
.build();
private static final Session CLIENT_SESSION = testSessionBuilder()
.setCatalog(TPCH_CATALOG)
.setSchema("s1")
.build();
private static final SqlParser SQL_PARSER = new SqlParser();
private TransactionManager transactionManager;
private AccessControl accessControl;
private Metadata metadata;
@Test
public void testTooManyArguments()
{
    // 128 arguments to greatest() exceeds the engine's argument limit.
    assertFails("SELECT greatest(" + Joiner.on(", ").join(nCopies(128, "rand()")) + ")")
            .hasErrorCode(TOO_MANY_ARGUMENTS);
}
@Test
public void testNonComparableGroupBy()
{
assertFails("SELECT * FROM (SELECT approx_set(1)) GROUP BY 1")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testNonComparableWindowPartition()
{
assertFails("SELECT row_number() OVER (PARTITION BY t.x) FROM (VALUES(CAST (NULL AS HyperLogLog))) AS t(x)")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testNonComparableWindowOrder()
{
assertFails("SELECT row_number() OVER (ORDER BY t.x) FROM (VALUES(color('red'))) AS t(x)")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testNonComparableDistinctAggregation()
{
assertFails("SELECT count(DISTINCT x) FROM (SELECT approx_set(1) x)")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testNonComparableDistinct()
{
assertFails("SELECT DISTINCT * FROM (SELECT approx_set(1) x)")
.hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT DISTINCT x FROM (SELECT approx_set(1) x)")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testInSubqueryTypes()
{
assertFails("SELECT * FROM (VALUES 'a') t(y) WHERE y IN (VALUES 1)")
.hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT (VALUES true) IN (VALUES 1)")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testScalarSubQuery()
{
analyze("SELECT 'a', (VALUES 1) GROUP BY 1");
analyze("SELECT 'a', (SELECT (1))");
analyze("SELECT * FROM t1 WHERE (VALUES 1) = 2");
analyze("SELECT * FROM t1 WHERE (VALUES 1) IN (VALUES 1)");
analyze("SELECT * FROM t1 WHERE (VALUES 1) IN (2)");
analyze("SELECT * FROM (SELECT 1) t1(x) WHERE x IN (SELECT 1)");
}
@Test
public void testReferenceToOutputColumnFromOrderByAggregation()
{
assertFails("SELECT max(a) AS a FROM (values (1,2)) t(a,b) GROUP BY b ORDER BY max(a+b)")
.hasErrorCode(COLUMN_NOT_FOUND)
.hasMessageMatching("line 1:71: Invalid reference to output projection attribute from ORDER BY aggregation");
assertFails("SELECT DISTINCT a AS a, max(a) AS c from (VALUES (1, 2)) t(a, b) GROUP BY a ORDER BY max(a)")
.hasErrorCode(COLUMN_NOT_FOUND)
.hasMessageMatching("line 1:90: Invalid reference to output projection attribute from ORDER BY aggregation");
assertFails("SELECT CAST(ROW(1) AS ROW(someField BIGINT)) AS a FROM (values (1,2)) t(a,b) GROUP BY b ORDER BY MAX(a.someField)")
.hasErrorCode(COLUMN_NOT_FOUND)
.hasMessageMatching("line 1:102: Invalid reference to output projection attribute from ORDER BY aggregation");
assertFails("SELECT 1 AS x FROM (values (1,2)) t(x, y) GROUP BY y ORDER BY sum(apply(1, z -> x))")
.hasErrorCode(COLUMN_NOT_FOUND)
.hasMessageMatching("line 1:81: Invalid reference to output projection attribute from ORDER BY aggregation");
assertFails("SELECT row_number() over() as a from (values (41, 42), (-41, -42)) t(a,b) group by a+b order by a+b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("\\Qline 1:98: '(a + b)' must be an aggregate expression or appear in GROUP BY clause\\E");
}
@Test
public void testHavingReferencesOutputAlias()
{
assertFails("SELECT sum(a) x FROM t1 HAVING x > 5")
.hasErrorCode(COLUMN_NOT_FOUND);
}
@Test
public void testWildcardWithInvalidPrefix()
{
assertFails("SELECT foo.* FROM t1")
.hasErrorCode(TABLE_NOT_FOUND);
}
@Test
public void testGroupByWithWildcard()
{
assertFails("SELECT * FROM t1 GROUP BY 1")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT u1.*, u2.* FROM (select a, b + 1 from t1) u1 JOIN (select a, b + 2 from t1) u2 ON u1.a = u2.a GROUP BY u1.a, u2.a, 3")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE);
}
@Test
public void testGroupByInvalidOrdinal()
{
assertFails("SELECT * FROM t1 GROUP BY 10")
.hasErrorCode(INVALID_COLUMN_REFERENCE);
assertFails("SELECT * FROM t1 GROUP BY 0")
.hasErrorCode(INVALID_COLUMN_REFERENCE);
}
@Test
public void testGroupByWithSubquerySelectExpression()
{
analyze("SELECT (SELECT t1.a) FROM t1 GROUP BY a");
analyze("SELECT (SELECT a) FROM t1 GROUP BY t1.a");
// u.a is not GROUP-ed BY and it is used in select Subquery expression
analyze("SELECT (SELECT u.a FROM (values 1) u(a)) " +
"FROM t1 u GROUP BY b");
assertFails("SELECT (SELECT u.a from (values 1) x(a)) FROM t1 u GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:16: Subquery uses 'u.a' which must appear in GROUP BY clause");
assertFails("SELECT (SELECT a+2) FROM t1 GROUP BY a+1")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:16: Subquery uses 'a' which must appear in GROUP BY clause");
assertFails("SELECT (SELECT 1 FROM t1 WHERE a = u.a) FROM t1 u GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:36: Subquery uses 'u.a' which must appear in GROUP BY clause");
// (t1.)a is not part of GROUP BY
assertFails("SELECT (SELECT a as a) FROM t1 GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE);
// u.a is not GROUP-ed BY but select Subquery expression is using a different (shadowing) u.a
analyze("SELECT (SELECT 1 FROM t1 u WHERE a = u.a) FROM t1 u GROUP BY b");
}
@Test
public void testGroupByWithExistsSelectExpression()
{
analyze("SELECT EXISTS(SELECT t1.a) FROM t1 GROUP BY a");
analyze("SELECT EXISTS(SELECT a) FROM t1 GROUP BY t1.a");
// u.a is not GROUP-ed BY and it is used in select Subquery expression
analyze("SELECT EXISTS(SELECT u.a FROM (values 1) u(a)) " +
"FROM t1 u GROUP BY b");
assertFails("SELECT EXISTS(SELECT u.a from (values 1) x(a)) FROM t1 u GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:22: Subquery uses 'u.a' which must appear in GROUP BY clause");
assertFails("SELECT EXISTS(SELECT a+2) FROM t1 GROUP BY a+1")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:22: Subquery uses 'a' which must appear in GROUP BY clause");
assertFails("SELECT EXISTS(SELECT 1 FROM t1 WHERE a = u.a) FROM t1 u GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:42: Subquery uses 'u.a' which must appear in GROUP BY clause");
// (t1.)a is not part of GROUP BY
assertFails("SELECT EXISTS(SELECT a as a) FROM t1 GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE);
// u.a is not GROUP-ed BY but select Subquery expression is using a different (shadowing) u.a
analyze("SELECT EXISTS(SELECT 1 FROM t1 u WHERE a = u.a) FROM t1 u GROUP BY b");
}
@Test
public void testGroupByWithSubquerySelectExpressionWithDereferenceExpression()
{
analyze("SELECT (SELECT t.a.someField) " +
"FROM (VALUES ROW(CAST(ROW(1) AS ROW(someField BIGINT)), 2)) t(a, b) " +
"GROUP BY a");
assertFails("SELECT (SELECT t.a.someField) " +
"FROM (VALUES ROW(CAST(ROW(1) AS ROW(someField BIGINT)), 2)) t(a, b) " +
"GROUP BY b")
.hasErrorCode(EXPRESSION_NOT_AGGREGATE)
.hasMessageMatching("line 1:16: Subquery uses 't.a' which must appear in GROUP BY clause");
}
@Test
public void testOrderByInvalidOrdinal()
{
assertFails("SELECT * FROM t1 ORDER BY 10")
.hasErrorCode(INVALID_COLUMN_REFERENCE);
assertFails("SELECT * FROM t1 ORDER BY 0")
.hasErrorCode(INVALID_COLUMN_REFERENCE);
}
@Test
public void testOrderByNonComparable()
{
assertFails("SELECT x FROM (SELECT approx_set(1) x) ORDER BY 1")
.hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT * FROM (SELECT approx_set(1) x) ORDER BY 1")
.hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT x FROM (SELECT approx_set(1) x) ORDER BY x")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testOffsetInvalidRowCount()
{
    // Row count overflows the supported integer range, so analysis rejects it.
    assertFails("SELECT * FROM t1 OFFSET 987654321098765432109876543210 ROWS")
            .hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testFetchFirstInvalidRowCount()
{
assertFails("SELECT * FROM t1 FETCH FIRST 987654321098765432109876543210 ROWS ONLY")
.hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT * FROM t1 FETCH FIRST 0 ROWS ONLY")
.hasErrorCode(NUMERIC_VALUE_OUT_OF_RANGE);
}
@Test
public void testFetchFirstWithTiesMissingOrderBy()
{
assertFails("SELECT * FROM t1 FETCH FIRST 5 ROWS WITH TIES")
.hasErrorCode(MISSING_ORDER_BY);
// ORDER BY clause must be in the same scope as FETCH FIRST WITH TIES
assertFails("SELECT * FROM (SELECT * FROM (values 1, 3, 2) t(a) ORDER BY a) FETCH FIRST 5 ROWS WITH TIES")
.hasErrorCode(MISSING_ORDER_BY);
}
@Test
public void testLimitInvalidRowCount()
{
assertFails("SELECT * FROM t1 LIMIT 987654321098765432109876543210")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testNestedAggregation()
{
assertFails("SELECT sum(count(*)) FROM t1")
.hasErrorCode(NESTED_AGGREGATION);
}
@Test
public void testAggregationsNotAllowed()
{
assertFails("SELECT * FROM t1 WHERE sum(a) > 1")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT * FROM t1 GROUP BY sum(a)")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT * FROM t1 JOIN t2 ON sum(t1.a) = t2.a")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
}
@Test
public void testWindowsNotAllowed()
{
assertFails("SELECT * FROM t1 WHERE foo() over () > 1")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT * FROM t1 GROUP BY rank() over ()")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT * FROM t1 JOIN t2 ON sum(t1.a) over () = t2.a")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT 1 FROM (VALUES 1) HAVING count(*) OVER () > 1")
.hasErrorCode(NESTED_WINDOW);
}
@Test
public void testGrouping()
{
analyze("SELECT a, b, sum(c), grouping(a, b) FROM t1 GROUP BY GROUPING SETS ((a), (a, b))");
analyze("SELECT grouping(t1.a) FROM t1 GROUP BY a");
analyze("SELECT grouping(b) FROM t1 GROUP BY t1.b");
analyze("SELECT grouping(a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a) FROM t1 GROUP BY a");
}
@Test
public void testGroupingNotAllowed()
{
assertFails("SELECT a, b, sum(c) FROM t1 WHERE grouping(a, b) GROUP BY GROUPING SETS ((a), (a, b))")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT a, b, sum(c) FROM t1 GROUP BY grouping(a, b)")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT t1.a, t1.b FROM t1 JOIN t2 ON grouping(t1.a, t1.b) > t2.a")
.hasErrorCode(EXPRESSION_NOT_SCALAR);
assertFails("SELECT grouping(a) FROM t1")
.hasErrorCode(MISSING_GROUP_BY);
assertFails("SELECT * FROM t1 ORDER BY grouping(a)")
.hasErrorCode(MISSING_GROUP_BY);
assertFails("SELECT grouping(a) FROM t1 GROUP BY b")
.hasErrorCode(INVALID_ARGUMENTS);
assertFails("SELECT grouping(a.field) FROM (VALUES ROW(CAST(ROW(1) AS ROW(field BIGINT)))) t(a) GROUP BY a.field")
.hasErrorCode(INVALID_ARGUMENTS);
assertFails("SELECT a FROM t1 GROUP BY a ORDER BY grouping(a)")
.hasErrorCode(INVALID_ARGUMENTS);
}
@Test
public void testGroupingTooManyArguments()
{
    // GROUPING accepts a bounded number of columns; this expression exceeds it
    // in projection, HAVING, window PARTITION BY, and window ORDER BY contexts.
    String grouping = "GROUPING(a, a, a, a, a, a, a, a, a, a, a, a, a, a, a," +
            "a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a," +
            "a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a," +
            "a, a)";
    assertFails(format("SELECT a, b, %s + 1 FROM t1 GROUP BY GROUPING SETS ((a), (a, b))", grouping))
            .hasErrorCode(TOO_MANY_ARGUMENTS);
    assertFails(format("SELECT a, b, %s as g FROM t1 GROUP BY a, b HAVING g > 0", grouping))
            .hasErrorCode(TOO_MANY_ARGUMENTS);
    assertFails(format("SELECT a, b, rank() OVER (PARTITION BY %s) FROM t1 GROUP BY GROUPING SETS ((a), (a, b))", grouping))
            .hasErrorCode(TOO_MANY_ARGUMENTS);
    assertFails(format("SELECT a, b, rank() OVER (PARTITION BY a ORDER BY %s) FROM t1 GROUP BY GROUPING SETS ((a), (a, b))", grouping))
            .hasErrorCode(TOO_MANY_ARGUMENTS);
}
// Unresolvable catalog/schema/table names must each fail with the most specific error code.
@Test
public void testInvalidTable()
{
assertFails("SELECT * FROM foo.bar.t")
        .hasErrorCode(CATALOG_NOT_FOUND);
assertFails("SELECT * FROM foo.t")
        .hasErrorCode(SCHEMA_NOT_FOUND);
assertFails("SELECT * FROM foo")
        .hasErrorCode(TABLE_NOT_FOUND);
}
// SHOW TABLES against a nonexistent schema fails, with or without a LIKE pattern.
@Test
public void testInvalidSchema()
{
assertFails("SHOW TABLES FROM NONEXISTENT_SCHEMA")
        .hasErrorCode(SCHEMA_NOT_FOUND);
assertFails("SHOW TABLES IN NONEXISTENT_SCHEMA LIKE '%'")
        .hasErrorCode(SCHEMA_NOT_FOUND);
}
// Columns referenced outside aggregates (in SELECT, ORDER BY, HAVING, or window
// clauses) must appear in GROUP BY; otherwise analysis fails with EXPRESSION_NOT_AGGREGATE.
@Test
public void testNonAggregate()
{
assertFails("SELECT 'a', array[b][1] FROM t1 GROUP BY 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT a, sum(b) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT sum(b) / a FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT sum(b) / a FROM t1 GROUP BY c")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT sum(b) FROM t1 ORDER BY a + 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT a, sum(b) FROM t1 GROUP BY a HAVING c > 5")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT count(*) over (PARTITION BY a) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT count(*) over (ORDER BY a) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT count(*) over (ORDER BY count(*) ROWS a PRECEDING) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT count(*) over (ORDER BY count(*) ROWS BETWEEN b PRECEDING AND a PRECEDING) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT count(*) over (ORDER BY count(*) ROWS BETWEEN a PRECEDING AND UNBOUNDED PRECEDING) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
}
// References to a nonexistent column fail regardless of the clause they appear in.
@Test
public void testInvalidAttribute()
{
assertFails("SELECT f FROM t1")
        .hasErrorCode(COLUMN_NOT_FOUND);
assertFails("SELECT * FROM t1 ORDER BY f")
        .hasErrorCode(COLUMN_NOT_FOUND);
assertFails("SELECT count(*) FROM t1 GROUP BY f")
        .hasErrorCode(COLUMN_NOT_FOUND);
assertFails("SELECT * FROM t1 WHERE f > 1")
        .hasErrorCode(COLUMN_NOT_FOUND);
}
// The error message must include the fully qualified, user-written column reference.
@Test
public void testInvalidAttributeCorrectErrorMessage()
{
assertFails("SELECT t.y FROM (VALUES 1) t(x)")
        .hasErrorCode(COLUMN_NOT_FOUND)
        .hasMessageMatching("\\Qline 1:8: Column 't.y' cannot be resolved\\E");
}
// With SELECT DISTINCT, ORDER BY may only reference output columns.
@Test
public void testOrderByMustAppearInSelectWithDistinct()
{
assertFails("SELECT DISTINCT a FROM t1 ORDER BY b")
        .hasErrorCode(EXPRESSION_NOT_IN_DISTINCT);
}
// Non-deterministic expressions are fine in ORDER BY, except when DISTINCT would
// make the ordering expression refer to something outside the distinct output.
@Test
public void testNonDeterministicOrderBy()
{
analyze("SELECT DISTINCT random() as b FROM t1 ORDER BY b");
analyze("SELECT random() FROM t1 ORDER BY random()");
analyze("SELECT a FROM t1 ORDER BY random()");
assertFails("SELECT DISTINCT random() FROM t1 ORDER BY random()")
        .hasErrorCode(EXPRESSION_NOT_IN_DISTINCT);
}
// WHERE must be a boolean expression; a bare bigint column is a type mismatch.
@Test
public void testNonBooleanWhereClause()
{
assertFails("SELECT * FROM t1 WHERE a")
        .hasErrorCode(TYPE_MISMATCH);
}
// A DISTINCT aggregation may coexist with a plain aggregation.
@Test
public void testDistinctAggregations()
{
analyze("SELECT COUNT(DISTINCT a), SUM(a) FROM t1");
}
// Multiple DISTINCT aggregations in one query are supported.
@Test
public void testMultipleDistinctAggregations()
{
analyze("SELECT COUNT(DISTINCT a), COUNT(DISTINCT b) FROM t1");
}
// ORDER BY may use expressions built from output aliases, including dereferences
// of row-typed outputs and lambdas over output columns.
@Test
public void testOrderByExpressionOnOutputColumn()
{
// TODO: analyze output
analyze("SELECT a x FROM t1 ORDER BY x + 1");
analyze("SELECT max(a) FROM (values (1,2), (2,1)) t(a,b) GROUP BY b ORDER BY max(b*1e0)");
analyze("SELECT CAST(ROW(1) AS ROW(someField BIGINT)) AS a FROM (values (1,2)) t(a,b) GROUP BY b ORDER BY a.someField");
analyze("SELECT 1 AS x FROM (values (1,2)) t(x, y) GROUP BY y ORDER BY sum(apply(1, x -> x))");
}
// Ordering by a source column works, but dereferencing an output alias as if it
// were a relation (x.c where x is an alias of a scalar) is a type mismatch.
@Test
public void testOrderByExpressionOnOutputColumn2()
{
// TODO: validate output
analyze("SELECT a x FROM t1 ORDER BY a + 1");
assertFails("SELECT x.c as x\n" +
        "FROM (VALUES 1) x(c)\n" +
        "ORDER BY x.c")
        .hasErrorCode(TYPE_MISMATCH)
        .hasLocation(3, 10);
}
// ORDER BY can reference a column covered by a qualified wildcard projection.
@Test
public void testOrderByWithWildcard()
{
// TODO: validate output
analyze("SELECT t1.* FROM t1 ORDER BY a");
}
// Scalar subqueries in ORDER BY obey grouping rules: they may reference grouping
// keys and output aliases, but inside an ORDER BY aggregation an output-projection
// alias is no longer visible.
@Test
public void testOrderByWithGroupByAndSubquerySelectExpression()
{
analyze("SELECT a FROM t1 GROUP BY a ORDER BY (SELECT a)");
assertFails("SELECT a FROM t1 GROUP BY a ORDER BY (SELECT b)")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
        .hasMessageMatching("line 1:46: Subquery uses 'b' which must appear in GROUP BY clause");
analyze("SELECT a AS b FROM t1 GROUP BY t1.a ORDER BY (SELECT b)");
assertFails("SELECT a AS b FROM t1 GROUP BY t1.a \n" +
        "ORDER BY MAX((SELECT b))")
        .hasErrorCode(COLUMN_NOT_FOUND)
        .hasMessageMatching("line 2:22: Invalid reference to output projection attribute from ORDER BY aggregation");
analyze("SELECT a FROM t1 GROUP BY a ORDER BY MAX((SELECT x FROM (VALUES 4) t(x)))");
analyze("SELECT CAST(ROW(1) AS ROW(someField BIGINT)) AS x\n" +
        "FROM (VALUES (1, 2)) t(a, b)\n" +
        "GROUP BY b\n" +
        "ORDER BY (SELECT x.someField)");
assertFails("SELECT CAST(ROW(1) AS ROW(someField BIGINT)) AS x\n" +
        "FROM (VALUES (1, 2)) t(a, b)\n" +
        "GROUP BY b\n" +
        "ORDER BY MAX((SELECT x.someField))")
        .hasErrorCode(COLUMN_NOT_FOUND)
        .hasMessageMatching("line 4:22: Invalid reference to output projection attribute from ORDER BY aggregation");
}
// Enforces the max_grouping_sets session limit (set to 2048 here): two 6/5-column
// CUBEs produce 2^11 = 2048 sets and pass; larger combinations must fail, and the
// message reports the exact count (or Integer.MAX_VALUE when it overflows).
@Test
public void testTooManyGroupingElements()
{
Session session = testSessionBuilder(new SessionPropertyManager(new SystemSessionProperties(
        new QueryManagerConfig(),
        new TaskManagerConfig(),
        new MemoryManagerConfig(),
        new FeaturesConfig().setMaxGroupingSets(2048)))).build();
analyze(session, "SELECT a, b, c, d, e, f, g, h, i, j, k, SUM(l)" +
        "FROM (VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))\n" +
        "t (a, b, c, d, e, f, g, h, i, j, k, l)\n" +
        "GROUP BY CUBE (a, b, c, d, e, f), CUBE (g, h, i, j, k)");
assertFails(session, "SELECT a, b, c, d, e, f, g, h, i, j, k, l, SUM(m)" +
        "FROM (VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))\n" +
        "t (a, b, c, d, e, f, g, h, i, j, k, l, m)\n" +
        "GROUP BY CUBE (a, b, c, d, e, f), CUBE (g, h, i, j, k, l)")
        .hasErrorCode(TOO_MANY_GROUPING_SETS)
        .hasMessageMatching("line 3:10: GROUP BY has 4096 grouping sets but can contain at most 2048");
assertFails(session, "SELECT a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, " +
        "q, r, s, t, u, v, x, w, y, z, aa, ab, ac, ad, ae, SUM(af)" +
        "FROM (VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, " +
        "17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32))\n" +
        "t (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, " +
        "q, r, s, t, u, v, x, w, y, z, aa, ab, ac, ad, ae, af)\n" +
        "GROUP BY CUBE (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, " +
        "q, r, s, t, u, v, x, w, y, z, aa, ab, ac, ad, ae)")
        .hasErrorCode(TOO_MANY_GROUPING_SETS)
        .hasMessageMatching(format("line 3:10: GROUP BY has more than %s grouping sets but can contain at most 2048", Integer.MAX_VALUE));
}
// Aliasing a relation with fewer columns than it produces must fail.
@Test
public void testMismatchedColumnAliasCount()
{
assertFails("SELECT * FROM t1 u (x, y)")
        .hasErrorCode(MISMATCHED_COLUMN_ALIASES);
}
// A constant join criterion is legal.
@Test
public void testJoinOnConstantExpression()
{
analyze("SELECT * FROM t1 JOIN t2 ON 1 = 1");
}
// The ON condition must be boolean-typed.
@Test
public void testJoinOnNonBooleanExpression()
{
assertFails("SELECT * FROM t1 JOIN t2 ON 5")
        .hasErrorCode(TYPE_MISMATCH);
}
// An unqualified column that exists on both join sides is ambiguous.
@Test
public void testJoinOnAmbiguousName()
{
assertFails("SELECT * FROM t1 JOIN t2 ON a = a")
        .hasErrorCode(AMBIGUOUS_NAME);
}
// Outer joins are not restricted to equi-join criteria.
@Test
public void testNonEquiOuterJoin()
{
analyze("SELECT * FROM t1 LEFT JOIN t2 ON t1.a + t2.a = 1");
analyze("SELECT * FROM t1 RIGHT JOIN t2 ON t1.a + t2.a = 1");
analyze("SELECT * FROM t1 LEFT JOIN t2 ON t1.a = t2.a OR t1.b = t2.b");
}
// HAVING must be boolean-typed (sum(a) is numeric here).
@Test
public void testNonBooleanHaving()
{
assertFails("SELECT sum(a) FROM t1 HAVING sum(a)")
        .hasErrorCode(TYPE_MISMATCH);
}
// ORDER BY over duplicated output names is ambiguous — even when both aliases
// refer to the same underlying column or expression.
@Test
public void testAmbiguousReferenceInOrderBy()
{
assertFails("SELECT a x, b x FROM t1 ORDER BY x")
        .hasErrorCode(AMBIGUOUS_NAME);
assertFails("SELECT a x, a x FROM t1 ORDER BY x")
        .hasErrorCode(AMBIGUOUS_NAME);
assertFails("SELECT a, a FROM t1 ORDER BY a")
        .hasErrorCode(AMBIGUOUS_NAME);
}
// Comma-separated relations form an implicit cross join.
@Test
public void testImplicitCrossJoin()
{
// TODO: validate output
analyze("SELECT * FROM t1, t2");
}
// NATURAL JOIN is rejected as unsupported.
@Test
public void testNaturalJoinNotSupported()
{
assertFails("SELECT * FROM t1 NATURAL JOIN t2")
        .hasErrorCode(NOT_SUPPORTED);
}
// A window function may not appear inside another aggregation/window function,
// nor inside a window's own PARTITION BY / ORDER BY.
@Test
public void testNestedWindowFunctions()
{
assertFails("SELECT avg(sum(a) OVER ()) FROM t1")
        .hasErrorCode(NESTED_WINDOW);
assertFails("SELECT sum(sum(a) OVER ()) OVER () FROM t1")
        .hasErrorCode(NESTED_WINDOW);
assertFails("SELECT avg(a) OVER (PARTITION BY sum(b) OVER ()) FROM t1")
        .hasErrorCode(NESTED_WINDOW);
assertFails("SELECT avg(a) OVER (ORDER BY sum(b) OVER ()) FROM t1")
        .hasErrorCode(NESTED_WINDOW);
}
// Window-only functions (row_number, lead) require an OVER clause.
@Test
public void testWindowFunctionWithoutOverClause()
{
assertFails("SELECT row_number()")
        .hasErrorCode(MISSING_OVER);
assertFails("SELECT coalesce(lead(a), 0) from (values(0)) t(a)")
        .hasErrorCode(MISSING_OVER);
}
// Invalid window frame specifications: illegal bound orderings/combinations yield
// INVALID_WINDOW_FRAME; non-integer ROWS offsets yield TYPE_MISMATCH.
@Test
public void testInvalidWindowFrame()
{
assertFails("SELECT rank() OVER (ROWS UNBOUNDED FOLLOWING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS 2 FOLLOWING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS BETWEEN UNBOUNDED FOLLOWING AND CURRENT ROW)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND UNBOUNDED PRECEDING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND 5 PRECEDING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS BETWEEN 2 FOLLOWING AND 5 PRECEDING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS BETWEEN 2 FOLLOWING AND CURRENT ROW)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (RANGE 2 PRECEDING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (RANGE BETWEEN 2 PRECEDING AND CURRENT ROW)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (RANGE BETWEEN CURRENT ROW AND 5 FOLLOWING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (RANGE BETWEEN 2 PRECEDING AND 5 FOLLOWING)")
        .hasErrorCode(INVALID_WINDOW_FRAME);
assertFails("SELECT rank() OVER (ROWS 5e-1 PRECEDING)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT rank() OVER (ROWS 'foo' PRECEDING)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND 5e-1 FOLLOWING)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND 'foo' FOLLOWING)")
        .hasErrorCode(TYPE_MISMATCH);
}
// DISTINCT inside a window function argument is not supported.
@Test
public void testDistinctInWindowFunctionParameter()
{
assertFails("SELECT a, count(DISTINCT b) OVER () FROM t1")
        .hasErrorCode(NOT_SUPPORTED);
}
// GROUP BY may mix ordinals (referring to wildcard-expanded outputs) with names.
@Test
public void testGroupByOrdinalsWithWildcard()
{
// TODO: verify output
analyze("SELECT t1.*, a FROM t1 GROUP BY 1,2,c,d");
}
// Qualified and unqualified forms of the same column are interchangeable between
// SELECT and GROUP BY.
@Test
public void testGroupByWithQualifiedName()
{
// TODO: verify output
analyze("SELECT a FROM t1 GROUP BY t1.a");
}
@Test
public void testGroupByWithQualifiedName2()
{
// TODO: verify output
analyze("SELECT t1.a FROM t1 GROUP BY a");
}
@Test
public void testGroupByWithQualifiedName3()
{
// TODO: verify output
analyze("SELECT * FROM t1 GROUP BY t1.a, t1.b, t1.c, t1.d");
}
// A row constructor over grouping keys is a valid projection.
@Test
public void testGroupByWithRowExpression()
{
// TODO: verify output
analyze("SELECT (a, b) FROM t1 GROUP BY a, b");
}
// HAVING may use aggregates freely, but a bare column must be a grouping key.
@Test
public void testHaving()
{
// TODO: verify output
analyze("SELECT sum(a) FROM t1 HAVING avg(a) - avg(b) > 10");
assertFails("SELECT a FROM t1 HAVING a = 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
        .hasMessageMatching("line 1:8: 'a' must be an aggregate expression or appear in GROUP BY clause");
}
// WITH-clause names resolve case-insensitively ("AB" vs "ab").
@Test
public void testWithCaseInsensitiveResolution()
{
// TODO: verify output
analyze("WITH AB AS (SELECT * FROM t1) SELECT * FROM ab");
}
// All isolation-level and access-mode combinations of START TRANSACTION analyze cleanly.
@Test
public void testStartTransaction()
{
analyze("START TRANSACTION");
analyze("START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED");
analyze("START TRANSACTION ISOLATION LEVEL READ COMMITTED");
analyze("START TRANSACTION ISOLATION LEVEL REPEATABLE READ");
analyze("START TRANSACTION ISOLATION LEVEL SERIALIZABLE");
analyze("START TRANSACTION READ ONLY");
analyze("START TRANSACTION READ WRITE");
analyze("START TRANSACTION ISOLATION LEVEL READ COMMITTED, READ ONLY");
analyze("START TRANSACTION READ ONLY, ISOLATION LEVEL READ COMMITTED");
analyze("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE");
}
// COMMIT with and without the optional WORK keyword.
@Test
public void testCommit()
{
analyze("COMMIT");
analyze("COMMIT WORK");
}
// ROLLBACK with and without the optional WORK keyword.
@Test
public void testRollback()
{
analyze("ROLLBACK");
analyze("ROLLBACK WORK");
}
// EXPLAIN ANALYZE wraps a regular query and analyzes like one.
@Test
public void testExplainAnalyze()
{
analyze("EXPLAIN ANALYZE SELECT * FROM t1");
}
// INSERT column/type checking: column-list resolution is case-insensitive, hidden
// columns are excluded from the implicit target list, and implicit coercions are
// honored while incompatible source/target types fail with TYPE_MISMATCH.
@Test
public void testInsert()
{
assertFails("INSERT INTO t6 (a) SELECT b from t6")
        .hasErrorCode(TYPE_MISMATCH);
analyze("INSERT INTO t1 SELECT * FROM t1");
analyze("INSERT INTO t3 SELECT * FROM t3");
analyze("INSERT INTO t3 SELECT a, b FROM t3");
assertFails("INSERT INTO t1 VALUES (1, 2)")
        .hasErrorCode(TYPE_MISMATCH);
analyze("INSERT INTO t5 (a) VALUES(null)");
// ignore t5 hidden column
analyze("INSERT INTO t5 VALUES (1)");
// fail if hidden column provided
assertFails("INSERT INTO t5 VALUES (1, 2)")
        .hasErrorCode(TYPE_MISMATCH);
// note b is VARCHAR, while a,c,d are BIGINT
analyze("INSERT INTO t6 (a) SELECT a from t6");
analyze("INSERT INTO t6 (a) SELECT c from t6");
analyze("INSERT INTO t6 (a,b,c,d) SELECT * from t6");
analyze("INSERT INTO t6 (A,B,C,D) SELECT * from t6");
analyze("INSERT INTO t6 (a,b,c,d) SELECT d,b,c,a from t6");
// NOTE(review): duplicates the first assertion of this test — consider removing one copy.
assertFails("INSERT INTO t6 (a) SELECT b from t6")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("INSERT INTO t6 (unknown) SELECT * FROM t6")
        .hasErrorCode(COLUMN_NOT_FOUND);
assertFails("INSERT INTO t6 (a, a) SELECT * FROM t6")
        .hasErrorCode(DUPLICATE_COLUMN_NAME);
assertFails("INSERT INTO t6 (a, A) SELECT * FROM t6")
        .hasErrorCode(DUPLICATE_COLUMN_NAME);
// b is bigint, while a is double, coercion from b to a is possible
analyze("INSERT INTO t7 (b) SELECT (a) FROM t7 ");
assertFails("INSERT INTO t7 (a) SELECT (b) FROM t7")
        .hasErrorCode(TYPE_MISMATCH);
// d is array of bigints, while c is array of doubles, coercion from d to c is possible
analyze("INSERT INTO t7 (d) SELECT (c) FROM t7 ");
assertFails("INSERT INTO t7 (c) SELECT (d) FROM t7 ")
        .hasErrorCode(TYPE_MISMATCH);
analyze("INSERT INTO t7 (d) VALUES (ARRAY[null])");
analyze("INSERT INTO t6 (d) VALUES (1), (2), (3)");
analyze("INSERT INTO t6 (a,b,c,d) VALUES (1, 'a', 1, 1), (2, 'b', 2, 2), (3, 'c', 3, 3), (4, 'd', 4, 4)");
}
// Invalid INSERT targets and malformed VALUES: unknown table, view target,
// inconsistent row arity, and mixed incompatible row types.
@Test
public void testInvalidInsert()
{
assertFails("INSERT INTO foo VALUES (1)")
        .hasErrorCode(TABLE_NOT_FOUND);
assertFails("INSERT INTO v1 VALUES (1)")
        .hasErrorCode(NOT_SUPPORTED);
// fail if inconsistent fields count
assertFails("INSERT INTO t1 (a) VALUES (1), (1, 2)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("INSERT INTO t1 (a, b) VALUES (1), (1, 2)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("INSERT INTO t1 (a, b) VALUES (1, 2), (1, 2), (1, 2, 3)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("INSERT INTO t1 (a, b) VALUES ('a', 'b'), ('a', 'b', 'c')")
        .hasErrorCode(TYPE_MISMATCH);
// fail if mismatched column types
assertFails("INSERT INTO t1 (a, b) VALUES ('a', 'b'), (1, 'b')")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("INSERT INTO t1 (a, b) VALUES ('a', 'b'), ('a', 'b'), (1, 'b')")
        .hasErrorCode(TYPE_MISMATCH);
}
// Two WITH queries with the same name are rejected.
@Test
public void testDuplicateWithQuery()
{
assertFails("WITH a AS (SELECT * FROM t1)," +
        "     a AS (SELECT * FROM t1)" +
        "SELECT * FROM a")
        .hasErrorCode(DUPLICATE_NAMED_QUERY);
}
// WITH names collide case-insensitively ("a" vs "A").
@Test
public void testCaseInsensitiveDuplicateWithQuery()
{
assertFails("WITH a AS (SELECT * FROM t1)," +
        "     A AS (SELECT * FROM t1)" +
        "SELECT * FROM a")
        .hasErrorCode(DUPLICATE_NAMED_QUERY);
}
// A WITH query may not reference a sibling defined later in the WITH list.
@Test
public void testWithForwardReference()
{
assertFails("WITH a AS (SELECT * FROM b)," +
        "     b AS (SELECT * FROM t1)" +
        "SELECT * FROM a")
        .hasErrorCode(TABLE_NOT_FOUND);
}
// Scalar-expression type checking across operators: each construct is probed with
// operands of the wrong type and must fail with TYPE_MISMATCH (or the more
// specific code noted inline for ROW subscripts).
@Test
public void testExpressions()
{
// logical not
assertFails("SELECT NOT 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// logical and/or
assertFails("SELECT 1 AND TRUE FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT TRUE AND 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 1 OR TRUE FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT TRUE OR 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// comparison
assertFails("SELECT 1 = 'a' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// nullif
assertFails("SELECT NULLIF(1, 'a') FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// case
assertFails("SELECT CASE WHEN TRUE THEN 'a' ELSE 1 END FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CASE WHEN '1' THEN 1 ELSE 2 END FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CASE 1 WHEN 'a' THEN 2 END FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CASE 1 WHEN 1 THEN 2 ELSE 'a' END FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// coalesce
assertFails("SELECT COALESCE(1, 'a') FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// cast
assertFails("SELECT CAST(date '2014-01-01' AS bigint)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT TRY_CAST(date '2014-01-01' AS bigint)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CAST(null AS UNKNOWN)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CAST(1 AS MAP)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CAST(1 AS ARRAY)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT CAST(1 AS ROW)")
        .hasErrorCode(TYPE_MISMATCH);
// arithmetic unary
assertFails("SELECT -'a' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT +'a' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// arithmetic addition/subtraction
assertFails("SELECT 'a' + 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 1 + 'a' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 'a' - 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 1 - 'a' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// like
assertFails("SELECT 1 LIKE 'a' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 'a' LIKE 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 'a' LIKE 'b' ESCAPE 1 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// extract
assertFails("SELECT EXTRACT(DAY FROM 'a') FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// between
assertFails("SELECT 1 BETWEEN 'a' AND 2 FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 1 BETWEEN 0 AND 'b' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 1 BETWEEN 'a' AND 'b' FROM t1")
        .hasErrorCode(TYPE_MISMATCH);
// in
assertFails("SELECT * FROM t1 WHERE 1 IN ('a')")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT * FROM t1 WHERE 'a' IN (1)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT * FROM t1 WHERE 'a' IN (1, 'b')")
        .hasErrorCode(TYPE_MISMATCH);
// row type
assertFails("SELECT t.x.f1 FROM (VALUES 1) t(x)")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT x.f1 FROM (VALUES 1) t(x)")
        .hasErrorCode(TYPE_MISMATCH);
// subscript on Row
assertFails("SELECT ROW(1, 'a')[x]")
        .hasErrorCode(EXPRESSION_NOT_CONSTANT)
        .hasMessageMatching("line 1:20: Subscript expression on ROW requires a constant index");
assertFails("SELECT ROW(1, 'a')[9999999999]")
        .hasErrorCode(TYPE_MISMATCH)
        .hasMessageMatching("line 1:20: Subscript expression on ROW requires integer index, found bigint");
assertFails("SELECT ROW(1, 'a')[-1]")
        .hasErrorCode(INVALID_FUNCTION_ARGUMENT)
        .hasMessageMatching("line 1:20: Invalid subscript index: -1. ROW indices start at 1");
assertFails("SELECT ROW(1, 'a')[0]")
        .hasErrorCode(INVALID_FUNCTION_ARGUMENT)
        .hasMessageMatching("line 1:20: Invalid subscript index: 0. ROW indices start at 1");
assertFails("SELECT ROW(1, 'a')[5]")
        .hasErrorCode(INVALID_FUNCTION_ARGUMENT)
        .hasMessageMatching("line 1:20: Subscript index out of bounds: 5, max value is 2");
}
// LIKE accepts both varchar and char operands.
@Test
public void testLike()
{
analyze("SELECT '1' LIKE '1'");
analyze("SELECT CAST('1' as CHAR(1)) LIKE '1'");
}
// Disabled pending widening conversions for numeric IN lists (see TODO).
@Test(enabled = false) // TODO: need to support widening conversion for numbers
public void testInWithNumericTypes()
{
analyze("SELECT * FROM t1 WHERE 1 IN (1, 2, 3.5)");
}
// SELECT * with no FROM clause has nothing to expand.
@Test
public void testWildcardWithoutFrom()
{
assertFails("SELECT *")
        .hasErrorCode(COLUMN_NOT_FOUND);
}
// A bare identifier with no FROM clause cannot resolve.
@Test
public void testReferenceWithoutFrom()
{
assertFails("SELECT dummy")
        .hasErrorCode(COLUMN_NOT_FOUND);
}
// Basic grouping key + aggregate combination analyzes cleanly.
@Test
public void testGroupBy()
{
// TODO: validate output
analyze("SELECT a, SUM(b) FROM t1 GROUP BY a");
}
// GROUP BY () makes the query a global aggregation, so a bare column is invalid.
@Test
public void testGroupByEmpty()
{
assertFails("SELECT a FROM t1 GROUP BY ()")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
}
// ROLLUP/CUBE/GROUPING SETS elements must be plain column references, never
// computed expressions; the message pinpoints the offending expression.
@Test
public void testComplexExpressionInGroupingSet()
{
assertFails("SELECT 1 FROM (VALUES 1) t(x) GROUP BY ROLLUP(x + 1)")
        .hasErrorCode(INVALID_COLUMN_REFERENCE)
        .hasMessageMatching("\\Qline 1:49: GROUP BY expression must be a column reference: (x + 1)\\E");
assertFails("SELECT 1 FROM (VALUES 1) t(x) GROUP BY CUBE(x + 1)")
        .hasErrorCode(INVALID_COLUMN_REFERENCE)
        .hasMessageMatching("\\Qline 1:47: GROUP BY expression must be a column reference: (x + 1)\\E");
assertFails("SELECT 1 FROM (VALUES 1) t(x) GROUP BY GROUPING SETS (x + 1)")
        .hasErrorCode(INVALID_COLUMN_REFERENCE)
        .hasMessageMatching("\\Qline 1:57: GROUP BY expression must be a column reference: (x + 1)\\E");
assertFails("SELECT 1 FROM (VALUES 1) t(x) GROUP BY ROLLUP(x, x + 1)")
        .hasErrorCode(INVALID_COLUMN_REFERENCE)
        .hasMessageMatching("\\Qline 1:52: GROUP BY expression must be a column reference: (x + 1)\\E");
assertFails("SELECT 1 FROM (VALUES 1) t(x) GROUP BY CUBE(x, x + 1)")
        .hasErrorCode(INVALID_COLUMN_REFERENCE)
        .hasMessageMatching("\\Qline 1:50: GROUP BY expression must be a column reference: (x + 1)\\E");
assertFails("SELECT 1 FROM (VALUES 1) t(x) GROUP BY GROUPING SETS (x, x + 1)")
        .hasErrorCode(INVALID_COLUMN_REFERENCE)
        .hasMessageMatching("\\Qline 1:60: GROUP BY expression must be a column reference: (x + 1)\\E");
}
// Degenerate/single grouping-set forms all analyze cleanly.
@Test
public void testSingleGroupingSet()
{
// TODO: validate output
analyze("SELECT SUM(b) FROM t1 GROUP BY ()");
analyze("SELECT SUM(b) FROM t1 GROUP BY GROUPING SETS (())");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS (a)");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS (a)");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b))");
}
// Grouping sets may be combined with plain keys, ROLLUP, and CUBE in one clause.
@Test
public void testMultipleGroupingSetMultipleColumns()
{
// TODO: validate output
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b), (c, d))");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY a, b, GROUPING SETS ((c, d))");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a), (c, d))");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b)), ROLLUP (c, d)");
analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b)), CUBE (c, d)");
}
// Wildcard expansion still enforces grouping rules; the message names the column
// by alias, qualified name, or ordinal depending on how it surfaced.
@Test
public void testAggregateWithWildcard()
{
assertFails("SELECT * FROM (SELECT a + 1, b FROM t1) t GROUP BY b ORDER BY 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
        .hasMessageMatching("Column 1 not in GROUP BY clause");
assertFails("SELECT * FROM (SELECT a, b FROM t1) t GROUP BY b ORDER BY 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
        .hasMessageMatching("Column 't.a' not in GROUP BY clause");
assertFails("SELECT * FROM (SELECT a, b FROM t1) GROUP BY b ORDER BY 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
        .hasMessageMatching("Column 'a' not in GROUP BY clause");
assertFails("SELECT * FROM (SELECT a + 1, b FROM t1) GROUP BY b ORDER BY 1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
        .hasMessageMatching("Column 1 not in GROUP BY clause");
}
// A non-grouped column is invalid anywhere inside a CASE — operand, WHEN result, or ELSE.
@Test
public void testGroupByCase()
{
assertFails("SELECT CASE a WHEN 1 THEN 'a' ELSE 'b' END, count(*) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT CASE 1 WHEN 2 THEN a ELSE 0 END, count(*) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT CASE 1 WHEN 2 THEN 0 ELSE a END, count(*) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT CASE WHEN a = 1 THEN 'a' ELSE 'b' END, count(*) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT CASE WHEN true THEN a ELSE 0 END, count(*) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT CASE WHEN true THEN 0 ELSE a END, count(*) FROM t1")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
}
// GROUPING() arguments must be grouping columns, and GROUPING() requires a GROUP BY.
@Test
public void testGroupingWithWrongColumnsAndNoGroupBy()
{
assertFails("SELECT a, SUM(b), GROUPING(a, b, c, d) FROM t1 GROUP BY GROUPING SETS ((a, b), (c))")
        .hasErrorCode(INVALID_ARGUMENTS);
assertFails("SELECT a, SUM(b), GROUPING(a, b) FROM t1")
        .hasErrorCode(MISSING_GROUP_BY);
}
// UNION branches must agree in column count and have compatible column types;
// the message identifies the first offending column by position.
@Test
public void testMismatchedUnionQueries()
{
assertFails("SELECT 1 UNION SELECT 'a'")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT a FROM t1 UNION SELECT 'a'")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("(SELECT 1) UNION SELECT 'a'")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 1, 2 UNION SELECT 1")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 'a' UNION SELECT 'b', 'c'")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("TABLE t2 UNION SELECT 'a'")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT 123, 'foo' UNION ALL SELECT 'bar', 999")
        .hasErrorCode(TYPE_MISMATCH)
        .hasMessageMatching(".* column 1 in UNION query has incompatible types.*");
assertFails("SELECT 123, 123 UNION ALL SELECT 999, 'bar'")
        .hasErrorCode(TYPE_MISMATCH)
        .hasMessageMatching(".* column 2 in UNION query has incompatible types.*");
}
// ORDER BY over a UNION resolves against the union's output columns only.
@Test
public void testUnionUnmatchedOrderByAttribute()
{
assertFails("TABLE t2 UNION ALL SELECT c, d FROM t1 ORDER BY c")
        .hasErrorCode(COLUMN_NOT_FOUND);
}
// Non-grouped columns are invalid even when buried in IF / LIKE / IN expressions.
@Test
public void testGroupByComplexExpressions()
{
assertFails("SELECT IF(a IS NULL, 1, 0) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT IF(a IS NOT NULL, 1, 0) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT IF(CAST(a AS VARCHAR) LIKE 'a', 1, 0) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT a IN (1, 2, 3) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
assertFails("SELECT 1 IN (a, 2, 3) FROM t1 GROUP BY b")
        .hasErrorCode(EXPRESSION_NOT_AGGREGATE);
}
// TABLESAMPLE percentage must be a numeric constant.
@Test
public void testNonNumericTableSamplePercentage()
{
assertFails("SELECT * FROM t1 TABLESAMPLE BERNOULLI ('a')")
        .hasErrorCode(TYPE_MISMATCH);
assertFails("SELECT * FROM t1 TABLESAMPLE BERNOULLI (a + 1)")
        .hasErrorCode(EXPRESSION_NOT_CONSTANT);
}
// Negative sample percentages are out of range.
// NOTE(review): both cases here are negative; the upper bound (> 100) appears
// untested in this chunk — confirm whether -101 was meant to be 101.
@Test
public void testTableSampleOutOfRange()
{
assertFails("SELECT * FROM t1 TABLESAMPLE BERNOULLI (-1)")
        .hasErrorCode(NUMERIC_VALUE_OUT_OF_RANGE);
assertFails("SELECT * FROM t1 TABLESAMPLE BERNOULLI (-101)")
        .hasErrorCode(NUMERIC_VALUE_OUT_OF_RANGE);
}
// CTAS column-list validation: alias count must match the query's output arity,
// names must be unique (case-insensitively), types must be known, and WITH
// properties must be resolvable and unique (quoted or not).
@Test
public void testCreateTableAsColumns()
{
// TODO: validate output
analyze("CREATE TABLE test(a) AS SELECT 123");
analyze("CREATE TABLE test(a, b) AS SELECT 1, 2");
analyze("CREATE TABLE test(a) AS (VALUES 1)");
assertFails("CREATE TABLE test AS SELECT 123")
        .hasErrorCode(MISSING_COLUMN_NAME);
assertFails("CREATE TABLE test AS SELECT 1 a, 2 a")
        .hasErrorCode(DUPLICATE_COLUMN_NAME);
assertFails("CREATE TABLE test AS SELECT null a")
        .hasErrorCode(COLUMN_TYPE_UNKNOWN);
assertFails("CREATE TABLE test(x) AS SELECT 1, 2")
        .hasErrorCode(MISMATCHED_COLUMN_ALIASES)
        .hasLocation(1, 19);
assertFails("CREATE TABLE test(x, y) AS SELECT 1")
        .hasErrorCode(MISMATCHED_COLUMN_ALIASES)
        .hasLocation(1, 19);
assertFails("CREATE TABLE test(x, y) AS (VALUES 1)")
        .hasErrorCode(MISMATCHED_COLUMN_ALIASES)
        .hasLocation(1, 19);
assertFails("CREATE TABLE test(abc, AbC) AS SELECT 1, 2")
        .hasErrorCode(DUPLICATE_COLUMN_NAME)
        .hasLocation(1, 24);
assertFails("CREATE TABLE test(x) AS SELECT null")
        .hasErrorCode(COLUMN_TYPE_UNKNOWN)
        .hasLocation(1, 1);
assertFails("CREATE TABLE test(x) WITH (p1 = y) AS SELECT null")
        .hasErrorCode(COLUMN_NOT_FOUND)
        .hasMessageMatching(".*Column 'y' cannot be resolved");
assertFails("CREATE TABLE test(x) WITH (p1 = 'p1', p2 = 'p2', p1 = 'p3') AS SELECT null")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
assertFails("CREATE TABLE test(x) WITH (p1 = 'p1', \"p1\" = 'p2') AS SELECT null")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
}
// CREATE TABLE WITH-property validation mirrors the CTAS rules above.
@Test
public void testCreateTable()
{
analyze("CREATE TABLE test (id bigint)");
analyze("CREATE TABLE test (id bigint) WITH (p1 = 'p1')");
assertFails("CREATE TABLE test (x bigint) WITH (p1 = y)")
        .hasErrorCode(COLUMN_NOT_FOUND)
        .hasMessageMatching(".*Column 'y' cannot be resolved");
assertFails("CREATE TABLE test (id bigint) WITH (p1 = 'p1', p2 = 'p2', p1 = 'p3')")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
assertFails("CREATE TABLE test (id bigint) WITH (p1 = 'p1', \"p1\" = 'p2')")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
}
// ANALYZE WITH-properties must be unique, quoted or not.
@Test
public void testAnalyze()
{
analyze("ANALYZE t1");
analyze("ANALYZE t1 WITH (p1 = 'p1')");
assertFails("ANALYZE t1 WITH (p1 = 'p1', p2 = 2, p1 = 'p3')")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
assertFails("ANALYZE t1 WITH (p1 = 'p1', \"p1\" = 'p2')")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
}
// CREATE SCHEMA WITH-property validation mirrors the table cases.
@Test
public void testCreateSchema()
{
analyze("CREATE SCHEMA test");
analyze("CREATE SCHEMA test WITH (p1 = 'p1')");
assertFails("CREATE SCHEMA test WITH (p1 = y)")
        .hasErrorCode(COLUMN_NOT_FOUND)
        .hasMessageMatching(".*Column 'y' cannot be resolved");
assertFails("CREATE SCHEMA test WITH (p1 = 'p1', p2 = 'p2', p1 = 'p3')")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
assertFails("CREATE SCHEMA test WITH (p1 = 'p1', \"p1\" = 'p2')")
        .hasErrorCode(DUPLICATE_PROPERTY)
        .hasMessageMatching(".* Duplicate property: p1");
}
// CREATE VIEW output columns must be named, unique, and of known type.
@Test
public void testCreateViewColumns()
{
assertFails("CREATE VIEW test AS SELECT 123")
        .hasErrorCode(MISSING_COLUMN_NAME);
assertFails("CREATE VIEW test AS SELECT 1 a, 2 a")
        .hasErrorCode(DUPLICATE_COLUMN_NAME);
assertFails("CREATE VIEW test AS SELECT null a")
        .hasErrorCode(COLUMN_TYPE_UNKNOWN);
}
// Replacing a view with a query that selects from itself is detected as recursion.
@Test
public void testCreateRecursiveView()
{
assertFails("CREATE OR REPLACE VIEW v1 AS SELECT * FROM v1")
        .hasErrorCode(VIEW_IS_RECURSIVE);
}
// Self-joins over a view are fine; v5 (a pre-existing recursive view in the test
// catalog) is reported as invalid when queried.
@Test
public void testExistingRecursiveView()
{
analyze("SELECT * FROM v1 a JOIN v1 b ON a.a = b.a");
analyze("SELECT * FROM v1 a JOIN (SELECT * from v1) b ON a.a = b.a");
assertFails("SELECT * FROM v5")
        .hasErrorCode(INVALID_VIEW);
}
// SHOW CREATE VIEW works on views, rejects tables, and reports unknown names.
@Test
public void testShowCreateView()
{
analyze("SHOW CREATE VIEW v1");
analyze("SHOW CREATE VIEW v2");
assertFails("SHOW CREATE VIEW t1")
        .hasErrorCode(NOT_SUPPORTED);
assertFails("SHOW CREATE VIEW none")
        .hasErrorCode(TABLE_NOT_FOUND);
}
// v2 is registered with a stale definition in the test catalog.
@Test
public void testStaleView()
{
assertFails("SELECT * FROM v2")
        .hasErrorCode(VIEW_IS_STALE);
}
@Test
public void testStoredViewAnalysisScoping()
{
// the view must not be analyzed using the query context
analyze("WITH t1 AS (SELECT 123 x) SELECT * FROM v1");
}
@Test
public void testStoredViewResolution()
{
// the view must be analyzed relative to its own catalog/schema
analyze("SELECT * FROM c3.s3.v3");
}
@Test
public void testQualifiedViewColumnResolution()
{
// it should be possible to qualify the column reference with the view name
analyze("SELECT v1.a FROM v1");
analyze("SELECT s1.v1.a FROM s1.v1");
analyze("SELECT tpch.s1.v1.a FROM tpch.s1.v1");
}
// v4 is defined with an upper-case column name; wildcard expansion must still work.
@Test
public void testViewWithUppercaseColumn()
{
analyze("SELECT * FROM v4");
}
// USE is not supported by this analyzer configuration.
@Test
public void testUse()
{
assertFails("USE foo")
        .hasErrorCode(NOT_SUPPORTED);
}
@Test
public void testNotNullInJoinClause()
{
analyze("SELECT * FROM (VALUES (1)) a (x) JOIN (VALUES (2)) b ON a.x IS NOT NULL");
}
@Test
public void testIfInJoinClause()
{
analyze("SELECT * FROM (VALUES (1)) a (x) JOIN (VALUES (2)) b ON IF(a.x = 1, true, false)");
}
@Test
public void testLiteral()
{
assertFails("SELECT TIMESTAMP '2012-10-31 01:00:00 PT'")
.hasErrorCode(INVALID_LITERAL);
}
@Test
public void testLambda()
{
analyze("SELECT apply(5, x -> abs(x)) from t1");
assertFails("SELECT x -> abs(x) from t1")
.hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testLambdaCapture()
{
analyze("SELECT apply(c1, x -> x + c2) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(c1, c2)");
analyze("SELECT apply(c1 + 10, x -> apply(x + 100, y -> c1)) FROM (VALUES 1) t(c1)");
// reference lambda variable of the not-immediately-enclosing lambda
analyze("SELECT apply(1, x -> apply(10, y -> x)) FROM (VALUES 1000) t(x)");
analyze("SELECT apply(1, x -> apply(10, y -> x)) FROM (VALUES 'abc') t(x)");
analyze("SELECT apply(1, x -> apply(10, y -> apply(100, z -> x))) FROM (VALUES 1000) t(x)");
analyze("SELECT apply(1, x -> apply(10, y -> apply(100, z -> x))) FROM (VALUES 'abc') t(x)");
}
@Test
public void testLambdaInAggregationContext()
{
    // Lambdas are allowed to wrap aggregates and GROUP BY expressions...
    analyze("SELECT apply(sum(x), i -> i * i) FROM (VALUES 1, 2, 3, 4, 5) t(x)");
    analyze("SELECT apply(x, i -> i - 1), sum(y) FROM (VALUES (1, 10), (1, 20), (2, 50)) t(x,y) group by x");
    analyze("SELECT x, apply(sum(y), i -> i * 10) FROM (VALUES (1, 10), (1, 20), (2, 50)) t(x,y) group by x");
    analyze("SELECT apply(8, x -> x + 1) FROM (VALUES (1, 2)) t(x,y) GROUP BY y");
    // ...but a lambda body must not capture a column that is neither aggregated nor grouped
    assertFails("SELECT apply(sum(x), i -> i * x) FROM (VALUES 1, 2, 3, 4, 5) t(x)")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
    assertFails("SELECT apply(1, y -> x) FROM (VALUES (1,2)) t(x,y) GROUP BY y")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
    assertFails("SELECT apply(1, y -> x.someField) FROM (VALUES (CAST(ROW(1) AS ROW(someField BIGINT)), 2)) t(x,y) GROUP BY y")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
    analyze("SELECT apply(CAST(ROW(1) AS ROW(someField BIGINT)), x -> x.someField) FROM (VALUES (1,2)) t(x,y) GROUP BY y");
    analyze("SELECT apply(sum(x), x -> x * x) FROM (VALUES 1, 2, 3, 4, 5) t(x)");
    // nested lambda expression uses the same variable name
    analyze("SELECT apply(sum(x), x -> apply(x, x -> x * x)) FROM (VALUES 1, 2, 3, 4, 5) t(x)");
    // illegal use of a column whose name is the same as a lambda variable name
    assertFails("SELECT apply(sum(x), x -> x * x) + x FROM (VALUES 1, 2, 3, 4, 5) t(x)")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
    assertFails("SELECT apply(sum(x), x -> apply(x, x -> x * x)) + x FROM (VALUES 1, 2, 3, 4, 5) t(x)")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
    // x + y within lambda should not be treated as group expression
    assertFails("SELECT apply(1, y -> x + y) FROM (VALUES (1,2)) t(x, y) GROUP BY x+y")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
    assertFails("SELECT apply(1, x -> y + transform(array[1], z -> x)[1]) FROM (VALUES (1, 2)) t(x,y) GROUP BY y + transform(array[1], z -> x)[1]")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching(".* must be an aggregate expression or appear in GROUP BY clause");
}
@Test
public void testLambdaInSubqueryContext()
{
    // Lambdas applied to values coming from derived tables and scalar subqueries.
    analyze("SELECT apply(x, i -> i * i) FROM (SELECT 10 x)");
    analyze("SELECT apply((SELECT 10), i -> i * i)");
    // with capture
    analyze("SELECT apply(x, i -> i * x) FROM (SELECT 10 x)");
    analyze("SELECT apply(x, y -> y * x) FROM (SELECT 10 x, 3 y)");
    analyze("SELECT apply(x, z -> y * x) FROM (SELECT 10 x, 3 y)");
}
@Test
public void testLambdaWithAggregationAndGrouping()
{
    // Aggregations, window functions and grouping operations are rejected inside lambda bodies.
    assertFails("SELECT transform(ARRAY[1], y -> max(x)) FROM (VALUES 10) t(x)")
            .hasErrorCode(EXPRESSION_NOT_SCALAR)
            .hasMessageMatching(".* Lambda expression cannot contain aggregations, window functions or grouping operations: .*");
    // use of aggregation/window function on lambda variable
    assertFails("SELECT apply(1, x -> max(x)) FROM (VALUES (1,2)) t(x,y) GROUP BY y")
            .hasErrorCode(EXPRESSION_NOT_SCALAR)
            .hasMessageMatching(".* Lambda expression cannot contain aggregations, window functions or grouping operations: .*");
    assertFails("SELECT apply(CAST(ROW(1) AS ROW(someField BIGINT)), x -> max(x.someField)) FROM (VALUES (1,2)) t(x,y) GROUP BY y")
            .hasErrorCode(EXPRESSION_NOT_SCALAR)
            .hasMessageMatching(".* Lambda expression cannot contain aggregations, window functions or grouping operations: .*");
    assertFails("SELECT apply(1, x -> grouping(x)) FROM (VALUES (1, 2)) t(x, y) GROUP BY y")
            .hasErrorCode(EXPRESSION_NOT_SCALAR)
            .hasMessageMatching(".* Lambda expression cannot contain aggregations, window functions or grouping operations: .*");
}
@Test
public void testLambdaWithSubquery()
{
    // Subqueries are not allowed inside lambda bodies...
    assertFails("SELECT apply(1, i -> (SELECT 3)) FROM (VALUES 1) t(x)")
            .hasErrorCode(NOT_SUPPORTED)
            .hasMessageMatching(".* Lambda expression cannot contain subqueries");
    assertFails("SELECT apply(1, i -> (SELECT i)) FROM (VALUES 1) t(x)")
            .hasErrorCode(NOT_SUPPORTED)
            .hasMessageMatching(".* Lambda expression cannot contain subqueries");
    // ...but a lambda inside a subquery may capture columns of the outer query,
    // subject to the usual GROUP BY visibility rules.
    // GROUP BY column captured in lambda
    analyze(
            "SELECT (SELECT apply(0, x -> x + b) FROM (VALUES 1) x(a)) FROM t1 u GROUP BY b");
    // non-GROUP BY column captured in lambda
    assertFails("SELECT (SELECT apply(0, x -> x + a) FROM (VALUES 1) x(c)) " +
            "FROM t1 u GROUP BY b")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching("line 1:34: Subquery uses 'a' which must appear in GROUP BY clause");
    assertFails("SELECT (SELECT apply(0, x -> x + u.a) from (values 1) x(a)) " +
            "FROM t1 u GROUP BY b")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching("line 1:34: Subquery uses 'u.a' which must appear in GROUP BY clause");
    // name shadowing
    analyze("SELECT (SELECT apply(0, x -> x + a) FROM (VALUES 1) x(a)) FROM t1 u GROUP BY b");
    analyze("SELECT (SELECT apply(0, a -> a + a)) FROM t1 u GROUP BY b");
}
@Test
public void testLambdaWithSubqueryInOrderBy()
{
    // Lambdas in ORDER BY subqueries may capture select columns, output aliases,
    // and GROUP BY columns -- but not ungrouped columns.
    analyze("SELECT a FROM t1 ORDER BY (SELECT apply(0, x -> x + a))");
    analyze("SELECT a AS output_column FROM t1 ORDER BY (SELECT apply(0, x -> x + output_column))");
    analyze("SELECT count(*) FROM t1 GROUP BY a ORDER BY (SELECT apply(0, x -> x + a))");
    analyze("SELECT count(*) AS output_column FROM t1 GROUP BY a ORDER BY (SELECT apply(0, x -> x + output_column))");
    assertFails("SELECT count(*) FROM t1 GROUP BY a ORDER BY (SELECT apply(0, x -> x + b))")
            .hasErrorCode(EXPRESSION_NOT_AGGREGATE)
            .hasMessageMatching("line 1:71: Subquery uses 'b' which must appear in GROUP BY clause");
}
@Test
public void testLambdaWithInvalidParameterCount()
{
    // Each lambda-accepting builtin rejects lambdas whose arity does not match the
    // function signature; the error reports the expected and actual argument counts.
    assertFails("SELECT apply(5, (x, y) -> 6)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:17: Expected a lambda that takes 1 argument\\(s\\) but got 2");
    assertFails("SELECT apply(5, (x, y, z) -> 6)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:17: Expected a lambda that takes 1 argument\\(s\\) but got 3");
    // arity is checked even when the call is wrapped in TRY
    assertFails("SELECT TRY(apply(5, (x, y) -> x + 1) / 0)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:21: Expected a lambda that takes 1 argument\\(s\\) but got 2");
    assertFails("SELECT TRY(apply(5, (x, y, z) -> x + 1) / 0)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:21: Expected a lambda that takes 1 argument\\(s\\) but got 3");
    assertFails("SELECT filter(ARRAY [5, 6], (x, y) -> x = 5)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:29: Expected a lambda that takes 1 argument\\(s\\) but got 2");
    assertFails("SELECT filter(ARRAY [5, 6], (x, y, z) -> x = 5)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:29: Expected a lambda that takes 1 argument\\(s\\) but got 3");
    assertFails("SELECT map_filter(map(ARRAY [5, 6], ARRAY [5, 6]), (x) -> x = 1)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:52: Expected a lambda that takes 2 argument\\(s\\) but got 1");
    assertFails("SELECT map_filter(map(ARRAY [5, 6], ARRAY [5, 6]), (x, y, z) -> x = y + z)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:52: Expected a lambda that takes 2 argument\\(s\\) but got 3");
    assertFails("SELECT reduce(ARRAY [5, 20], 0, (s) -> s, s -> s)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:33: Expected a lambda that takes 2 argument\\(s\\) but got 1");
    assertFails("SELECT reduce(ARRAY [5, 20], 0, (s, x, z) -> s + x, s -> s + z)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:33: Expected a lambda that takes 2 argument\\(s\\) but got 3");
    assertFails("SELECT transform(ARRAY [5, 6], (x, y) -> x + y)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:32: Expected a lambda that takes 1 argument\\(s\\) but got 2");
    assertFails("SELECT transform(ARRAY [5, 6], (x, y, z) -> x + y + z)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:32: Expected a lambda that takes 1 argument\\(s\\) but got 3");
    assertFails("SELECT transform_keys(map(ARRAY[1], ARRAY [2]), k -> k)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:49: Expected a lambda that takes 2 argument\\(s\\) but got 1");
    assertFails("SELECT transform_keys(MAP(ARRAY['a'], ARRAY['b']), (k, v, x) -> k + 1)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:52: Expected a lambda that takes 2 argument\\(s\\) but got 3");
    assertFails("SELECT transform_values(map(ARRAY[1], ARRAY [2]), k -> k)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:51: Expected a lambda that takes 2 argument\\(s\\) but got 1");
    assertFails("SELECT transform_values(map(ARRAY[1], ARRAY [2]), (k, v, x) -> k + 1)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:51: Expected a lambda that takes 2 argument\\(s\\) but got 3");
    assertFails("SELECT zip_with(ARRAY[1], ARRAY['a'], x -> x)")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:39: Expected a lambda that takes 2 argument\\(s\\) but got 1");
    assertFails("SELECT zip_with(ARRAY[1], ARRAY['a'], (x, y, z) -> (x, y, z))")
            .hasErrorCode(INVALID_PARAMETER_USAGE)
            .hasMessageMatching("line 1:39: Expected a lambda that takes 2 argument\\(s\\) but got 3");
}
@Test
public void testInvalidDelete()
{
    // DELETE against a missing table fails; DELETE against a view is unsupported.
    assertFails("DELETE FROM foo")
            .hasErrorCode(TABLE_NOT_FOUND);
    assertFails("DELETE FROM v1")
            .hasErrorCode(NOT_SUPPORTED);
    assertFails("DELETE FROM v1 WHERE a = 1")
            .hasErrorCode(NOT_SUPPORTED);
}
@Test
public void testInvalidShowTables()
{
    // qualified name with more than two parts is a syntax error
    assertFails("SHOW TABLES FROM a.b.c")
            .hasErrorCode(SYNTAX_ERROR);
    // no catalog or schema on the session: both must be resolvable
    Session session = testSessionBuilder()
            .setCatalog(null)
            .setSchema(null)
            .build();
    assertFails(session, "SHOW TABLES")
            .hasErrorCode(MISSING_CATALOG_NAME);
    assertFails(session, "SHOW TABLES FROM a")
            .hasErrorCode(MISSING_CATALOG_NAME);
    assertFails(session, "SHOW TABLES FROM c2.unknown")
            .hasErrorCode(SCHEMA_NOT_FOUND);
    // catalog set but schema still missing
    session = testSessionBuilder()
            .setCatalog(SECOND_CATALOG)
            .setSchema(null)
            .build();
    assertFails(session, "SHOW TABLES")
            .hasErrorCode(MISSING_SCHEMA_NAME);
    assertFails(session, "SHOW TABLES FROM unknown")
            .hasErrorCode(SCHEMA_NOT_FOUND);
}
@Test
public void testInvalidAtTimeZone()
{
    // AT TIME ZONE requires a temporal operand; a varchar is a type mismatch
    assertFails("SELECT 'abc' AT TIME ZONE 'America/Los_Angeles'")
            .hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testValidJoinOnClause()
{
    // ON accepts any boolean-typed expression, including literals and NULL
    analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON TRUE");
    analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON 1=1");
    analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON a.x=b.x AND a.y=b.y");
    analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON NULL");
}
// NOTE(review): method name has a typo ("InValid" vs "Invalid" used elsewhere);
// kept as-is to avoid changing the public test name.
@Test
public void testInValidJoinOnClause()
{
    // non-boolean ON conditions (integers, arithmetic, rows) are type mismatches
    assertFails("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON 1")
            .hasErrorCode(TYPE_MISMATCH);
    assertFails("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON a.x + b.x")
            .hasErrorCode(TYPE_MISMATCH);
    assertFails("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON ROW (TRUE)")
            .hasErrorCode(TYPE_MISMATCH);
    assertFails("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON (a.x=b.x, a.y=b.y)")
            .hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testInvalidAggregationFilter()
{
    // FILTER is rejected on window invocations and on non-aggregate functions
    assertFails("SELECT sum(x) FILTER (WHERE x > 1) OVER (PARTITION BY x) FROM (VALUES (1), (2), (2), (4)) t (x)")
            .hasErrorCode(NOT_SUPPORTED);
    assertFails("SELECT abs(x) FILTER (where y = 1) FROM (VALUES (1, 1)) t(x, y)")
            .hasErrorCode(FUNCTION_NOT_AGGREGATE);
    assertFails("SELECT abs(x) FILTER (where y = 1) FROM (VALUES (1, 1, 1)) t(x, y, z) GROUP BY z")
            .hasErrorCode(FUNCTION_NOT_AGGREGATE);
}
// Exercises ORDER BY inside aggregate calls (e.g. array_agg(x ORDER BY y)),
// including the DISTINCT restriction, orderability requirements, and scoping.
// Fix: the method was package-private; TestNG only discovers public @Test
// methods, so this test was silently skipped. Made public for consistency
// with every other test in this class.
@Test
public void testAggregationWithOrderBy()
{
    analyze("SELECT array_agg(DISTINCT x ORDER BY x) FROM (VALUES (1, 2), (3, 4)) t(x, y)");
    analyze("SELECT array_agg(x ORDER BY y) FROM (VALUES (1, 2), (3, 4)) t(x, y)");
    // with DISTINCT, the ORDER BY expression must appear in the argument list
    assertFails("SELECT array_agg(DISTINCT x ORDER BY y) FROM (VALUES (1, 2), (3, 4)) t(x, y)")
            .hasErrorCode(EXPRESSION_NOT_IN_DISTINCT);
    // ORDER BY only makes sense on aggregate functions
    assertFails("SELECT abs(x ORDER BY y) FROM (VALUES (1, 2), (3, 4)) t(x, y)")
            .hasErrorCode(FUNCTION_NOT_AGGREGATE);
    // the ORDER BY expression must be orderable (maps are not)
    assertFails("SELECT array_agg(x ORDER BY x) FROM (VALUES MAP(ARRAY['a'], ARRAY['b'])) t(x)")
            .hasErrorCode(TYPE_MISMATCH);
    // output aliases are not visible inside an aggregate's ORDER BY
    assertFails("SELECT 1 as a, array_agg(x ORDER BY a) FROM (VALUES (1), (2), (3)) t(x)")
            .hasErrorCode(COLUMN_NOT_FOUND);
    assertFails("SELECT 1 AS c FROM (VALUES (1), (2)) t(x) ORDER BY sum(x order by c)")
            .hasErrorCode(COLUMN_NOT_FOUND);
}
@Test
public void testQuantifiedComparisonExpression()
{
    // ALL/ANY/SOME comparisons: the subquery must produce a single column, and
    // the comparison operator must be applicable to the operand types.
    analyze("SELECT * FROM t1 WHERE t1.a <= ALL (VALUES 10, 20)");
    assertFails("SELECT * FROM t1 WHERE t1.a = ANY (SELECT 1, 2)")
            .hasErrorCode(NOT_SUPPORTED);
    assertFails("SELECT * FROM t1 WHERE t1.a = SOME (VALUES ('abc'))")
            .hasErrorCode(TYPE_MISMATCH);
    // map is not orderable
    assertFails(("SELECT map(ARRAY[1], ARRAY['hello']) < ALL (VALUES map(ARRAY[1], ARRAY['hello']))"))
            .hasErrorCode(TYPE_MISMATCH);
    // but map is comparable
    analyze(("SELECT map(ARRAY[1], ARRAY['hello']) = ALL (VALUES map(ARRAY[1], ARRAY['hello']))"));
    // HLL is neither orderable nor comparable
    assertFails("SELECT cast(NULL AS HyperLogLog) < ALL (VALUES cast(NULL AS HyperLogLog))")
            .hasErrorCode(TYPE_MISMATCH);
    assertFails("SELECT cast(NULL AS HyperLogLog) = ANY (VALUES cast(NULL AS HyperLogLog))")
            .hasErrorCode(TYPE_MISMATCH);
    // qdigest is neither orderable nor comparable
    assertFails("SELECT cast(NULL AS qdigest(double)) < ALL (VALUES cast(NULL AS qdigest(double)))")
            .hasErrorCode(TYPE_MISMATCH);
    assertFails("SELECT cast(NULL AS qdigest(double)) = ANY (VALUES cast(NULL AS qdigest(double)))")
            .hasErrorCode(TYPE_MISMATCH);
}
@Test
public void testJoinUnnest()
{
    // UNNEST may appear on the right side of all join types (ON TRUE for outer joins)
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) CROSS JOIN UNNEST(x)");
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) LEFT OUTER JOIN UNNEST(x) ON true");
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) RIGHT OUTER JOIN UNNEST(x) ON true");
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) FULL OUTER JOIN UNNEST(x) ON true");
}
@Test
public void testJoinLateral()
{
    // LATERAL subqueries referencing left-side columns are valid for all join types
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) CROSS JOIN LATERAL(VALUES x)");
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) LEFT OUTER JOIN LATERAL(VALUES x) ON true");
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) RIGHT OUTER JOIN LATERAL(VALUES x) ON true");
    analyze("SELECT * FROM (VALUES array[2, 2]) a(x) FULL OUTER JOIN LATERAL(VALUES x) ON true");
}
/**
 * Creates the shared transaction manager, access control, metadata manager, three
 * testing catalogs, and the tables (t1..t7) and views (v1..v5) referenced by the
 * tests in this class.
 */
@BeforeClass
public void setup()
{
    CatalogManager catalogManager = new CatalogManager();
    transactionManager = createTestTransactionManager(catalogManager);
    accessControl = new AccessControlManager(transactionManager);
    metadata = createTestMetadataManager(transactionManager, new FeaturesConfig());
    // register the apply() lambda function used throughout the lambda tests
    metadata.addFunctions(ImmutableList.of(APPLY_FUNCTION));
    Catalog tpchTestCatalog = createTestingCatalog(TPCH_CATALOG, TPCH_CATALOG_NAME);
    catalogManager.registerCatalog(tpchTestCatalog);
    metadata.getAnalyzePropertyManager().addProperties(TPCH_CATALOG_NAME, tpchTestCatalog.getConnector(TPCH_CATALOG_NAME).getAnalyzeProperties());
    catalogManager.registerCatalog(createTestingCatalog(SECOND_CATALOG, SECOND_CATALOG_NAME));
    catalogManager.registerCatalog(createTestingCatalog(THIRD_CATALOG, THIRD_CATALOG_NAME));
    // t1: four bigint columns a, b, c, d
    SchemaTableName table1 = new SchemaTableName("s1", "t1");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG,
            new ConnectorTableMetadata(table1, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT),
                    new ColumnMetadata("b", BIGINT),
                    new ColumnMetadata("c", BIGINT),
                    new ColumnMetadata("d", BIGINT))),
            false));
    // t2: two bigint columns
    SchemaTableName table2 = new SchemaTableName("s1", "t2");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG,
            new ConnectorTableMetadata(table2, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT),
                    new ColumnMetadata("b", BIGINT))),
            false));
    // t3: has a hidden column x
    SchemaTableName table3 = new SchemaTableName("s1", "t3");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG,
            new ConnectorTableMetadata(table3, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT),
                    new ColumnMetadata("b", BIGINT),
                    new ColumnMetadata("x", BIGINT, null, true))),
            false));
    // table in different catalog
    SchemaTableName table4 = new SchemaTableName("s2", "t4");
    inSetupTransaction(session -> metadata.createTable(session, SECOND_CATALOG,
            new ConnectorTableMetadata(table4, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT))),
            false));
    // table with a hidden column
    SchemaTableName table5 = new SchemaTableName("s1", "t5");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG,
            new ConnectorTableMetadata(table5, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT),
                    new ColumnMetadata("b", BIGINT, null, true))),
            false));
    // table with a varchar column
    SchemaTableName table6 = new SchemaTableName("s1", "t6");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG,
            new ConnectorTableMetadata(table6, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT),
                    new ColumnMetadata("b", VARCHAR),
                    new ColumnMetadata("c", BIGINT),
                    new ColumnMetadata("d", BIGINT))),
            false));
    // table with bigint, double, array of bigints and array of doubles column
    SchemaTableName table7 = new SchemaTableName("s1", "t7");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG,
            new ConnectorTableMetadata(table7, ImmutableList.of(
                    new ColumnMetadata("a", BIGINT),
                    new ColumnMetadata("b", DOUBLE),
                    new ColumnMetadata("c", new ArrayType(BIGINT)),
                    new ColumnMetadata("d", new ArrayType(DOUBLE)))),
            false));
    // valid view referencing table in same schema
    ConnectorViewDefinition viewData1 = new ConnectorViewDefinition(
            "select a from t1",
            Optional.of(TPCH_CATALOG),
            Optional.of("s1"),
            ImmutableList.of(new ViewColumn("a", BIGINT.getTypeSignature())),
            Optional.of("user"),
            false);
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v1"), viewData1, false));
    // stale view (different column type)
    ConnectorViewDefinition viewData2 = new ConnectorViewDefinition(
            "select a from t1",
            Optional.of(TPCH_CATALOG),
            Optional.of("s1"),
            ImmutableList.of(new ViewColumn("a", parseTypeSignature("varchar"))),
            Optional.of("user"),
            false);
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v2"), viewData2, false));
    // view referencing table in different schema from itself and session
    ConnectorViewDefinition viewData3 = new ConnectorViewDefinition(
            "select a from t4",
            Optional.of(SECOND_CATALOG),
            Optional.of("s2"),
            ImmutableList.of(new ViewColumn("a", BIGINT.getTypeSignature())),
            Optional.of("owner"),
            false);
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(THIRD_CATALOG, "s3", "v3"), viewData3, false));
    // valid view with uppercase column name
    // NOTE(review): uses the literal "tpch" rather than the TPCH_CATALOG constant
    // used elsewhere -- presumably the same value; confirm before unifying.
    ConnectorViewDefinition viewData4 = new ConnectorViewDefinition(
            "select A from t1",
            Optional.of("tpch"),
            Optional.of("s1"),
            ImmutableList.of(new ViewColumn("a", BIGINT.getTypeSignature())),
            Optional.of("user"),
            false);
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName("tpch", "s1", "v4"), viewData4, false));
    // recursive view referencing to itself
    ConnectorViewDefinition viewData5 = new ConnectorViewDefinition(
            "select * from v5",
            Optional.of(TPCH_CATALOG),
            Optional.of("s1"),
            ImmutableList.of(new ViewColumn("a", BIGINT.getTypeSignature())),
            Optional.of("user"),
            false);
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v5"), viewData5, false));
}
// Runs the given setup action inside a single-statement, read-uncommitted
// transaction using the dedicated SETUP_SESSION.
private void inSetupTransaction(Consumer<Session> consumer)
{
    transaction(transactionManager, accessControl)
            .singleStatement()
            .readUncommitted()
            .execute(SETUP_SESSION, consumer);
}
// Builds an Analyzer for the given session with allow-all access control,
// no query explainer, no prepared-statement parameters, and no warning collection.
private static Analyzer createAnalyzer(Session session, Metadata metadata)
{
    return new Analyzer(
            session,
            metadata,
            SQL_PARSER,
            new AllowAllAccessControl(),
            Optional.empty(),
            emptyList(),
            WarningCollector.NOOP);
}
// Analyzes the query under the default CLIENT_SESSION; throws on analysis failure.
private void analyze(@Language("SQL") String query)
{
    analyze(CLIENT_SESSION, query);
}
// Parses and analyzes the query within a read-only, read-uncommitted transaction
// under the given session; any analysis error propagates to the caller.
private void analyze(Session clientSession, @Language("SQL") String query)
{
    transaction(transactionManager, accessControl)
            .singleStatement()
            .readUncommitted()
            .readOnly()
            .execute(clientSession, session -> {
                Analyzer analyzer = createAnalyzer(session, metadata);
                Statement statement = SQL_PARSER.createStatement(query);
                analyzer.analyze(statement);
            });
}
// Asserts that analyzing the query under the given session throws a
// PrestoException, returning an assert object for error-code/message checks.
private PrestoExceptionAssert assertFails(Session session, @Language("SQL") String query)
{
    return assertPrestoExceptionThrownBy(() -> analyze(session, query));
}
// Convenience overload of assertFails using the default CLIENT_SESSION.
private PrestoExceptionAssert assertFails(@Language("SQL") String query)
{
    return assertFails(CLIENT_SESSION, query);
}
// Creates a testing catalog wrapping a fresh in-memory connector, along with
// its companion information_schema and system-tables connectors.
private Catalog createTestingCatalog(String catalogName, CatalogName catalog)
{
    CatalogName systemId = createSystemTablesCatalogName(catalog);
    Connector connector = createTestingConnector();
    InternalNodeManager nodeManager = new InMemoryNodeManager();
    return new Catalog(
            catalogName,
            catalog,
            connector,
            createInformationSchemaCatalogName(catalog),
            new InformationSchemaConnector(catalogName, nodeManager, metadata, accessControl),
            systemId,
            new SystemConnector(
                    nodeManager,
                    connector.getSystemTables(),
                    transactionId -> transactionManager.getConnectorTransaction(transactionId, catalog)));
}
// Creates a minimal in-memory connector exposing TestingMetadata and two
// analyze properties (string p1, integer p2) used by ANALYZE tests.
private static Connector createTestingConnector()
{
    return new Connector()
    {
        private final ConnectorMetadata metadata = new TestingMetadata();
        @Override
        public ConnectorTransactionHandle beginTransaction(IsolationLevel isolationLevel, boolean readOnly)
        {
            // transactions are no-ops for this connector
            return new ConnectorTransactionHandle() {};
        }
        @Override
        public ConnectorMetadata getMetadata(ConnectorTransactionHandle transaction)
        {
            return metadata;
        }
        @Override
        public List<PropertyMetadata<?>> getAnalyzeProperties()
        {
            return ImmutableList.of(
                    stringProperty("p1", "test string property", "", false),
                    integerProperty("p2", "test integer property", 0, false));
        }
    };
}
}
| wyukawa/presto | presto-main/src/test/java/io/prestosql/sql/analyzer/TestAnalyzer.java | Java | apache-2.0 | 89,508 |
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.firebase.auth;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.util.DateTime;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.firebase.auth.internal.GetAccountInfoResponse.User;
import com.google.firebase.internal.NonNull;
import com.google.firebase.internal.Nullable;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Contains metadata associated with a Firebase user account. Instances of this class are immutable
* and thread safe.
*/
public class UserRecord implements UserInfo {
  // Provider ID reported for the top-level Firebase account itself (as opposed
  // to linked identity providers).
  private static final String PROVIDER_ID = "firebase";
  // Maps updatable property names to their backend "delete attribute" names.
  // NOTE(review): not referenced in this part of the file -- presumably consumed
  // by UpdateRequest when a field is set to null; confirm against the rest of the class.
  private static final Map<String, String> REMOVABLE_FIELDS = ImmutableMap.of(
      "displayName", "DISPLAY_NAME",
      "photoUrl", "PHOTO_URL");
  static final String CUSTOM_ATTRIBUTES = "customAttributes";
  // Maximum length (in characters) of the serialized custom-claims JSON payload;
  // enforced by serializeCustomClaims().
  private static final int MAX_CLAIMS_PAYLOAD_SIZE = 1000;
  private final String uid;
  private final String tenantId;
  private final String email;
  private final String phoneNumber;
  private final boolean emailVerified;
  private final String displayName;
  private final String photoUrl;
  private final boolean disabled;
  private final ProviderUserInfo[] providers;
  // Milliseconds since epoch; tokens minted before this instant are invalid.
  private final long tokensValidAfterTimestamp;
  private final UserMetadata userMetadata;
  private final Map<String, Object> customClaims;
  /**
   * Builds an immutable UserRecord from a GetAccountInfo backend response.
   *
   * @param response a non-null user entry with a non-empty UID.
   * @param jsonFactory a non-null JSON factory used to parse the custom claims payload.
   * @throws IllegalArgumentException if the UID is missing or the custom claims
   *     payload is not valid JSON.
   */
  UserRecord(User response, JsonFactory jsonFactory) {
    checkNotNull(response, "response must not be null");
    checkNotNull(jsonFactory, "jsonFactory must not be null");
    checkArgument(!Strings.isNullOrEmpty(response.getUid()), "uid must not be null or empty");
    this.uid = response.getUid();
    this.tenantId = response.getTenantId();
    this.email = response.getEmail();
    this.phoneNumber = response.getPhoneNumber();
    this.emailVerified = response.isEmailVerified();
    this.displayName = response.getDisplayName();
    this.photoUrl = response.getPhotoUrl();
    this.disabled = response.isDisabled();
    if (response.getProviders() == null || response.getProviders().length == 0) {
      this.providers = new ProviderUserInfo[0];
    } else {
      this.providers = new ProviderUserInfo[response.getProviders().length];
      for (int i = 0; i < this.providers.length; i++) {
        this.providers[i] = new ProviderUserInfo(response.getProviders()[i]);
      }
    }
    // validSince is reported in seconds; store as milliseconds
    this.tokensValidAfterTimestamp = response.getValidSince() * 1000;
    // lastRefreshAt is an RFC 3339 timestamp string; 0 means "never/unknown"
    String lastRefreshAtRfc3339 = response.getLastRefreshAt();
    long lastRefreshAtMillis = 0;
    if (!Strings.isNullOrEmpty(lastRefreshAtRfc3339)) {
      lastRefreshAtMillis = DateTime.parseRfc3339(lastRefreshAtRfc3339).getValue();
    }
    this.userMetadata = new UserMetadata(
        response.getCreatedAt(), response.getLastLoginAt(), lastRefreshAtMillis);
    this.customClaims = parseCustomClaims(response.getCustomClaims(), jsonFactory);
  }
private Map<String, Object> parseCustomClaims(String customClaims, JsonFactory jsonFactory) {
if (Strings.isNullOrEmpty(customClaims)) {
return ImmutableMap.of();
}
try {
Map<String, Object> parsed = new HashMap<>();
jsonFactory.createJsonParser(customClaims).parseAndClose(parsed);
return ImmutableMap.copyOf(parsed);
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse custom claims json", e);
}
}
  /**
   * Returns the user ID of this user.
   *
   * @return a non-null, non-empty user ID string.
   */
  @Override
  public String getUid() {
    return uid;
  }
  /**
   * Returns the tenant ID associated with this user, if one exists.
   *
   * @return a tenant ID string, or null if the user does not belong to a tenant.
   */
  @Nullable
  public String getTenantId() {
    return this.tenantId;
  }
  /**
   * Returns the provider ID of this user.
   *
   * @return the constant provider ID value {@code "firebase"}.
   */
  @Override
  public String getProviderId() {
    return PROVIDER_ID;
  }
  /**
   * Returns the email address associated with this user.
   *
   * @return an email address string, or null if the user has no email address.
   */
  @Nullable
  @Override
  public String getEmail() {
    return email;
  }
  /**
   * Returns the phone number associated with this user.
   *
   * @return a phone number string, or null if the user has no phone number.
   */
  @Nullable
  @Override
  public String getPhoneNumber() {
    return phoneNumber;
  }
  /**
   * Returns whether the email address of this user has been verified.
   *
   * @return true if the email has been verified, and false otherwise.
   */
  public boolean isEmailVerified() {
    return emailVerified;
  }
  /**
   * Returns the display name of this user.
   *
   * @return a display name string, or null if the user has no display name.
   */
  @Nullable
  @Override
  public String getDisplayName() {
    return displayName;
  }
  /**
   * Returns the photo URL of this user.
   *
   * @return a URL string, or null if the user has no photo.
   */
  @Nullable
  @Override
  public String getPhotoUrl() {
    return photoUrl;
  }
  /**
   * Returns whether this user account is disabled.
   *
   * @return true if the user account is disabled, and false otherwise.
   */
  public boolean isDisabled() {
    return disabled;
  }
/**
* Returns an array of {@code UserInfo} objects that represents the identities from different
* identity providers that are linked to this user.
*
* @return an array of {@link UserInfo} instances, which may be empty.
*/
public UserInfo[] getProviderData() {
return providers;
}
  /**
   * Returns a timestamp in milliseconds since epoch, truncated down to the closest second.
   * Tokens minted before this timestamp are considered invalid.
   *
   * @return Timestamp in milliseconds since the epoch. Tokens minted before this timestamp are
   *     considered invalid.
   */
  public long getTokensValidAfterTimestamp() {
    return tokensValidAfterTimestamp;
  }
  /**
   * Returns additional metadata (creation, last sign-in and last refresh times)
   * associated with this user.
   *
   * @return a non-null UserMetadata instance.
   */
  public UserMetadata getUserMetadata() {
    return this.userMetadata;
  }
  /**
   * Returns custom claims set on this user.
   *
   * @return a non-null, immutable Map of custom claims, possibly empty.
   */
  @NonNull
  public Map<String,Object> getCustomClaims() {
    return customClaims;
  }
  /**
   * Returns a new {@link UpdateRequest}, which can be used to update the attributes
   * of this user.
   *
   * @return a non-null UserRecord.UpdateRequest instance pre-populated with this user's UID.
   */
  public UpdateRequest updateRequest() {
    return new UpdateRequest(uid);
  }
  // Validates a user ID: non-empty and at most 128 characters.
  static void checkUid(String uid) {
    checkArgument(!Strings.isNullOrEmpty(uid), "uid cannot be null or empty");
    checkArgument(uid.length() <= 128, "UID cannot be longer than 128 characters");
  }
static void checkEmail(String email) {
checkArgument(!Strings.isNullOrEmpty(email), "email cannot be null or empty");
checkArgument(email.matches("^[^@]+@[^@]+$"));
}
  // Validates a phone number: non-empty and starting with '+'.
  static void checkPhoneNumber(String phoneNumber) {
    // Phone number verification is very lax here. Backend will enforce E.164 spec compliance, and
    // normalize accordingly.
    checkArgument(!Strings.isNullOrEmpty(phoneNumber), "phone number cannot be null or empty");
    checkArgument(phoneNumber.startsWith("+"),
        "phone number must be a valid, E.164 compliant identifier starting with a '+' sign");
  }
  // Validates that both the provider ID and the provider-specific UID are non-empty.
  static void checkProvider(String providerId, String providerUid) {
    checkArgument(!Strings.isNullOrEmpty(providerId), "providerId must be a non-empty string");
    checkArgument(!Strings.isNullOrEmpty(providerUid), "providerUid must be a non-empty string");
  }
  // Validates that the string is a non-empty, well-formed URL (per java.net.URL).
  static void checkUrl(String photoUrl) {
    checkArgument(!Strings.isNullOrEmpty(photoUrl), "url cannot be null or empty");
    try {
      new URL(photoUrl);
    } catch (MalformedURLException e) {
      throw new IllegalArgumentException("malformed url string", e);
    }
  }
  // Validates a password: non-empty and at least 6 characters long.
  private static void checkPassword(String password) {
    checkArgument(!Strings.isNullOrEmpty(password), "password cannot be null or empty");
    checkArgument(password.length() >= 6, "password must be at least 6 characters long");
  }
static void checkCustomClaims(Map<String,Object> customClaims) {
if (customClaims == null) {
return;
}
for (String key : customClaims.keySet()) {
checkArgument(!Strings.isNullOrEmpty(key), "Claim names must not be null or empty");
checkArgument(!FirebaseUserManager.RESERVED_CLAIMS.contains(key),
"Claim '" + key + "' is reserved and cannot be set");
}
}
private static void checkValidSince(long epochSeconds) {
checkArgument(epochSeconds > 0, "validSince (seconds since epoch) must be greater than 0: "
+ Long.toString(epochSeconds));
}
  // Serializes the custom claims map to a JSON string, enforcing the
  // MAX_CLAIMS_PAYLOAD_SIZE limit. A null or empty map serializes to "{}".
  // NOTE(review): the parameter uses the raw Map type; presumably callers pass
  // Map<String, Object> -- confirm before tightening the signature.
  static String serializeCustomClaims(Map customClaims, JsonFactory jsonFactory) {
    checkNotNull(jsonFactory, "JsonFactory must not be null");
    if (customClaims == null || customClaims.isEmpty()) {
      return "{}";
    }
    try {
      String claimsPayload = jsonFactory.toString(customClaims);
      checkArgument(claimsPayload.length() <= MAX_CLAIMS_PAYLOAD_SIZE,
          "customClaims payload cannot be larger than " + MAX_CLAIMS_PAYLOAD_SIZE + " characters");
      return claimsPayload;
    } catch (IOException e) {
      throw new IllegalArgumentException("Failed to serialize custom claims into JSON", e);
    }
  }
  /**
   * A specification class for creating new user accounts. Set the initial attributes of the new
   * user account by calling various setter methods available in this class. None of the attributes
   * are required. Each setter validates its argument eagerly and returns this instance
   * to allow call chaining.
   */
  public static class CreateRequest {
    // Accumulates the backend property names and values for the create call.
    private final Map<String,Object> properties = new HashMap<>();
    /**
     * Creates a new {@link CreateRequest}, which can be used to create a new user. The returned
     * object should be passed to {@link FirebaseAuth#createUser(CreateRequest)} to register
     * the user information persistently.
     */
    public CreateRequest() {
    }
    /**
     * Sets a user ID for the new user.
     *
     * @param uid a non-null, non-empty user ID that uniquely identifies the new user. The user ID
     *     must not be longer than 128 characters.
     * @return this create request instance.
     */
    public CreateRequest setUid(String uid) {
      checkUid(uid);
      properties.put("localId", uid);
      return this;
    }
    /**
     * Sets an email address for the new user.
     *
     * @param email a non-null, non-empty email address string.
     * @return this create request instance.
     */
    public CreateRequest setEmail(String email) {
      checkEmail(email);
      properties.put("email", email);
      return this;
    }
    /**
     * Sets a phone number for the new user.
     *
     * @param phone a non-null, non-empty phone number string starting with '+'.
     * @return this create request instance.
     */
    public CreateRequest setPhoneNumber(String phone) {
      checkPhoneNumber(phone);
      properties.put("phoneNumber", phone);
      return this;
    }
    /**
     * Sets whether the user email address has been verified or not.
     *
     * @param emailVerified a boolean indicating the email verification status.
     * @return this create request instance.
     */
    public CreateRequest setEmailVerified(boolean emailVerified) {
      properties.put("emailVerified", emailVerified);
      return this;
    }
    /**
     * Sets the display name for the new user.
     *
     * @param displayName a non-null display name string.
     * @return this create request instance.
     */
    public CreateRequest setDisplayName(String displayName) {
      checkNotNull(displayName, "displayName cannot be null");
      properties.put("displayName", displayName);
      return this;
    }
    /**
     * Sets the photo URL for the new user.
     *
     * @param photoUrl a non-null, non-empty, well-formed URL string.
     * @return this create request instance.
     */
    public CreateRequest setPhotoUrl(String photoUrl) {
      checkUrl(photoUrl);
      properties.put("photoUrl", photoUrl);
      return this;
    }
    /**
     * Sets whether the new user account should be disabled by default or not.
     *
     * @param disabled a boolean indicating whether the new account should be disabled.
     * @return this create request instance.
     */
    public CreateRequest setDisabled(boolean disabled) {
      properties.put("disabled", disabled);
      return this;
    }
    /**
     * Sets the password for the new user.
     *
     * @param password a password string that is at least 6 characters long.
     * @return this create request instance.
     */
    public CreateRequest setPassword(String password) {
      checkPassword(password);
      properties.put("password", password);
      return this;
    }
    // Returns an immutable snapshot of the accumulated properties for the backend call.
    Map<String, Object> getProperties() {
      return ImmutableMap.copyOf(properties);
    }
  }
/**
* A class for updating the attributes of an existing user. An instance of this class can be
* obtained via a {@link UserRecord} object, or from a user ID string. Specify the changes to be
* made in the user account by calling the various setter methods available in this class.
*/
public static class UpdateRequest {
private final Map<String,Object> properties = new HashMap<>();
/**
* Creates a new {@link UpdateRequest}, which can be used to update the attributes
* of the user identified by the specified user ID. This method allows updating attributes of
* a user account, without first having to call {@link FirebaseAuth#getUser(String)}.
*
* @param uid a non-null, non-empty user ID string.
* @throws IllegalArgumentException If the user ID is null or empty.
*/
public UpdateRequest(String uid) {
checkArgument(!Strings.isNullOrEmpty(uid), "uid must not be null or empty");
properties.put("localId", uid);
}
String getUid() {
return (String) properties.get("localId");
}
/**
* Updates the email address associated with this user.
*
* @param email a non-null, non-empty email address to be associated with the user.
*/
public UpdateRequest setEmail(String email) {
checkEmail(email);
properties.put("email", email);
return this;
}
/**
* Updates the phone number associated with this user. Calling this method with a null argument
* removes the phone number from the user account.
*
* @param phone a valid phone number string or null.
*/
public UpdateRequest setPhoneNumber(@Nullable String phone) {
if (phone != null) {
checkPhoneNumber(phone);
}
if (phone == null && properties.containsKey("deleteProvider")) {
Object deleteProvider = properties.get("deleteProvider");
if (deleteProvider != null) {
// Due to java's type erasure, we can't fully check the type. :(
@SuppressWarnings("unchecked")
Iterable<String> deleteProviderIterable = (Iterable<String>)deleteProvider;
// If we've been told to unlink the phone provider both via setting phoneNumber to null
// *and* by setting providersToUnlink to include 'phone', then we'll reject that. Though
// it might also be reasonable to relax this restriction and just unlink it.
for (String dp : deleteProviderIterable) {
if (dp == "phone") {
throw new IllegalArgumentException(
"Both UpdateRequest.setPhoneNumber(null) and "
+ "UpdateRequest.setProvidersToUnlink(['phone']) were set. To unlink from a "
+ "phone provider, only specify UpdateRequest.setPhoneNumber(null).");
}
}
}
}
properties.put("phoneNumber", phone);
return this;
}
/**
* Updates the email verification status of this account.
*
* @param emailVerified a boolean indicating whether the email address has been verified.
*/
public UpdateRequest setEmailVerified(boolean emailVerified) {
properties.put("emailVerified", emailVerified);
return this;
}
/**
* Updates the display name of this user. Calling this method with a null argument removes the
* display name attribute from the user account.
*
* @param displayName a display name string or null
*/
public UpdateRequest setDisplayName(@Nullable String displayName) {
properties.put("displayName", displayName);
return this;
}
/**
* Updates the Photo URL of this user. Calling this method with a null argument removes
* the photo URL attribute from the user account.
*
* @param photoUrl a valid URL string or null
*/
public UpdateRequest setPhotoUrl(@Nullable String photoUrl) {
// This is allowed to be null
if (photoUrl != null) {
checkUrl(photoUrl);
}
properties.put("photoUrl", photoUrl);
return this;
}
/**
* Enables or disables this user account.
*
* @param disabled a boolean indicating whether this account should be disabled.
*/
public UpdateRequest setDisabled(boolean disabled) {
properties.put("disableUser", disabled);
return this;
}
/**
* Updates the password of this user.
*
* @param password a new password string that is at least 6 characters long.
*/
public UpdateRequest setPassword(String password) {
checkPassword(password);
properties.put("password", password);
return this;
}
/**
* Updates the custom claims associated with this user. Calling this method with a null
* argument removes any custom claims from the user account.
*
* @param customClaims a Map of custom claims or null
*/
public UpdateRequest setCustomClaims(Map<String,Object> customClaims) {
checkCustomClaims(customClaims);
properties.put(CUSTOM_ATTRIBUTES, customClaims);
return this;
}
/**
* Links this user to the specified provider.
*
* <p>Linking a provider to an existing user account does not invalidate the
* refresh token of that account. In other words, the existing account
* continues to be able to access resources, despite not having used
* the newly linked provider to sign in. If you wish to force the user to
* authenticate with this new provider, you need to (a) revoke their
* refresh token (see
* https://firebase.google.com/docs/auth/admin/manage-sessions#revoke_refresh_tokens),
* and (b) ensure no other authentication methods are present on this
* account.
*
* @param providerToLink provider info to be linked to this user\'s account.
*/
public UpdateRequest setProviderToLink(@NonNull UserProvider providerToLink) {
properties.put("linkProviderUserInfo", checkNotNull(providerToLink));
return this;
}
/**
* Unlinks this user from the specified providers.
*
* @param providerIds list of identifiers for the identity providers.
*/
public UpdateRequest setProvidersToUnlink(Iterable<String> providerIds) {
checkNotNull(providerIds);
for (String id : providerIds) {
checkArgument(!Strings.isNullOrEmpty(id), "providerIds must not be null or empty");
if (id == "phone" && properties.containsKey("phoneNumber")
&& properties.get("phoneNumber") == null) {
// If we've been told to unlink the phone provider both via setting phoneNumber to null
// *and* by setting providersToUnlink to include 'phone', then we'll reject that. Though
// it might also be reasonable to relax this restriction and just unlink it.
throw new IllegalArgumentException(
"Both UpdateRequest.setPhoneNumber(null) and "
+ "UpdateRequest.setProvidersToUnlink(['phone']) were set. To unlink from a phone "
+ "provider, only specify UpdateRequest.setPhoneNumber(null).");
}
}
properties.put("deleteProvider", providerIds);
return this;
}
UpdateRequest setValidSince(long epochSeconds) {
checkValidSince(epochSeconds);
properties.put("validSince", epochSeconds);
return this;
}
Map<String, Object> getProperties(JsonFactory jsonFactory) {
Map<String, Object> copy = new HashMap<>(properties);
List<String> remove = new ArrayList<>();
for (Map.Entry<String, String> entry : REMOVABLE_FIELDS.entrySet()) {
if (copy.containsKey(entry.getKey()) && copy.get(entry.getKey()) == null) {
remove.add(entry.getValue());
copy.remove(entry.getKey());
}
}
if (!remove.isEmpty()) {
copy.put("deleteAttribute", ImmutableList.copyOf(remove));
}
if (copy.containsKey("phoneNumber") && copy.get("phoneNumber") == null) {
Object deleteProvider = copy.get("deleteProvider");
if (deleteProvider != null) {
// Due to java's type erasure, we can't fully check the type. :(
@SuppressWarnings("unchecked")
Iterable<String> deleteProviderIterable = (Iterable<String>)deleteProvider;
copy.put("deleteProvider", new ImmutableList.Builder<String>()
.addAll(deleteProviderIterable)
.add("phone")
.build());
} else {
copy.put("deleteProvider", ImmutableList.of("phone"));
}
copy.remove("phoneNumber");
}
if (copy.containsKey(CUSTOM_ATTRIBUTES)) {
Map customClaims = (Map) copy.remove(CUSTOM_ATTRIBUTES);
copy.put(CUSTOM_ATTRIBUTES, serializeCustomClaims(customClaims, jsonFactory));
}
return ImmutableMap.copyOf(copy);
}
}
}
| firebase/firebase-admin-java | src/main/java/com/google/firebase/auth/UserRecord.java | Java | apache-2.0 | 22,304 |
cc.Class({
    extends: cc.Component,

    properties: {
        // The button this component controls; disabled when shifter() runs.
        self: { default: null, type: cc.Button },
        // Sibling buttons that are re-enabled when shifter() runs.
        other1: { default: null, type: cc.Button },
        other2: { default: null, type: cc.Button },
    },

    /**
     * Click handler: re-enables both sibling buttons and disables this one,
     * so exactly one of the three buttons is non-interactable at a time.
     */
    shifter() {
        this.other1.interactable = true;
        this.other2.interactable = true;
        this.self.interactable = false;
    },
});
| lllyasviel/style2paints | V3/client/assets/script/toggleBTN.js | JavaScript | apache-2.0 | 518 |
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package init;
import static setup.SetupCleanup.deleteBucket;
import static setup.SetupCleanup.deleteDataset;
import com.google.api.gax.rpc.PermissionDeniedException;
import com.google.cloud.retail.v2.DeleteProductRequest;
import com.google.cloud.retail.v2.ListProductsRequest;
import com.google.cloud.retail.v2.Product;
import com.google.cloud.retail.v2.ProductServiceClient;
import com.google.cloud.retail.v2.ProductServiceClient.ListProductsPagedResponse;
import java.io.IOException;
public class RemoveTestResources {

  private static final String PROJECT_ID = System.getenv("PROJECT_ID");
  private static final String BUCKET_NAME = System.getenv("BUCKET_NAME");
  private static final String DEFAULT_CATALOG =
      String.format(
          "projects/%s/locations/global/catalogs/default_catalog/" + "branches/0", PROJECT_ID);

  /**
   * Removes the resources created for the interactive tutorials: the GCS
   * bucket, every product in the default catalog, and the BigQuery datasets.
   */
  public static void main(String[] args) throws IOException {
    deleteBucket(BUCKET_NAME);
    deleteAllProducts();
    deleteDataset(PROJECT_ID, "products");
    deleteDataset(PROJECT_ID, "user_events");
  }

  /** Deletes every product in the default catalog branch, one request at a time. */
  public static void deleteAllProducts() throws IOException {
    System.out.println("Deleting products in process, please wait...");

    try (ProductServiceClient client = ProductServiceClient.create()) {
      ListProductsRequest listRequest =
          ListProductsRequest.newBuilder().setParent(DEFAULT_CATALOG).build();
      ListProductsPagedResponse pages = client.listProducts(listRequest);

      // Count only the deletions that actually succeeded.
      int removed = 0;
      for (Product product : pages.iterateAll()) {
        DeleteProductRequest deleteRequest =
            DeleteProductRequest.newBuilder().setName(product.getName()).build();
        try {
          client.deleteProduct(deleteRequest);
          removed++;
        } catch (PermissionDeniedException e) {
          System.out.println(
              "Ignore PermissionDenied in case the product does not exist "
                  + "at time of deletion.");
        }
      }

      System.out.printf("%s products were deleted from %s%n", removed, DEFAULT_CATALOG);
    }
  }
}
| googleapis/java-retail | samples/interactive-tutorials/src/main/java/init/RemoveTestResources.java | Java | apache-2.0 | 2,705 |
<?php
/**
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Mss\S3;
use Mss\Common\Client\AbstractClient;
use Mss\Common\Client\ClientBuilder;
use Mss\Common\Client\ExpiredCredentialsChecker;
use Mss\Common\Client\UploadBodyListener;
use Mss\Common\Enum\ClientOptions as Options;
use Mss\Common\Exception\RuntimeException;
use Mss\Common\Exception\InvalidArgumentException;
use Mss\Common\Signature\SignatureV4;
use Mss\Common\Model\MultipartUpload\AbstractTransfer;
use Mss\S3\Exception\AccessDeniedException;
use Mss\S3\Exception\Parser\S3ExceptionParser;
use Mss\S3\Exception\S3Exception;
use Mss\S3\Model\ClearBucket;
use Mss\S3\Model\MultipartUpload\AbstractTransfer as AbstractMulti;
use Mss\S3\Model\MultipartUpload\UploadBuilder;
use Mss\S3\Sync\DownloadSyncBuilder;
use Mss\S3\Sync\UploadSyncBuilder;
use Guzzle\Common\Collection;
use Guzzle\Http\EntityBody;
use Guzzle\Http\Message\RequestInterface;
use Guzzle\Iterator\FilterIterator;
use Guzzle\Plugin\Backoff\BackoffPlugin;
use Guzzle\Plugin\Backoff\CurlBackoffStrategy;
use Guzzle\Plugin\Backoff\ExponentialBackoffStrategy;
use Guzzle\Plugin\Backoff\HttpBackoffStrategy;
use Guzzle\Plugin\Backoff\TruncatedBackoffStrategy;
use Guzzle\Service\Command\CommandInterface;
use Guzzle\Service\Command\Factory\AliasFactory;
use Guzzle\Service\Command\Factory\CompositeFactory;
use Guzzle\Service\Resource\Model;
use Guzzle\Service\Resource\ResourceIteratorInterface;
/**
* Client to interact with Amazon Simple Storage Service
*
* @method S3SignatureInterface getSignature() Returns the signature implementation used with the client
* @method Model abortMultipartUpload(array $args = array()) {@command S3 AbortMultipartUpload}
* @method Model completeMultipartUpload(array $args = array()) {@command S3 CompleteMultipartUpload}
* @method Model copyObject(array $args = array()) {@command S3 CopyObject}
* @method Model createBucket(array $args = array()) {@command S3 CreateBucket}
* @method Model createMultipartUpload(array $args = array()) {@command S3 CreateMultipartUpload}
* @method Model deleteBucket(array $args = array()) {@command S3 DeleteBucket}
* @method Model deleteObject(array $args = array()) {@command S3 DeleteObject}
* @method Model deleteObjects(array $args = array()) {@command S3 DeleteObjects}
* @method Model getBucketAcl(array $args = array()) {@command S3 GetBucketAcl}
* @method Model getBucketVersioning(array $args = array()) {@command S3 GetBucketVersioning}
* @method Model getObject(array $args = array()) {@command S3 GetObject}
* @method Model getObjectAcl(array $args = array()) {@command S3 GetObjectAcl}
* @method Model headBucket(array $args = array()) {@command S3 HeadBucket}
* @method Model headObject(array $args = array()) {@command S3 HeadObject}
* @method Model listBuckets(array $args = array()) {@command S3 ListBuckets}
* @method Model listMultipartUploads(array $args = array()) {@command S3 ListMultipartUploads}
* @method Model listObjectVersions(array $args = array()) {@command S3 ListObjectVersions}
* @method Model listObjects(array $args = array()) {@command S3 ListObjects}
* @method Model listParts(array $args = array()) {@command S3 ListParts}
* @method Model putBucketAcl(array $args = array()) {@command S3 PutBucketAcl}
* @method Model putObject(array $args = array()) {@command S3 PutObject}
* @method Model putObjectAcl(array $args = array()) {@command S3 PutObjectAcl}
* @method Model uploadPart(array $args = array()) {@command S3 UploadPart}
* @method waitUntilBucketExists(array $input) The input array uses the parameters of the HeadBucket operation and waiter specific settings
* @method waitUntilBucketNotExists(array $input) The input array uses the parameters of the HeadBucket operation and waiter specific settings
* @method waitUntilObjectExists(array $input) The input array uses the parameters of the HeadObject operation and waiter specific settings
* @method ResourceIteratorInterface getListBucketsIterator(array $args = array()) The input array uses the parameters of the ListBuckets operation
* @method ResourceIteratorInterface getListMultipartUploadsIterator(array $args = array()) The input array uses the parameters of the ListMultipartUploads operation
* @method ResourceIteratorInterface getListObjectVersionsIterator(array $args = array()) The input array uses the parameters of the ListObjectVersions operation
* @method ResourceIteratorInterface getListObjectsIterator(array $args = array()) The input array uses the parameters of the ListObjects operation
* @method ResourceIteratorInterface getListPartsIterator(array $args = array()) The input array uses the parameters of the ListParts operation
*
* @link http://docs.aws.amazon.com/aws-sdk-php/v2/guide/service-s3.html User guide
* @link http://docs.aws.amazon.com/aws-sdk-php/v2/api/class-Mss.S3.S3Client.html API docs
*/
class S3Client extends AbstractClient
{
    const LATEST_API_VERSION = '2006-03-01';

    /**
     * @var array Aliases for S3 operations
     *
     * Maps legacy REST-style and SDK 1.x command names onto the operations
     * defined in the service description, so older call sites keep working.
     * Entries commented out correspond to operations not exposed by this
     * client's service description.
     */
    protected static $commandAliases = array(
        // REST API Docs Aliases
        'GetService' => 'ListBuckets',
        'GetBucket' => 'ListObjects',
        'PutBucket' => 'CreateBucket',

        // SDK 1.x Aliases
        'GetBucketHeaders' => 'HeadBucket',
        'GetObjectHeaders' => 'HeadObject',
        'SetBucketAcl' => 'PutBucketAcl',
        'CreateObject' => 'PutObject',
        'DeleteObjects' => 'DeleteMultipleObjects',
        'PutObjectCopy' => 'CopyObject',
        'SetObjectAcl' => 'PutObjectAcl',
        'GetLogs' => 'GetBucketLogging',
        'GetVersioningStatus' => 'GetBucketVersioning',
        //'SetBucketPolicy' => 'PutBucketPolicy',
        //'CreateBucketNotification' => 'PutBucketNotification',
        //'GetBucketNotifications' => 'GetBucketNotification',
        'CopyPart' => 'UploadPartCopy',
        //'CreateWebsiteConfig' => 'PutBucketWebsite',
        //'GetWebsiteConfig' => 'GetBucketWebsite',
        //'DeleteWebsiteConfig' => 'DeleteBucketWebsite',
        //'CreateObjectExpirationConfig' => 'PutBucketLifecycle',
        //'GetObjectExpirationConfig' => 'GetBucketLifecycle',
        //'DeleteObjectExpirationConfig' => 'DeleteBucketLifecycle',
    );

    // Base directory used to locate the bundled service description resources.
    protected $directory = __DIR__;
/**
* Factory method to create a new Amazon S3 client using an array of configuration options.
*
* @param array|Collection $config Client configuration data
*
* @return S3Client
* @link http://docs.aws.amazon.com/aws-sdk-php/v2/guide/configuration.html#client-configuration-options
*/
    public static function factory($config = array())
    {
        // When an "image_endpoint" is configured, build a second S3Client
        // pointed at that endpoint; it backs getImageObject()/getImageObjectUrl().
        $s3_image = null;
        if (array_key_exists("image_endpoint", $config)) {
            $image_config = $config;
            $image_config["endpoint"] = $config["image_endpoint"];
            # if (!array_key_exists("endpoint", $config)) {
            #     $config["endpoint"] = $config["image_endpoint"];
            # }
            unset($config["image_endpoint"]);
            unset($image_config["image_endpoint"]);
            $s3_image = S3Client::factory($image_config);
        }
        $exceptionParser = new S3ExceptionParser();
        // Configure the custom exponential backoff plugin for retrying S3 specific errors
        if (!isset($config[Options::BACKOFF])) {
            $config[Options::BACKOFF] = static::createBackoffPlugin($exceptionParser);
        }
        // Resolve the signature implementation up front; it is also needed by
        // the Content-MD5 listener below.
        $config[Options::SIGNATURE] = $signature = static::createSignature($config);
        // The iterators config teaches the generic resource iterator how each
        // paginated S3 operation exposes its limit/continuation tokens.
        $client = ClientBuilder::factory(__NAMESPACE__)
            ->setConfig($config)
            ->setConfigDefaults(array(
                Options::VERSION => self::LATEST_API_VERSION,
                Options::SERVICE_DESCRIPTION => __DIR__ . '/Resources/s3-%s.php'
            ))
            ->setExceptionParser($exceptionParser)
            ->setIteratorsConfig(array(
                'more_key' => 'IsTruncated',
                'operations' => array(
                    'ListBuckets',
                    'ListMultipartUploads' => array(
                        'limit_param' => 'MaxUploads',
                        'token_param' => array('KeyMarker', 'UploadIdMarker'),
                        'token_key' => array('NextKeyMarker', 'NextUploadIdMarker'),
                    ),
                    'ListObjects' => array(
                        'limit_param' => 'MaxKeys',
                        'token_param' => 'Marker',
                        'token_key' => 'NextMarker',
                    ),
                    'ListObjectVersions' => array(
                        'limit_param' => 'MaxKeys',
                        'token_param' => array('KeyMarker', 'VersionIdMarker'),
                        'token_key' => array('nextKeyMarker', 'nextVersionIdMarker'),
                    ),
                    'ListParts' => array(
                        'limit_param' => 'MaxParts',
                        'result_key' => 'Parts',
                        'token_param' => 'PartNumberMarker',
                        'token_key' => 'NextPartNumberMarker',
                    ),
                )
            ))
            ->build($s3_image);
        // Use virtual hosted buckets when possible
        $client->addSubscriber(new BucketStyleListener());
        // Ensure that ACP headers are applied when needed
        //$client->addSubscriber(new AcpListener());
        // Validate and add required Content-MD5 hashes (e.g. DeleteObjects)
        $client->addSubscriber(new S3Md5Listener($signature));
        // Allow for specifying bodies with file paths and file handles
        $client->addSubscriber(new UploadBodyListener(array('PutObject', 'UploadPart')));
        // Ensures that if a SSE-CPK key is provided, the key and md5 are formatted correctly
        //$client->addSubscriber(new SseCpkListener);
        // Add aliases for some S3 operations
        $default = CompositeFactory::getDefaultChain($client);
        $default->add(
            new AliasFactory($client, static::$commandAliases),
            'Guzzle\Service\Command\Factory\ServiceDescriptionFactory'
        );
        $client->setCommandFactory($default);
        return $client;
    }
/**
* Create an Amazon S3 specific backoff plugin
*
* @param S3ExceptionParser $exceptionParser
*
* @return BackoffPlugin
*/
private static function createBackoffPlugin(S3ExceptionParser $exceptionParser)
{
return new BackoffPlugin(
new TruncatedBackoffStrategy(3,
new CurlBackoffStrategy(null,
new HttpBackoffStrategy(null,
new SocketTimeoutChecker(
new ExpiredCredentialsChecker($exceptionParser,
new ExponentialBackoffStrategy()
)
)
)
)
)
);
}
/**
* Create an appropriate signature based on the configuration settings
*
* @param $config
*
* @return \Mss\Common\Signature\SignatureInterface
* @throws InvalidArgumentException
*/
private static function createSignature($config)
{
$currentValue = isset($config[Options::SIGNATURE]) ? $config[Options::SIGNATURE] : null;
// Use the Amazon S3 signature V4 when the value is set to "v4" or when
// the value is not set and the region starts with "cn-".
if ($currentValue == 'v4') {
// Force SignatureV4 for specific regions or if specified in the config
throw new InvalidArgumentException('not support signature v4');
} elseif (!$currentValue || $currentValue == 's3') {
// Use the Amazon S3 signature by default
$currentValue = new S3Signature();
}
return $currentValue;
}
/**
* Determine if a string is a valid name for a DNS compatible Amazon S3
* bucket, meaning the bucket can be used as a subdomain in a URL (e.g.,
* "<bucket>.s3.amazonaws.com").
*
* @param string $bucket The name of the bucket to check.
*
* @return bool TRUE if the bucket name is valid or FALSE if it is invalid.
*/
public static function isValidBucketName($bucket)
{
$bucketLen = strlen($bucket);
if ($bucketLen < 3 || $bucketLen > 63 ||
// Cannot look like an IP address
preg_match('/(\d+\.){3}\d+$/', $bucket) ||
// Cannot include special characters, must start and end with lower alnum
!preg_match('/^[a-z0-9]([a-z0-9\-\.]*[a-z0-9])?$/', $bucket)
) {
return false;
}
return true;
}
/**
* Create a pre-signed URL for a request
*
* @param RequestInterface $request Request to generate the URL for. Use the factory methods of the client to
* create this request object
* @param int|string|\DateTime $expires The time at which the URL should expire. This can be a Unix timestamp, a
* PHP DateTime object, or a string that can be evaluated by strtotime
*
* @return string
* @throws InvalidArgumentException if the request is not associated with this client object
*/
    public function createPresignedUrl(RequestInterface $request, $expires)
    {
        // Only requests created by this client may be signed here, because the
        // signature is computed from this client's own credentials.
        if ($request->getClient() !== $this) {
            throw new InvalidArgumentException('The request object must be associated with the client. Use the '
                . '$client->get(), $client->head(), $client->post(), $client->put(), etc. methods when passing in a '
                . 'request object');
        }
        return $this->signature->createPresignedUrl($request, $this->credentials, $expires);
    }
/**
* Returns the URL to an object identified by its bucket and key. If an expiration time is provided, the URL will
* be signed and set to expire at the provided time.
*
* @param string $bucket The name of the bucket where the object is located
* @param string $key The key of the object
* @param mixed $expires The time at which the URL should expire
* @param array $args Arguments to the GetObject command. Additionally you can specify a "Scheme" if you would
* like the URL to use a different scheme than what the client is configured to use
*
* @return string The URL to the object
*/
public function getObjectUrl($bucket, $key, $expires = null, array $args = array())
{
$command = $this->getCommand('GetObject', $args + array('Bucket' => $bucket, 'Key' => $key));
if ($command->hasKey('Scheme')) {
$scheme = $command['Scheme'];
$request = $command->remove('Scheme')->prepare()->setScheme($scheme)->setPort(null);
} else {
$request = $command->prepare();
}
return $expires ? $this->createPresignedUrl($request, $expires) : $request->getUrl();
}
/**
* Refers to function getObjectUrl's description
*/
    public function getImageObjectUrl($bucket, $key, $expires = null, array $args = array())
    {
        // Delegates to the secondary client built from the "image_endpoint"
        // config option; without it this feature is unavailable.
        if ($this->s3_image === null) {
            throw new RuntimeException ('The s3 client of image is null. You should set the image_endpoint firstly when using function getImageObjectUrl.');
        }
        return $this->s3_image->getObjectUrl($bucket, $key, $expires, $args);
    }
/**
* Get the image object, when setting the image_endpoint
* @param array $args Arguments to get image object, includes keys:"Bucket", "Key", "SaveAs"; The three keys are both necessary and case sensitive; Refers to function getObject's description
*/
public function getImageObject($args = array())
{
if ($this->s3_image === null) {
throw new RuntimeException ('The s3 client of image is null. You should set the image_endpoint firstly when using function getImageObject.');
}
$this->s3_image->getObject($args);
}
/**
* Helper used to clear the contents of a bucket. Use the {@see ClearBucket} object directly
* for more advanced options and control.
*
* @param string $bucket Name of the bucket to clear.
*
* @return int Returns the number of deleted keys
*/
public function clearBucket($bucket)
{
$clear = new ClearBucket($this, $bucket);
return $clear->clear();
}
/**
* Determines whether or not a bucket exists by name
*
* @param string $bucket The name of the bucket
* @param bool $accept403 Set to true if 403s are acceptable
* @param array $options Additional options to add to the executed command
*
* @return bool
*/
public function doesBucketExist($bucket, $accept403 = true, array $options = array())
{
return $this->checkExistenceWithCommand(
$this->getCommand('HeadBucket', array_merge($options, array(
'Bucket' => $bucket
))), $accept403
);
}
/**
* Determines whether or not an object exists by name
*
* @param string $bucket The name of the bucket
* @param string $key The key of the object
* @param array $options Additional options to add to the executed command
*
* @return bool
*/
public function doesObjectExist($bucket, $key, array $options = array())
{
return $this->checkExistenceWithCommand(
$this->getCommand('HeadObject', array_merge($options, array(
'Bucket' => $bucket,
'Key' => $key
)))
);
}
/**
* Raw URL encode a key and allow for '/' characters
*
* @param string $key Key to encode
*
* @return string Returns the encoded key
*/
public static function encodeKey($key)
{
return str_replace('%2F', '/', rawurlencode($key));
}
/**
* Explode a prefixed key into an array of values
*
* @param string $key Key to explode
*
* @return array Returns the exploded
*/
public static function explodeKey($key)
{
// Remove a leading slash if one is found
return explode('/', $key && $key[0] == '/' ? substr($key, 1) : $key);
}
/**
* Register the Amazon S3 stream wrapper and associates it with this client object
*
* @return $this
public function registerStreamWrapper()
{
StreamWrapper::register($this);
return $this;
}
*/
/**
* Upload a file, stream, or string to a bucket. If the upload size exceeds the specified threshold, the upload
* will be performed using parallel multipart uploads.
*
* @param string $bucket Bucket to upload the object
* @param string $key Key of the object
* @param mixed $body Object data to upload. Can be a Guzzle\Http\EntityBodyInterface, stream resource, or
* string of data to upload.
* @param string $acl ACL to apply to the object
* @param array $options Custom options used when executing commands:
* - params: Custom parameters to use with the upload. The parameters must map to a PutObject
* or InitiateMultipartUpload operation parameters.
* - min_part_size: Minimum size to allow for each uploaded part when performing a multipart upload.
* - concurrency: Maximum number of concurrent multipart uploads.
* - before_upload: Callback to invoke before each multipart upload. The callback will receive a
* Guzzle\Common\Event object with context.
*
* @see Mss\S3\Model\MultipartUpload\UploadBuilder for more options and customization
* @return \Guzzle\Service\Resource\Model Returns the modeled result of the performed operation
*/
    public function upload($bucket, $key, $body, $acl = 'private', array $options = array())
    {
        $body = EntityBody::factory($body);
        // Option keys are case-insensitive. Concurrency defaults to 3 parallel
        // part uploads for seekable local files and 1 for other streams.
        $options = Collection::fromConfig(array_change_key_case($options), array(
            'min_part_size' => AbstractMulti::MIN_PART_SIZE,
            'params' => array(),
            'concurrency' => $body->getWrapper() == 'plainfile' ? 3 : 1
        ));
        if ($body->getSize() < $options['min_part_size']) {
            // Perform a simple PutObject operation
            return $this->putObject(array(
                'Bucket' => $bucket,
                'Key' => $key,
                'Body' => $body,
                'ACL' => $acl
            ) + $options['params']);
        }
        // Perform a multipart upload if the file is large enough
        $transfer = UploadBuilder::newInstance()
            ->setBucket($bucket)
            ->setKey($key)
            ->setMinPartSize($options['min_part_size'])
            ->setConcurrency($options['concurrency'])
            ->setClient($this)
            ->setSource($body)
            ->setTransferOptions($options->toArray())
            ->addOptions($options['params'])
            ->setOption('ACL', $acl)
            ->build();
        // Optional hook invoked before each part upload.
        if ($options['before_upload']) {
            $transfer->getEventDispatcher()->addListener(
                AbstractTransfer::BEFORE_PART_UPLOAD,
                $options['before_upload']
            );
        }
        return $transfer->upload();
    }
/**
* Recursively uploads all files in a given directory to a given bucket.
*
* @param string $directory Full path to a directory to upload
* @param string $bucket Name of the bucket
* @param string $keyPrefix Virtual directory key prefix to add to each upload
* @param array $options Associative array of upload options
* - params: Array of parameters to use with each PutObject operation performed during the transfer
* - base_dir: Base directory to remove from each object key
* - force: Set to true to upload every file, even if the file is already in Amazon S3 and has not changed
* - concurrency: Maximum number of parallel uploads (defaults to 10)
* - debug: Set to true or an fopen resource to enable debug mode to print information about each upload
* - multipart_upload_size: When the size of a file exceeds this value, the file will be uploaded using a
* multipart upload.
*
* @see Mss\S3\S3Sync\S3Sync for more options and customization
*/
    public function uploadDirectory($directory, $bucket, $keyPrefix = null, array $options = array())
    {
        // Default the stripped base directory to the resolved absolute path,
        // falling back to the raw argument if realpath() fails.
        $options = Collection::fromConfig(
            $options,
            array(
                'base_dir' => realpath($directory) ?: $directory
            )
        );
        // A custom sync builder may be injected via the "builder" option
        // (useful for testing); concurrency defaults to 5 parallel uploads.
        $builder = $options['builder'] ?: UploadSyncBuilder::getInstance();
        $builder->uploadFromDirectory($directory)
            ->setClient($this)
            ->setBucket($bucket)
            ->setKeyPrefix($keyPrefix)
            ->setConcurrency($options['concurrency'] ?: 5)
            ->setBaseDir($options['base_dir'])
            ->force($options['force'])
            ->setOperationParams($options['params'] ?: array())
            ->enableDebugOutput($options['debug']);
        if ($options->hasKey('multipart_upload_size')) {
            $builder->setMultipartUploadSize($options['multipart_upload_size']);
        }
        $builder->build()->transfer();
    }
/**
* Downloads a bucket to the local filesystem
*
* @param string $directory Directory to download to
* @param string $bucket Bucket to download from
* @param string $keyPrefix Only download objects that use this key prefix
* @param array $options Associative array of download options
* - params: Array of parameters to use with each GetObject operation performed during the transfer
* - base_dir: Base directory to remove from each object key when storing in the local filesystem
* - force: Set to true to download every file, even if the file is already on the local filesystem and has not
* changed
* - concurrency: Maximum number of parallel downloads (defaults to 10)
* - debug: Set to true or a fopen resource to enable debug mode to print information about each download
* - allow_resumable: Set to true to allow previously interrupted downloads to be resumed using a Range GET
*/
public function downloadBucket($directory, $bucket, $keyPrefix = '', array $options = array())
{
$options = new Collection($options);
$builder = $options['builder'] ?: DownloadSyncBuilder::getInstance();
$builder->setDirectory($directory)
->setClient($this)
->setBucket($bucket)
->setKeyPrefix($keyPrefix)
->setConcurrency($options['concurrency'] ?: 10)
->setBaseDir($options['base_dir'])
->force($options['force'])
->setOperationParams($options['params'] ?: array())
->enableDebugOutput($options['debug']);
if ($options['allow_resumable']) {
$builder->allowResumableDownloads();
}
$builder->build()->transfer();
}
/**
* Deletes objects from Amazon S3 that match the result of a ListObjects operation. For example, this allows you
* to do things like delete all objects that match a specific key prefix.
*
* @param string $bucket Bucket that contains the object keys
* @param string $prefix Optionally delete only objects under this key prefix
* @param string $regex Delete only objects that match this regex
* @param array $options Options used when deleting the object:
* - before_delete: Callback to invoke before each delete. The callback will receive a
* Guzzle\Common\Event object with context.
*
* @see Mss\S3\S3Client::listObjects
* @see Mss\S3\Model\ClearBucket For more options or customization
* @return int Returns the number of deleted keys
* @throws RuntimeException if no prefix and no regex is given
*/
public function deleteMatchingObjects($bucket, $prefix = '', $regex = '', array $options = array())
{
if (!$prefix && !$regex) {
throw new RuntimeException('A prefix or regex is required, or use S3Client::clearBucket().');
}
$clear = new ClearBucket($this, $bucket);
$iterator = $this->getIterator('ListObjects', array('Bucket' => $bucket, 'Prefix' => $prefix));
if ($regex) {
$iterator = new FilterIterator($iterator, function ($current) use ($regex) {
return preg_match($regex, $current['Key']);
});
}
$clear->setIterator($iterator);
if (isset($options['before_delete'])) {
$clear->getEventDispatcher()->addListener(ClearBucket::BEFORE_CLEAR, $options['before_delete']);
}
return $clear->clear();
}
/**
* Determines whether or not a resource exists using a command
*
* @param CommandInterface $command Command used to poll for the resource
* @param bool $accept403 Set to true if 403s are acceptable
*
* @return bool
* @throws S3Exception|\Exception if there is an unhandled exception
*/
protected function checkExistenceWithCommand(CommandInterface $command, $accept403 = false)
{
try {
$command->execute();
$exists = true;
} catch (AccessDeniedException $e) {
$exists = (bool) $accept403;
} catch (S3Exception $e) {
$exists = false;
if ($e->getResponse()->getStatusCode() >= 500) {
// @codeCoverageIgnoreStart
throw $e;
// @codeCoverageIgnoreEnd
}
}
return $exists;
}
}
| meituan/mssapi_php | src/Mss/S3/S3Client.php | PHP | apache-2.0 | 28,775 |
/* $Id: IbisCreationFailedException.java 13036 2011-02-24 16:37:33Z ceriel $ */
package nl.esciencecenter.aether;
/**
* Signals that an Ibis instance could not be created.
*/
public class CreationFailedException extends Exception {
    private static final long serialVersionUID = 1L;
    /**
     * Constructs a <code>CreationFailedException</code> with
     * <code>null</code> as its error detail message.
     */
    public CreationFailedException() {
        super();
    }
    /**
     * Constructs a <code>CreationFailedException</code> with the
     * specified detail message.
     *
     * @param detailMessage
     *            the detail message
     */
    public CreationFailedException(String detailMessage) {
        super(detailMessage);
    }
    /**
     * Constructs a <code>CreationFailedException</code> with the
     * specified detail message and cause.
     *
     * @param detailMessage
     *            the detail message
     * @param cause
     *            the cause
     */
    public CreationFailedException(String detailMessage, Throwable cause) {
        super(detailMessage, cause);
    }
    /**
     * Constructs a <code>CreationFailedException</code> with the
     * specified cause.
     *
     * @param cause
     *            the cause
     */
    public CreationFailedException(Throwable cause) {
        super(cause);
    }
}
| NLeSC/Aether | src/nl/esciencecenter/aether/CreationFailedException.java | Java | apache-2.0 | 1,398 |
"""
visit https://morvanzhou.github.io/tutorials/ for more!
Build two networks.
1. Without batch normalization
2. With batch normalization
Run tests on these two networks.
"""
# 23 Batch Normalization
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
ACTIVATION = tf.nn.relu
N_LAYERS = 7
N_HIDDEN_UNITS = 30
def fix_seed(seed=1):
    """Seed both the numpy and tensorflow RNGs so runs are reproducible."""
    for seeder in (np.random.seed, tf.set_random_seed):
        seeder(seed)
def plot_his(inputs, inputs_norm):
    """Draw per-layer input histograms: plain net on row 1, batch-normed net on row 2.

    Args:
        inputs: list of per-layer input arrays from the network without BN.
        inputs_norm: list of per-layer input arrays from the network with BN.
    """
    # plot histogram for the inputs of every layer
    for j, all_inputs in enumerate([inputs, inputs_norm]):
        # Renamed from `input` to avoid shadowing the Python builtin.
        for i, layer_in in enumerate(all_inputs):
            plt.subplot(2, len(all_inputs), j * len(all_inputs) + (i + 1))
            plt.cla()
            # Column 0 holds the raw data (roughly spanning (-7, 10)); later
            # layers are plotted over (-1, 1).
            if i == 0:
                the_range = (-7, 10)
            else:
                the_range = (-1, 1)
            plt.hist(layer_in.ravel(), bins=15, range=the_range, color='#FF5733')
            plt.yticks(())
            if j == 1:
                plt.xticks(the_range)
            else:
                plt.xticks(())
            ax = plt.gca()
            ax.spines['right'].set_color('none')
            ax.spines['top'].set_color('none')
            plt.title("%s normalizing" % ("Without" if j == 0 else "With"))
        plt.draw()
        plt.pause(0.01)
def built_net(xs, ys, norm):
    """Build a regression net with N_LAYERS hidden layers; `norm` toggles BN.

    Args:
        xs: float32 placeholder for inputs, shape [num_samples, 1].
        ys: float32 placeholder for targets, shape [num_samples, 1].
        norm: if True, batch-normalize the raw input and every hidden layer's
            pre-activation.

    Returns:
        [train_op, cost, layers_inputs] where layers_inputs records the input
        tensor of every layer (consumed by plot_his for histograms).
    """
    def add_layer(inputs, in_size, out_size, activation_function=None, norm=False):
        # One fully connected layer; optionally batch-normalized before activation.
        # weights and biases (bad initialization for this case)
        Weights = tf.Variable(tf.random_normal([in_size, out_size], mean=0., stddev=1.))
        biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
        # fully connected product
        Wx_plus_b = tf.matmul(inputs, Weights) + biases
        # normalize fully connected product
        if norm:
            # Batch Normalize
            fc_mean, fc_var = tf.nn.moments(
                Wx_plus_b,
                axes=[0],   # the dimension you wanna normalize, here [0] for batch
                            # for image, you wanna do [0, 1, 2] for [batch, height, width] but not channel
            )
            scale = tf.Variable(tf.ones([out_size]))
            shift = tf.Variable(tf.zeros([out_size]))
            epsilon = 0.001
            # apply moving average for mean and var when train on batch
            ema = tf.train.ExponentialMovingAverage(decay=0.5)
            def mean_var_with_update():
                # The control dependency forces the EMA update to run every
                # time the (mean, var) pair is evaluated.
                ema_apply_op = ema.apply([fc_mean, fc_var])
                with tf.control_dependencies([ema_apply_op]):
                    return tf.identity(fc_mean), tf.identity(fc_var)
            mean, var = mean_var_with_update()
            Wx_plus_b = tf.nn.batch_normalization(Wx_plus_b, mean, var, shift, scale, epsilon)
            # similar with this two steps:
            # Wx_plus_b = (Wx_plus_b - fc_mean) / tf.sqrt(fc_var + 0.001)
            # Wx_plus_b = Wx_plus_b * scale + shift
        # activation
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
        return outputs
    fix_seed(1)
    if norm:
        # BN for the first input
        fc_mean, fc_var = tf.nn.moments(
            xs,
            axes=[0],
        )
        scale = tf.Variable(tf.ones([1]))
        shift = tf.Variable(tf.zeros([1]))
        epsilon = 0.001
        # apply moving average for mean and var when train on batch
        ema = tf.train.ExponentialMovingAverage(decay=0.5)
        def mean_var_with_update():
            ema_apply_op = ema.apply([fc_mean, fc_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(fc_mean), tf.identity(fc_var)
        mean, var = mean_var_with_update()
        xs = tf.nn.batch_normalization(xs, mean, var, shift, scale, epsilon)
    # record inputs for every layer
    layers_inputs = [xs]
    # build hidden layers
    for l_n in range(N_LAYERS):
        layer_input = layers_inputs[l_n]
        in_size = layers_inputs[l_n].get_shape()[1].value
        output = add_layer(
            layer_input,    # input
            in_size,    # input size
            N_HIDDEN_UNITS,    # output size
            ACTIVATION,    # activation function
            norm,    # normalize before activation
        )
        layers_inputs.append(output)    # add output for next run
    # build output layer
    # NOTE(review): the hard-coded 30 presumably mirrors N_HIDDEN_UNITS — confirm.
    prediction = add_layer(layers_inputs[-1], 30, 1, activation_function=None)
    cost = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
    train_op = tf.train.GradientDescentOptimizer(0.001).minimize(cost)
    return [train_op, cost, layers_inputs]
# make up data: y = x^2 - 5 plus Gaussian noise, over a skewed input range
fix_seed(1)
x_data = np.linspace(-7, 10, 2500)[:, np.newaxis]
np.random.shuffle(x_data)
noise = np.random.normal(0, 8, x_data.shape)
y_data = np.square(x_data) - 5 + noise
# plot input data
plt.scatter(x_data, y_data)
plt.show()
xs = tf.placeholder(tf.float32, [None, 1])  # [num_samples, num_features]
ys = tf.placeholder(tf.float32, [None, 1])
# Build both networks on the same placeholders so they train on identical data.
train_op, cost, layers_inputs = built_net(xs, ys, norm=False)   # without BN
train_op_norm, cost_norm, layers_inputs_norm = built_net(xs, ys, norm=True) # with BN
sess = tf.Session()
# NOTE(review): tf.initialize_all_variables() is the deprecated pre-0.12 name
# of tf.global_variables_initializer() — fine for the TF1 era this targets.
sess.run(tf.initialize_all_variables())
# record cost
cost_his = []
cost_his_norm = []
record_step = 5
plt.ion()
plt.figure(figsize=(7, 3))
for i in range(250):
    if i % 50 == 0:
        # plot histogram of every layer's inputs for both networks
        all_inputs, all_inputs_norm = sess.run([layers_inputs, layers_inputs_norm], feed_dict={xs: x_data, ys: y_data})
        plot_his(all_inputs, all_inputs_norm)
    # train on batch (mini-batches of 10 consecutive samples)
    sess.run([train_op, train_op_norm], feed_dict={xs: x_data[i*10:i*10+10], ys: y_data[i*10:i*10+10]})
    if i % record_step == 0:
        # record cost over the full data set
        cost_his.append(sess.run(cost, feed_dict={xs: x_data, ys: y_data}))
        cost_his_norm.append(sess.run(cost_norm, feed_dict={xs: x_data, ys: y_data}))
# final comparison plot of the two training curves
plt.ioff()
plt.figure()
plt.plot(np.arange(len(cost_his))*record_step, np.array(cost_his), label='no BN')     # no norm
plt.plot(np.arange(len(cost_his))*record_step, np.array(cost_his_norm), label='BN')   # norm
plt.legend()
plt.show()
| del680202/MachineLearning-memo | src/tensorflow/BN.py | Python | apache-2.0 | 6,157 |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
'use strict';
const {assert} = require('chai');
const {describe, it} = require('mocha');
const {AutoMlClient} = require('@google-cloud/automl').v1beta1;
const cp = require('child_process');
const DELETE_MODEL_REGION_TAG = 'beta/delete-model.js';
const LOCATION = 'us-central1';
describe('Automl Delete Model Tests', () => {
  const client = new AutoMlClient();

  it('should delete a model', async () => {
    // As model creation can take many hours, instead try to delete a
    // nonexistent model and confirm that the model was not found, but other
    // elements of the request were valid.
    const projectId = await client.getProjectId();

    // Run the sample as a child process and inspect its stderr.
    const output = cp.spawnSync(
      'node',
      [DELETE_MODEL_REGION_TAG, projectId, LOCATION, 'TRL0000000000000000000'],
      {encoding: 'utf8'}
    );

    assert.match(output.stderr, /NOT_FOUND/);
    assert.match(output.stderr, /The model does not exist./);
  });
});
| googleapis/nodejs-automl | samples/test/delete-model.beta.test.js | JavaScript | apache-2.0 | 1,548 |
package com.stt.portfolio;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.stt.portfolio.quotes.Quote;
import com.stt.portfolio.quotes.QuoteManager;
import com.stt.portfolio.transactions.I_BookEntryManagement;
/**
 * Maintains the set of {@link BookEntry} holdings keyed by ticker symbol and
 * derives portfolio views (tables, allocations) from them. Handles corporate
 * actions such as renames, splits and spinoffs.
 */
public class BookEntryManager implements I_BookEntryManagement {
	// Holdings keyed by ticker symbol.
	private Map<String, BookEntry> bookEntries;
	private I_TickerManager tickerManager = null;
	// Total market value; recomputed as a side effect of getBookEntryTable().
	private double portfolioValue = 0.0;
	private String broker;
	private CashManager cashManager;
	private TaxManager taxManager = null;
	public BookEntryManager(I_TickerManager tickerManager,
			CashManager cashManager, TaxManager taxManager) {
		this.tickerManager = tickerManager;
		this.cashManager = cashManager;
		this.taxManager = taxManager;
		bookEntries = new HashMap<String, BookEntry>();
	}
	/** Removes all book entries. */
	public void clear() {
		bookEntries.clear();
	}
	/**
	 * Renames a holding's ticker symbol. If an entry already exists under the
	 * new symbol, the two positions are merged into it.
	 */
	public void renameBookEntry(String symbol, String newSymbol, Date date) {
		if (bookEntries.containsKey(symbol)) {
			BookEntry entry = bookEntries.get(symbol);
			// System.out.println("Renaming " + symbol + " to " + newSymbol +
			// " " + date);
			if (bookEntries.containsKey(newSymbol)) {
				BookEntry entryNew = bookEntries.get(newSymbol);
				entryNew.add(entry);
			} else {
				entry.changeSymbol(newSymbol);
				entry.setName(tickerManager.getName(newSymbol));
				bookEntries.put(newSymbol, entry);
			}
			bookEntries.remove(symbol);
		}
	}
	/**
	 * Applies a spinoff: creates (or reuses) an entry for newSymbol derived
	 * from the parent position and splits the cost basis by the given ratios.
	 */
	public void spinoffBookEntry(String symbol, String newSymbol, double ratio, double stockratio,
			boolean storeParent) {
		if (bookEntries.containsKey(symbol)) {
			BookEntry entry = bookEntries.get(symbol);
			BookEntry entryNew = createBookEntry(newSymbol);
			List<BookEntryItem> items = entryNew.add(entry, storeParent);
			// if (symbol.equals("MEO1V")){
			// System.out.println("spinoff " + symbol + " " + newSymbol + " " + ratio + " " + stockratio);
			// }
			if (items == null) {
				entryNew.spinoff(ratio, stockratio, entryNew.subEntries);
			}
			else {
				entryNew.spinoff(ratio, stockratio, items);
			}
		}
	}
	/** Removes the entry for the given symbol, if present. */
	public void removeBookEntry(String symbol) {
		if (bookEntries.containsKey(symbol)) {
			bookEntries.remove(symbol);
		}
	}
	/** Applies a stock split to the holding, adjusting amount and price. */
	public void split(String symbol, double ratio, Date date) {
		if (bookEntries.containsKey(symbol)) {
			// System.out.println("Split: " + symbol + " " + ratio + " " +
			// date);
			BookEntry entry = bookEntries.get(symbol);
			entry.split(ratio);
		}
	}
	/**
	 * Returns the entry for the symbol, creating an empty one (with name and
	 * currency looked up from the ticker manager) when none exists yet.
	 */
	public BookEntry createBookEntry(String symbol) {
		BookEntry entry;
		if (bookEntries.containsKey(symbol)) {
			entry = bookEntries.get(symbol);
		} else {
			String name = symbol;
			String ccy = "";
			Stock s = tickerManager.getStock(symbol);
			if (s != null) {
				name = s.getName();
				ccy = s.getCcy();
			}
			entry = new BookEntry(symbol, name, ccy, broker, cashManager, this,
					taxManager);
			if (s != null) {
				entry.setOption(s.isOption());
			} else {
				// System.out.println("null " + name);
			}
			bookEntries.put(symbol, entry);
		}
		return entry;
	}
	/** Drops entries whose remaining amount is (near) zero. */
	public void removeEmptyBookEntries() {
		for (Iterator<Map.Entry<String, BookEntry>> i = bookEntries.entrySet()
				.iterator(); i.hasNext();) {
			Map.Entry<String, BookEntry> entry = i.next();
			// NOTE(review): threshold 1e-4 here vs 1e-6 in printEntries — confirm intended.
			if (entry.getValue().getAmount() < 0.0001) {
				i.remove();
			}
		}
	}
	/** Debug helper: prints every non-empty entry to stdout. */
	public void printEntries() {
		Collection<BookEntry> entries = bookEntries.values();
		for (BookEntry e : entries) {
			if (e.getAmount() > 0.000001) {
				e.print();
			}
		}
	}
	public Collection<BookEntry> getBookEntryCollection() {
		return bookEntries.values();
	}
	public Map<String, BookEntry> getBookEntries() {
		return bookEntries;
	}
	/**
	 * Adds an entry, merging into an existing position for the same symbol or
	 * storing a defensive copy otherwise.
	 */
	public void addEntry(BookEntry e) {
		if (bookEntries.containsKey(e.getSymbol())) {
			BookEntry old = bookEntries.get(e.getSymbol());
			old.add(e);
		} else {
			BookEntry newEntry = new BookEntry(e);
			newEntry.setBookEntryManager(this);
			bookEntries.put(e.getSymbol(), newEntry);
		}
	}
	/**
	 * Refreshes each non-empty entry's market price, FX rate and quote date
	 * from the quote manager for the given date.
	 */
	private void updateMarketPrices(QuoteManager quoteManager,
			I_TickerManager tickerManager, Date date) {
		Collection<BookEntry> entries = getBookEntryCollection();
		for (BookEntry e : entries) {
			if (e.getAmount() > 0.0) {
				double rate = 1.0000;
				// table[i][j++] = String.format("%1$.2f",
				// quoteManager.getQuote(e.getSymbol()).getLast());
				double marketPrice = e.getPrice();
				Quote quote = quoteManager.getQuote(e.getSymbol(), date);
				if (quote != null) {
					marketPrice = quote.getLast();
					rate = quote.getRate();
					e.setQuoteDate(quote.getDate());
				}
				else {
					System.out.println("null quote: " + date + " " + e.getSymbol());
				}
				Stock stock = tickerManager.getStock(e.getSymbol());
				if (stock != null) {
					marketPrice = marketPrice / stock.getPriceDivider();
				}
				e.setMarketPrice(marketPrice / rate);
				e.setRate(rate);
			}
		}
	}
	/** Total number of lot-level rows across all entries (for showItems mode). */
	private int getItemCount() {
		int count = 0;
		Collection<BookEntry> entries = getBookEntryCollection();
		for (BookEntry e : entries) {
			count += e.getChildrenCount();
		}
		return count;
	}
	/**
	 * Builds the portfolio table for display: one row per holding, or one row
	 * per purchase lot when showItems is true. Also updates portfolioValue and
	 * fills column 14 with each row's weight percentage of the portfolio.
	 */
	public Object[][] getBookEntryTable(QuoteManager quoteManager, Date date,
			boolean showItems) {
		int COLUMNS = 15;
		XIRRAdapter xirrAdapter = new XIRRAdapter();
		updateMarketPrices(quoteManager, tickerManager, date);
		List<BookEntry> entries = new ArrayList<BookEntry>();
		entries.addAll(getBookEntryCollection());
		sortBookEntries(entries);
		Object[][] table;
		if (showItems) {
			table = new Object[getItemCount()][COLUMNS];
		} else {
			table = new Object[entries.size()][COLUMNS];
		}
		portfolioValue = 0.0;
		int i = 0;
		for (BookEntry e : entries) {
			if (e.getAmount() > 0.0) {
				int j = 0;
				Stock stock = tickerManager.getStock(e.getSymbol());
				if (!showItems) {
					// Aggregated row for the whole holding.
					table[i][j++] = (stock != null) ? stock.getName() : e.getSymbol();
					table[i][j++] = e.getCcy();
					table[i][j++] = Double.valueOf(e.getMarketPrice() * e.getRate());
					table[i][j++] = (e.getQuoteDate() != null) ? e
							.getQuoteDate() : "N/A";
					table[i][j++] = Integer.valueOf((int) e.getAmount());
					// table[i][j++] = new Double(e.getCost());
					// table[i][j++] = String.format("%1$.2f", e.getPrice());
					table[i][j++] = Double.valueOf(e.getPriceOriginalCurrency()); // Show
					// buy
					// price
					// in
					// original
					// currency
					table[i][j++] = e.getPurchaseDate();
					table[i][j++] = Double.valueOf(e.getMarketPrice() * e.getRate()
							* e.getAmount());
					table[i][j++] = Double.valueOf(e.getMarketPrice()
							* e.getAmount());
					portfolioValue += e.getMarketPrice() * e.getAmount();
					table[i][j++] = Double.valueOf(e.getTaxCost());
					table[i][j++] = Double.valueOf(e.getDividents());
					table[i][j++] = Double.valueOf(e.getProfit(e.getMarketPrice()));
					table[i][j++] = Double.valueOf(e.getProfitPercent(e
							.getMarketPrice()));
					table[i][j++] = Double.valueOf(xirrAdapter.getXirr(e.getQuoteDate(), e));
					++i;
				} else {
					// One row per purchase lot (child item).
					Iterator<BookEntryItem> iter = e.getChildrenIterator();
					while (iter.hasNext()) {
						j = 0;
						BookEntryItem item = iter.next();
						table[i][j++] = (stock != null) ? stock.getName() : e.getSymbol();
						table[i][j++] = e.getCcy();
						table[i][j++] = Double.valueOf(e.getMarketPrice()
								* e.getRate());
						table[i][j++] = (e.getQuoteDate() != null) ? e
								.getQuoteDate() : "N/A";
						table[i][j++] = Integer.valueOf((int) item.getAmount());
						// table[i][j++] = new Double(e.getCost());
						// table[i][j++] = String.format("%1$.2f",
						// e.getPrice());
						table[i][j++] = Double.valueOf(-item
								.getCostInOriginalCurrency()
								/ item.getAmount()); // Show buy price in
						// original currency
						table[i][j++] = item.getPurchaseDate();
						table[i][j++] = Double.valueOf(e.getMarketPrice() *
								e.getRate() * item.getAmount());
						table[i][j++] = Double.valueOf(e.getMarketPrice() *
								item.getAmount());
						table[i][j++] = Double.valueOf(item.getTaxCost() );
						table[i][j++] = Double.valueOf(item.getDividents());
						table[i][j++] = Double.valueOf(item.getProfit(e.getMarketPrice()));
						table[i][j++] = Double.valueOf(item.getProfitPercent(e.getMarketPrice()));
						table[i][j++] = Double.valueOf(xirrAdapter.getXirr(e.getQuoteDate(), e.getMarketPrice(), item));
						portfolioValue += e.getMarketPrice() * item.getAmount();
						i++;
					}
				}
			}
		}
		// add weight % from portfolio (column 8 holds the row's market value)
		for (int k = 0; k < i; ++k) {
			if (table[k][8] != null) {
				table[k][14] = Double.valueOf(100.0 * (Double) (table[k][8])
						/ portfolioValue);
			}
		}
		return table;
	}
	/** Returns all entries, priced for the given date and sorted by market value. */
	public List<BookEntry> getBookEntryList(QuoteManager quoteManager, Date date) {
		updateMarketPrices(quoteManager, tickerManager, date);
		List<BookEntry> entries = new ArrayList<BookEntry>();
		entries.addAll(getBookEntryCollection());
		sortBookEntries(entries);
		return entries;
	}
	// Sorts in place by market value (ascending).
	private void sortBookEntries(List<BookEntry> entries) {
		Collections.sort(entries, new Comparator<BookEntry>() {
			@Override
			public int compare(BookEntry o1, BookEntry o2) {
				Double v1 = Double.valueOf(o1.getMarketPrice() * o1.getAmount());
				Double v2 = Double.valueOf(o2.getMarketPrice() * o2.getAmount());
				return v1.compareTo(v2);
			}
		});
	}
	public double getPortfolioValue() {
		return portfolioValue;
	}
	public String getBroker() {
		return broker;
	}
	public void setBroker(String broker) {
		this.broker = broker;
	}
	public BookEntry getBookEntry(String ticker) {
		return bookEntries.get(ticker);
	}
	public I_TickerManager getTickerManager() {
		return tickerManager;
	}
	public TaxManager getTaxManager() {
		return taxManager;
	}
	/**
	 * Sums market value per country (via each stock's country from the ticker
	 * manager) and returns the resulting (country, value) pairs.
	 */
	public Set<Entry<String, Double>> getCountryAllocationTable(QuoteManager quoteManager, Date date) {
		updateMarketPrices(quoteManager, tickerManager, date);
		List<BookEntry> entries = new ArrayList<BookEntry>();
		entries.addAll(getBookEntryCollection());
		sortBookEntries(entries);
		Map<String, Double> allocations = new HashMap<>();
		for (BookEntry e : entries) {
			if (e.getAmount() > 0.0) {
				Stock stock = tickerManager.getStock(e.getSymbol());
				if (stock != null) {
					String country = stock.getCountry();
					if (allocations.containsKey(country)) {
						double newAmount = allocations.get(country) + (e.getMarketPrice() * e.getAmount());
						allocations.put(country, newAmount);
					} else {
						allocations.put(country, (e.getMarketPrice() * e.getAmount()));
					}
				}
			}
		}
		Set<java.util.Map.Entry<String, Double>> allocationEntries = allocations.entrySet();
		return allocationEntries;
	}
}
| skarna1/javaportfolio | src/com/stt/portfolio/BookEntryManager.java | Java | apache-2.0 | 10,991 |
require 'pathname'
# Auto-generated Puppet type wrapping the MSFT_SqlServerConfiguration DSC resource.
Puppet::Type.newtype(:dsc_sqlserverconfiguration) do
  require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
  require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'
  @doc = %q{
    The DSC SqlServerConfiguration resource type.
    Automatically generated from
    'SqlServerDsc/DSCResources/MSFT_SqlServerConfiguration/MSFT_SqlServerConfiguration.schema.mof'
    To learn more about PowerShell Desired State Configuration, please
    visit https://technet.microsoft.com/en-us/library/dn249912.aspx.
    For more information about built-in DSC Resources, please visit
    https://technet.microsoft.com/en-us/library/dn249921.aspx.
    For more information about xDsc Resources, please visit
    https://github.com/PowerShell/DscResources.
  }
  # The DSC resource's key properties are mandatory on the Puppet side too.
  validate do
      fail('dsc_servername is a required attribute') if self[:dsc_servername].nil?
      fail('dsc_instancename is a required attribute') if self[:dsc_instancename].nil?
      fail('dsc_optionname is a required attribute') if self[:dsc_optionname].nil?
    end
  # Metadata used by base_dsc to locate and invoke the underlying DSC resource.
  def dscmeta_resource_friendly_name; 'SqlServerConfiguration' end
  def dscmeta_resource_name; 'MSFT_SqlServerConfiguration' end
  def dscmeta_module_name; 'SqlServerDsc' end
  def dscmeta_module_version; '11.1.0.0' end
  newparam(:name, :namevar => true ) do
  end
  # Only :present is supported; ensure maps onto DSC's Test/Set cycle.
  ensurable do
    newvalue(:exists?) { provider.exists? }
    newvalue(:present) { provider.create }
    defaultto { :present }
  end
  # Name: PsDscRunAsCredential
  # Type: MSFT_Credential
  # IsMandatory: False
  # Values: None
  newparam(:dsc_psdscrunascredential) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "PsDscRunAsCredential"
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_sensitive_hash!(value)
    end
  end
  # Name: ServerName
  # Type: string
  # IsMandatory: True
  # Values: None
  newparam(:dsc_servername) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "ServerName - The hostname of the SQL Server to be configured."
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end
  # Name: InstanceName
  # Type: string
  # IsMandatory: True
  # Values: None
  newparam(:dsc_instancename) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "InstanceName - Name of the SQL instance to be configured."
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end
  # Name: OptionName
  # Type: string
  # IsMandatory: True
  # Values: None
  newparam(:dsc_optionname) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "OptionName - The name of the SQL configuration option to be checked."
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end
  # Name: OptionValue
  # Type: sint32
  # IsMandatory: False
  # Values: None
  newparam(:dsc_optionvalue) do
    def mof_type; 'sint32' end
    def mof_is_embedded?; false end
    desc "OptionValue - The desired value of the SQL configuration option."
    validate do |value|
      unless value.kind_of?(Numeric) || value.to_i.to_s == value
        fail("Invalid value #{value}. Should be a signed Integer")
      end
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_integer(value)
    end
  end
  # Name: RestartService
  # Type: boolean
  # IsMandatory: False
  # Values: None
  newparam(:dsc_restartservice) do
    def mof_type; 'boolean' end
    def mof_is_embedded?; false end
    desc "RestartService - Determines whether the instance should be restarted after updating the configuration option."
    validate do |value|
    end
    newvalues(true, false)
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_boolean(value.to_s)
    end
  end
  # Name: RestartTimeout
  # Type: uint32
  # IsMandatory: False
  # Values: None
  newparam(:dsc_restarttimeout) do
    def mof_type; 'uint32' end
    def mof_is_embedded?; false end
    desc "RestartTimeout - The length of time, in seconds, to wait for the service to restart. Default is 120 seconds."
    validate do |value|
      unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
        fail("Invalid value #{value}. Should be a unsigned Integer")
      end
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_integer(value)
    end
  end
  # Chain a reboot relationship when the DSC resource requests one.
  def builddepends
    pending_relations = super()
    PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
  end
end
# PowerShell-backed provider; only usable on Windows with WMF/PS DSC >= 5.0.10586.117.
Puppet::Type.type(:dsc_sqlserverconfiguration).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
  confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117'))
  defaultfor :operatingsystem => :windows
  mk_resource_methods
end
| jpogran/puppetlabs-dsc | lib/puppet/type/dsc_sqlserverconfiguration.rb | Ruby | apache-2.0 | 5,497 |
/*
* (c) 2014 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package com.linkedin.cubert.memory;
import com.linkedin.cubert.block.BlockSchema;
import com.linkedin.cubert.block.ColumnType;
import com.linkedin.cubert.block.DataType;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.linkedin.cubert.utils.DataGenerator;
import java.util.Iterator;
/**
* Unit Test class for IntArrayList, LongArrayList, DoubleArrayList, SegmentedArrayList
*
* Created by spyne on 1/8/15.
*
*/
public class TestSegmentedArrayLists
{
    /** Verifies add/get round-trips and pairwise index comparison for IntArrayList. */
    @Test
    public void testIntArrayListAddAndGet() throws Exception
    {
        IntArrayList list = new IntArrayList(101);
        DataGenerator dgen = new DataGenerator();
        final int size = 1000;
        final int[] ints = dgen.randomInts(size);
        for (int i = 0; i < size; ++i)
        {
            list.addInt(ints[i]);
        }
        Assert.assertEquals(size, list.size());
        for (int i = 0; i < size; ++i)
        {
            Assert.assertEquals(ints[i], list.get(i));
        }
        // compareIndices must agree with boxed Integer comparison of neighbors.
        for (int i = 0; i < size - 1; ++i)
        {
            final Integer act1 = ints[i], act2 = ints[i+1];
            Assert.assertEquals(act1.compareTo(act2), list.compareIndices(i, i + 1));
        }
    }
    /** Rounds {@code number} up to the next multiple of {@code multipleOf}. */
    private static int upperBound(int number, int multipleOf)
    {
        if (number % multipleOf == 0)
            return number;
        return ((number / multipleOf) + 1) * multipleOf;
    }
    /** Verifies growth, default values, updates and reset semantics of IntArrayList. */
    @Test
    public void testIntArrayGrowability() throws Exception
    {
        final int BATCH_SIZE = 10;
        IntArrayList list = new IntArrayList(BATCH_SIZE);
        final int MINUS_FOUR = -4;
        list.setDefaultValue(MINUS_FOUR);
        // ensure that it can hold 25 elements
        final int INITIAL_SIZE = 25;
        list.ensureCapacity(INITIAL_SIZE);
        Assert.assertEquals(list.capacity(), upperBound(INITIAL_SIZE, BATCH_SIZE));
        // test that all 25 elements are set to default value
        for (int i = 0; i < INITIAL_SIZE; i++)
        {
            Assert.assertEquals(list.getInt(i), MINUS_FOUR);
        }
        // update values for some elements
        final int NEW_VALUE = 3;
        for (int i = 0; i < 10; i++)
        {
            list.updateInt(i, NEW_VALUE);
        }
        // test
        for (int i = 0; i < 25; i++)
        {
            Assert.assertEquals(list.getInt(i), i < 10 ? NEW_VALUE : MINUS_FOUR);
        }
        // resize
        final int INCREASED_SIZE = 39;
        list.ensureCapacity(INCREASED_SIZE);
        Assert.assertEquals(list.capacity(), upperBound(INCREASED_SIZE, BATCH_SIZE));
        // test values are not affected by growing
        for (int i = 0; i < INCREASED_SIZE; i++)
        {
            Assert.assertEquals(list.getInt(i), i < 10 ? NEW_VALUE : MINUS_FOUR);
        }
        // reset
        final int RESET_SIZE = 12;
        list.reset(RESET_SIZE);
        // (fixed: stray empty statement ";;" removed)
        Assert.assertEquals(list.capacity(), upperBound(RESET_SIZE, BATCH_SIZE));
        // test values are reset as well
        for (int i = 0; i < RESET_SIZE; i++)
        {
            Assert.assertEquals(list.getInt(i), MINUS_FOUR);
        }
    }
    /** Verifies add/get round-trips and index comparison for LongArrayList. */
    @Test
    public void testLongArrayListAddAndGet() throws Exception
    {
        LongArrayList list = new LongArrayList(101);
        DataGenerator dgen = new DataGenerator();
        final int size = 1000;
        final long[] longs = dgen.randomLongs(size);
        for (int i = 0; i < size; ++i)
        {
            list.addLong(longs[i]);
        }
        Assert.assertEquals(size, list.size());
        for (int i = 0; i < size; ++i)
        {
            Assert.assertEquals(longs[i], list.get(i));
        }
        for (int i = 0; i < size - 1; ++i)
        {
            final Long act1 = longs[i], act2 = longs[i+1];
            Assert.assertEquals(act1.compareTo(act2), list.compareIndices(i, i + 1));
        }
    }
    /** Verifies add/get round-trips and index comparison for DoubleArrayList. */
    @Test
    public void testDoubleArrayListAddAndGet() throws Exception
    {
        DoubleArrayList list = new DoubleArrayList(101);
        DataGenerator dgen = new DataGenerator();
        final int size = 1000;
        final double[] doubles = dgen.randomDoubles(size);
        for (int i = 0; i < size; ++i)
        {
            list.add(doubles[i]);
        }
        Assert.assertEquals(size, list.size());
        for (int i = 0; i < size; ++i)
        {
            Assert.assertEquals(doubles[i], list.get(i));
        }
        for (int i = 0; i < size - 1; ++i)
        {
            final Double act1 = doubles[i], act2 = doubles[i+1];
            Assert.assertEquals(act1.compareTo(act2), list.compareIndices(i, i + 1));
        }
    }
    /** Verifies add/get round-trips and index comparison for ObjectArrayList. */
    @Test
    public void testSegmentedArrayListAddAndGet() throws Exception
    {
        ObjectArrayList list = new ObjectArrayList(101);
        DataGenerator dgen = new DataGenerator();
        final int size = 1000;
        final String[] strings = dgen.randomStrings(size);
        for (int i = 0; i < size; ++i)
        {
            list.add(strings[i]);
        }
        Assert.assertEquals(size, list.size());
        for (int i = 0; i < size; ++i)
        {
            Assert.assertEquals(strings[i], list.get(i));
        }
        for (int i = 0; i < size - 1; ++i)
        {
            final String act1 = strings[i], act2 = strings[i+1];
            Assert.assertEquals(act1.compareTo(act2), list.compareIndices(i, i + 1));
        }
    }
    /** Stores bags of varying sizes in a BagArrayList and checks round-trip equality. */
    @Test
    public void testBagArrayList() throws Exception
    {
        SegmentedArrayList array = new BagArrayList(new BlockSchema("INT a, DOUBLE b, STRING c"), false);
        final int N = 10000;
        DataBag[] bags = new DataBag[N];
        int counter = 0;
        for (int i = 0; i < N; i++)
        {
            // Bag sizes cycle through 1..5 tuples.
            Tuple[] tuplesInBag = new Tuple[(i % 5) + 1];
            for (int j = 0; j < tuplesInBag.length; j++)
            {
                tuplesInBag[j] = createTuple(counter, counter * 1.0, Integer.toString(counter));
                counter++;
            }
            bags[i] = createBag(tuplesInBag);
        }
        for (DataBag bag: bags)
            array.add(bag);
        Assert.assertEquals(array.size, N);
        for (int i = 0; i < bags.length; i++)
        {
            assertBagEqual((DataBag) array.get(i), bags[i]);
        }
    }
    /** Round-trips tuples with a nested bag-of-tuples column through ColumnarTupleStore. */
    @Test
    public void testNestedSchema() throws Exception
    {
        ColumnType tupleFieldType =
                new ColumnType("element", DataType.TUPLE, new BlockSchema("STRING name, STRING term, FLOAT value"));
        BlockSchema tupleSchema = new BlockSchema(new ColumnType[] { tupleFieldType });
        ColumnType bagType = new ColumnType("bag", DataType.BAG, tupleSchema);
        BlockSchema schema = new BlockSchema(new ColumnType[]
                {
                        new ColumnType("member_id", DataType.INT),
                        bagType
                });
        final int N = 10000;
        int counter = 1;
        Tuple[] data = new Tuple[N];
        for (int i = 0; i < N; i++)
        {
            Tuple[] tuplesInBag = new Tuple[(i % 5) + 1];
            for (int j = 0; j < tuplesInBag.length; j++)
            {
                // Every third value is null to exercise null handling in the store.
                tuplesInBag[j] = createTuple("name " + counter,
                                             "term " + counter,
                                             (counter % 3 == 0) ? null : counter * 1.0f);
                counter++;
            }
            DataBag bag = createBag(tuplesInBag);
            data[i] = createTuple(i, bag);
        }
        ColumnarTupleStore store = new ColumnarTupleStore(schema, true);
        for (Tuple t: data)
            store.addToStore(t);
        Assert.assertEquals(store.getNumTuples(), N);
        for (int i = 0; i < N; i++)
        {
            Tuple actual = store.getTuple(i, null);
            Tuple expected = data[i];
            Assert.assertEquals(actual.get(0), expected.get(0));
            assertBagEqual((DataBag) actual.get(1), (DataBag) expected.get(1));
        }
    }
    /** Builds a Pig tuple from the given field values. */
    private Tuple createTuple(Object... args) throws ExecException
    {
        Tuple tuple = TupleFactory.getInstance().newTuple(args.length);
        for (int i = 0; i < args.length; i++)
        {
            tuple.set(i, args[i]);
        }
        return tuple;
    }
    /** Builds a Pig bag containing the given tuples. */
    private DataBag createBag(Tuple... tuples)
    {
        DataBag bag = BagFactory.getInstance().newDefaultBag();
        for (Tuple tuple: tuples)
            bag.add(tuple);
        return bag;
    }
    /** Asserts that two bags contain equal tuples in the same iteration order. */
    private void assertBagEqual(DataBag bag1, DataBag bag2)
    {
        Iterator<Tuple> it1 = bag1.iterator();
        Iterator<Tuple> it2 = bag2.iterator();
        while (it1.hasNext())
        {
            Assert.assertTrue(it2.hasNext());
            Tuple tuple1 = it1.next();
            Tuple tuple2 = it2.next();
            Assert.assertEquals(tuple1, tuple2, tuple1.toString() + " != " + tuple2.toString());
        }
        Assert.assertFalse(it2.hasNext());
    }
}
| linkedin/Cubert | src/test/java/com/linkedin/cubert/memory/TestSegmentedArrayLists.java | Java | apache-2.0 | 9,815 |
using System;
using Telerik.Data.Core;
using Telerik.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
namespace Telerik.UI.Xaml.Controls.Data
{
    /// <summary>
    /// Base service that tracks a "current item" for a data control: moving currency,
    /// raising <see cref="CurrentChanging"/>/<see cref="CurrentChanged"/>, and keeping
    /// in sync with an <see cref="ICollectionView"/> items source when one is present.
    /// Concrete subclasses supply item navigation and owner-state updates.
    /// </summary>
    internal abstract class DataCurrencyService : ServiceBase<RadControl>
    {
        // internal bool isSynchronizedWithCurrent;

        // When true, a currency change may also scroll the current item into view.
        internal bool ensureCurrentIntoView = true;

        // Backing field for CurrentItem.
        private object currentItem;

        // Re-entrancy guard: true while ChangeCurrentCoreOverride is running so
        // nested ChangeCurrentItem calls (e.g. from event handlers) are rejected.
        private bool updatingCurrent;

        // The items source, when it implements ICollectionView; used to mirror its CurrentItem.
        private ICollectionView itemsSourceAsCollectionView;

        public DataCurrencyService(RadControl owner)
            : base(owner)
        {
        }

        /// <summary>Raised after the current item has changed.</summary>
        public event EventHandler CurrentChanged;

        /// <summary>Raised before the current item changes; handlers may cancel.</summary>
        public event CurrentChangingEventHandler CurrentChanging;

        /// <summary>Gets the item that is currently "current", or null.</summary>
        public object CurrentItem
        {
            get
            {
                return this.currentItem;
            }
        }

        // NOTE(review): the guard is the entire body — this virtual hook currently
        // does nothing when a current item exists; looks like a placeholder for
        // subclasses. Confirm intended.
        internal virtual void OnDataViewChanged(ViewChangedEventArgs args)
        {
            if (this.currentItem == null)
            {
                return;
            }
        }

        /// <summary>
        /// Re-applies owner state (without scrolling) when group expand/collapse
        /// may have changed where the current item is displayed.
        /// </summary>
        internal void OnGroupExpandStateChanged()
        {
            if (this.currentItem == null)
            {
                return;
            }
            this.UpdateOwnerState(false);
        }

        /// <summary>Moves currency to <paramref name="item"/>; true if it became current.</summary>
        internal bool MoveCurrentTo(object item)
        {
            this.ChangeCurrentItem(item, true, true);
            // The change may have been canceled, so verify what is actually current.
            return object.ReferenceEquals(this.currentItem, item);
        }

        /// <summary>Moves currency to the item after the current one; false at the end.</summary>
        internal virtual bool MoveCurrentToNext()
        {
            var nextItem = this.FindPreviousOrNextItem(this.currentItem, true);
            if (nextItem == null)
            {
                return false;
            }
            this.ChangeCurrentItem(nextItem, true, true);
            return object.ReferenceEquals(this.currentItem, nextItem);
        }

        /// <summary>Moves currency to the item before the current one; false at the start.</summary>
        internal bool MoveCurrentPrevious()
        {
            var prevItem = this.FindPreviousOrNextItem(this.currentItem, false);
            if (prevItem == null)
            {
                return false;
            }
            this.ChangeCurrentItem(prevItem, true, true);
            return object.ReferenceEquals(this.currentItem, prevItem);
        }

        /// <summary>Moves currency to the first item; false when there are no items.</summary>
        internal bool MoveCurrentToFirst()
        {
            var firstItem = this.FindFirstItem();
            if (firstItem == null)
            {
                return false;
            }
            this.ChangeCurrentItem(firstItem, true, true);
            return object.ReferenceEquals(this.currentItem, firstItem);
        }

        /// <summary>Moves currency to the last item; false when there are no items.</summary>
        internal bool MoveCurrentToLast()
        {
            var lastItem = this.FindLastItem();
            if (lastItem == null)
            {
                return false;
            }
            this.ChangeCurrentItem(lastItem, true, true);
            return object.ReferenceEquals(this.currentItem, lastItem);
        }

        // NOTE(review): intentionally empty — selection changes do not affect
        // currency here. Confirm this is by design and not an unfinished stub.
        internal void OnSelectedItemChanged(object newItem)
        {
        }

        /// <summary>
        /// Rewires the ICollectionView.CurrentChanged subscription when the items
        /// source changes, and adopts the new source's current item (or null).
        /// </summary>
        internal void OnItemsSourceChanged(object newSource)
        {
            if (this.itemsSourceAsCollectionView != null)
            {
                this.itemsSourceAsCollectionView.CurrentChanged -= this.OnItemsSourceCurrentChanged;
            }
            this.itemsSourceAsCollectionView = newSource as ICollectionView;
            if (this.itemsSourceAsCollectionView != null)
            {
                this.itemsSourceAsCollectionView.CurrentChanged += this.OnItemsSourceCurrentChanged;
                // Non-cancelable, no scroll: we are synchronizing, not navigating.
                this.ChangeCurrentItem(this.itemsSourceAsCollectionView.CurrentItem, false, false);
            }
            else
            {
                this.ChangeCurrentItem(null, false, false);
            }
        }

        /// <summary>Called after data binding completes to refresh owner state.</summary>
        internal virtual void OnDataBindingComplete(bool scrollToCurrent)
        {
            this.UpdateOwnerState(scrollToCurrent);
        }

        /// <summary>
        /// Requests a currency change. Returns false while another change is in
        /// progress, true immediately when the item is already current.
        /// </summary>
        internal bool ChangeCurrentItem(object newCurrentItem, bool cancelable, bool scrollToCurrent)
        {
            if (this.updatingCurrent)
            {
                return false;
            }
            if (object.ReferenceEquals(this.currentItem, newCurrentItem))
            {
                return true;
            }
            return this.ChangeCurrentCore(newCurrentItem, cancelable, scrollToCurrent);
        }

        /// <summary>Re-runs the currency pipeline for the existing current item.</summary>
        internal virtual bool RefreshCurrentItem(bool scrollToCurrent)
        {
            return this.ChangeCurrentCore(this.currentItem, false, scrollToCurrent);
        }

        /// <summary>Finds the item adjacent to <paramref name="currentItem"/> in the data view.</summary>
        protected abstract object FindPreviousOrNextItem(object currentItem, bool next);

        /// <summary>Finds the first item in the data view, or null.</summary>
        protected abstract object FindFirstItem();

        /// <summary>Finds the last item in the data view, or null.</summary>
        protected abstract object FindLastItem();

        /// <summary>
        /// Runs the cancelable phase (CurrentChanging) and, if not canceled,
        /// commits the change via <see cref="ChangeCurrentCoreOverride"/>.
        /// </summary>
        protected bool ChangeCurrentCore(object newCurrent, bool cancelable, bool scrollToCurrent)
        {
            // Raise CurrentChanging first
            bool cancel = this.PreviewCancelCurrentChanging(cancelable);
            if (cancel)
            {
                // the change is canceled
                return false;
            }
            return this.ChangeCurrentCoreOverride(newCurrent, scrollToCurrent);
        }

        /// <summary>
        /// Commits the currency change: updates the field, refreshes owner state,
        /// and raises CurrentChanged if the item actually changed. The
        /// updatingCurrent flag blocks re-entrant changes for the duration.
        /// </summary>
        protected virtual bool ChangeCurrentCoreOverride(object newCurrent, bool scrollToCurrent)
        {
            this.updatingCurrent = true;
            var oldCurrent = this.currentItem;
            this.currentItem = newCurrent;
            this.UpdateOwnerState(scrollToCurrent);
            if (!object.ReferenceEquals(oldCurrent, this.currentItem))
            {
                this.OnCurrentChanged(EventArgs.Empty);
            }
            this.updatingCurrent = false;
            return true;
        }

        /// <summary>Updates the owning control's visual/current state.</summary>
        protected abstract void UpdateOwnerState(bool scrollToCurrent);

        // NOTE(review): intentionally empty — the subscription exists only so it
        // can be detached/reattached in OnItemsSourceChanged. Confirm no
        // synchronization from the view back to this service is expected.
        private void OnItemsSourceCurrentChanged(object sender, object e)
        {
        }

        /// <summary>
        /// Raises CurrentChanging and reports whether a handler canceled the change.
        /// </summary>
        private bool PreviewCancelCurrentChanging(bool cancelable)
        {
            var eh = this.CurrentChanging;
            if (eh == null)
            {
                return false;
            }
            var args = new CurrentChangingEventArgs(cancelable);
            eh(this.Owner, args);
            return args.Cancel;
        }

        /// <summary>Raises the CurrentChanged event, if anyone is listening.</summary>
        private void OnCurrentChanged(EventArgs args)
        {
            var eh = this.CurrentChanged;
            if (eh != null)
            {
                eh(this.Owner, args);
            }
        }
    }
} | geotinc/UI-For-UWP | Controls/DataControls/DataControls.UWP/Common/DataCurrencyService.cs | C# | apache-2.0 | 6,338 |
package cn.com.esrichina.adapter.vcloud.network;
import java.util.ArrayList;
import java.util.List;
import cn.com.esrichina.adapter.AdapterException;
import cn.com.esrichina.adapter.domain.INetwork;
import cn.com.esrichina.vcloud.VdcEntity;
import com.vmware.vcloud.api.rest.schema.AllocatedIpAddressType;
import com.vmware.vcloud.api.rest.schema.IpRangeType;
import com.vmware.vcloud.api.rest.schema.IpRangesType;
import com.vmware.vcloud.api.rest.schema.IpScopeType;
import com.vmware.vcloud.api.rest.schema.IpScopesType;
import com.vmware.vcloud.api.rest.schema.NetworkConfigurationType;
import com.vmware.vcloud.api.rest.schema.ReferenceType;
import com.vmware.vcloud.sdk.AllocatedIpAddress;
import com.vmware.vcloud.sdk.OrgVdcNetwork;
import com.vmware.vcloud.sdk.VCloudException;
import com.vmware.vcloud.sdk.VcloudClient;
import com.vmware.vcloud.sdk.Vdc;
/**
 * Adapter over a vCloud {@link OrgVdcNetwork} exposing its identity, status,
 * IP allocation data and sub-network configuration through {@link INetwork}.
 */
public class NetworkConfigOfVdc implements INetwork {
    private String networkConfigName;
    /** Optional explicit fence mode; when null, the SDK configuration value is used. */
    private String fenceMode;
    private OrgVdcNetwork orgVdcNetwork;
    private VcloudClient vcloudClient;
    /** Last result of {@link #subNetworkConfigs()}; rebuilt on every call. */
    private List<SubNetworkConfig> subNetworkConfigs = new ArrayList<SubNetworkConfig>();

    public NetworkConfigOfVdc() {
    }

    public NetworkConfigOfVdc(String networkConfigName, VcloudClient vcloudClient, OrgVdcNetwork orgVdcNetwork) {
        this.networkConfigName = networkConfigName;
        this.vcloudClient = vcloudClient;
        this.orgVdcNetwork = orgVdcNetwork;
    }

    /** Resolves the vDC that owns this network; returns null when the SDK lookup fails. */
    public VdcEntity getVdc() {
        VdcEntity vdce = null;
        try {
            ReferenceType vdcRef = orgVdcNetwork.getVdcReference();
            Vdc vdc = Vdc.getVdcByReference(vcloudClient, vdcRef);
            vdce = new VdcEntity(vdc.getResource().getName(), vcloudClient, vdc);
        } catch (VCloudException e) {
            e.printStackTrace();
        }
        return vdce;
    }

    public String getNetworkConfigName() {
        return networkConfigName;
    }

    /** Maps the SDK status code to "normal" (1) or "abnormal" (anything else). */
    public String getStatus() {
        if (orgVdcNetwork.getResource().getStatus() == 1) {
            return "normal";
        } else {
            return "abnormal";
        }
    }

    @Override
    public String getName() throws AdapterException {
        return orgVdcNetwork.getResource().getName();
    }

    @Override
    public String getId() throws AdapterException {
        // NOTE(review): returns the network name rather than a distinct ID — confirm intended.
        return orgVdcNetwork.getResource().getName();
    }

    /** Addresses already handed out on this network, per the vCloud API. */
    @Override
    public List<String> getAllocatedIpAddresses() {
        List<String> allocated = new ArrayList<String>();
        try {
            List<AllocatedIpAddress> allocatedIpAddresses = orgVdcNetwork.getAllocatedAddresses();
            for (AllocatedIpAddress aia : allocatedIpAddresses) {
                AllocatedIpAddressType resource = aia.getResource();
                if (resource != null) {
                    allocated.add(resource.getIpAddress());
                }
            }
        } catch (VCloudException e) {
            e.printStackTrace();
        }
        return allocated;
    }

    /** Every address in every enabled IP range of this network. */
    @Override
    public List<String> getAllIpAddresses() {
        return collectIpAddresses(null);
    }

    /** Addresses in the enabled ranges that are not currently allocated. */
    @Override
    public List<String> getAvailableIpAddresses() throws AdapterException {
        // available = all - allocated
        return collectIpAddresses(getAllocatedIpAddresses());
    }

    /**
     * Expands all enabled IP ranges into individual addresses, skipping any
     * address contained in {@code excluded} (pass null to exclude nothing).
     * Extracted to remove the duplicated expansion logic previously copied in
     * getAllIpAddresses() and getAvailableIpAddresses().
     *
     * NOTE(review): like the original code, this assumes a range's start and end
     * differ only in the last octet; ranges spanning an octet boundary would be
     * expanded incorrectly — confirm such ranges cannot occur here.
     */
    private List<String> collectIpAddresses(List<String> excluded) {
        List<String> addresses = new ArrayList<String>();
        NetworkConfigurationType configuration = orgVdcNetwork.getConfiguration();
        IpScopesType ipScopes = configuration.getIpScopes();
        List<IpScopeType> scopes = ipScopes.getIpScope();
        if (scopes == null) {
            return addresses;
        }
        for (IpScopeType scope : scopes) {
            if (!scope.isIsEnabled()) {
                continue;
            }
            IpRangesType ranges = scope.getIpRanges();
            for (IpRangeType range : ranges.getIpRange()) {
                String start = range.getStartAddress();
                String end = range.getEndAddress();
                if (start == null || end == null) {
                    continue;
                }
                String prefix = start.substring(0, start.lastIndexOf(".") + 1);
                int from = Integer.parseInt(start.substring(start.lastIndexOf(".") + 1));
                int to = Integer.parseInt(end.substring(end.lastIndexOf(".") + 1));
                for (int i = from; i <= to; i++) {
                    String ip = prefix + i;
                    if (excluded == null || !excluded.contains(ip)) {
                        addresses.add(ip);
                    }
                }
            }
        }
        return addresses;
    }

    public OrgVdcNetwork getOrgVdcNetwork() {
        return orgVdcNetwork;
    }

    /**
     * Builds the sub-network configuration (DNS, gateway, IP ranges) for every
     * enabled IP scope.
     * Fixed: the result is rebuilt on each call; previously every invocation
     * appended to the same field, duplicating entries on repeated calls.
     */
    public List<SubNetworkConfig> subNetworkConfigs() {
        List<SubNetworkConfig> result = new ArrayList<SubNetworkConfig>();
        NetworkConfigurationType configuration = orgVdcNetwork.getConfiguration();
        IpScopesType ipScopes = configuration.getIpScopes();
        List<IpScopeType> scopes = ipScopes.getIpScope();
        if (scopes != null) {
            for (IpScopeType scope : scopes) {
                if (!scope.isIsEnabled()) {
                    continue;
                }
                SubNetworkConfig subNetworkConfig = new SubNetworkConfig();
                subNetworkConfig.setDns1(scope.getDns1());
                subNetworkConfig.setDns2(scope.getDns2());
                subNetworkConfig.setDnsSuffix(scope.getDnsSuffix());
                subNetworkConfig.setGateway(scope.getGateway());
                List<IpRange> ipRanges = new ArrayList<IpRange>();
                for (IpRangeType rangeType : scope.getIpRanges().getIpRange()) {
                    IpRange ipRange = new IpRange();
                    ipRange.setStartIpAddress(rangeType.getStartAddress());
                    ipRange.setEndIpAddress(rangeType.getEndAddress());
                    ipRanges.add(ipRange);
                }
                subNetworkConfig.setIpRanges(ipRanges);
                result.add(subNetworkConfig);
            }
        }
        this.subNetworkConfigs = result;
        return result;
    }

    public String getFenceMode() {
        if (fenceMode != null) {
            return fenceMode;
        }
        return orgVdcNetwork.getConfiguration().getFenceMode();
    }
}
| mouse3150/blooming | general-iaas-adapter/src/main/java/cn/com/esrichina/adapter/vcloud/network/NetworkConfigOfVdc.java | Java | apache-2.0 | 7,645 |
/*
* Copyright 2019 Kantega AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kantega.respiro.collector.cxf;
import org.kantega.respiro.collector.Collector;
import org.kantega.respiro.collector.ExchangeInfo;
import org.kantega.respiro.collector.ExchangeMessage;
import org.kantega.respiro.cxf.api.EndpointCustomizer;
import org.kantega.respiro.cxf.api.ServiceCustomizer;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.interceptor.LoggingInInterceptor;
import org.apache.cxf.interceptor.LoggingMessage;
import org.apache.cxf.interceptor.LoggingOutInterceptor;
import org.apache.cxf.jaxws.EndpointImpl;
import org.apache.cxf.message.Exchange;
import org.apache.cxf.message.Message;
import javax.xml.ws.BindingProvider;
import javax.xml.ws.Endpoint;
import java.util.logging.Logger;
/**
 * Wires CXF logging interceptors into JAX-WS endpoints and clients so every
 * SOAP exchange is captured by the {@link Collector}. The interceptors abuse
 * {@code formatLoggingMessage} as a capture hook and suppress CXF's own
 * logging by overriding {@code log} with a no-op.
 */
public class MessageCollectorCustomizer implements EndpointCustomizer,ServiceCustomizer {
    // Carries the CXF exchange from the in-interceptor to the out-interceptor
    // on the same request thread.
    // NOTE(review): the value is never remove()d, so each pooled thread keeps a
    // reference to its last exchange — confirm this retention is acceptable.
    ThreadLocal<Exchange> exchangeLocal = new ThreadLocal<>();
    @Override
    public void customizeEndpoint(Endpoint endpoint) {
        EndpointImpl e = (EndpointImpl) endpoint;
        // Inbound: remember the exchange and open a collection context with the request.
        e.getInInterceptors().add(new LoggingInInterceptor() {
            @Override
            public void handleMessage(Message message) throws Fault {
                // Stash the exchange before CXF builds the LoggingMessage.
                exchangeLocal.set(message.getExchange());
                super.handleMessage(message);
            }
            @Override
            protected String formatLoggingMessage(LoggingMessage loggingMessage) {
                Exchange exchange = exchangeLocal.get();
                exchange.put(ExchangeInfo.EXCHANGE_INFO, Collector.newCollectionContext(new CxfExhangeMessage(ExchangeMessage.Type.REQUEST, loggingMessage)));
                // Return value would be the log line; we only use this as a capture hook.
                return null;
            }
            @Override
            protected void log(Logger logger, String message) {
                // Empty
            }
        });
        // Outbound (normal and fault paths): attach the response and close the context.
        LoggingOutInterceptor outInterceptor = new LoggingOutInterceptor() {
            @Override
            protected String formatLoggingMessage(LoggingMessage loggingMessage) {
                Exchange exchange = exchangeLocal.get();
                ExchangeInfo exchangeInfo = (ExchangeInfo) exchange.get(ExchangeInfo.EXCHANGE_INFO);
                exchangeInfo.setOutMessage(new CxfExhangeMessage(ExchangeMessage.Type.RESPONSE, loggingMessage));
                Collector.endCollectionContext();
                Collector.clearCollectionContext();
                return null;
            }
            @Override
            protected void log(Logger logger, String message) {
                // Empty
            }
        };
        e.getOutInterceptors().add(outInterceptor);
        e.getOutFaultInterceptors().add(outInterceptor);
    }
    @Override
    public void customizeService(BindingProvider bindingProvider) {
        Client client = ClientProxy.getClient(bindingProvider);
        // Outgoing backend call: record the request into the current context, if any.
        client.getOutInterceptors().add(new LoggingOutInterceptor() {
            @Override
            protected String formatLoggingMessage(LoggingMessage loggingMessage) {
                Collector.getCurrent().ifPresent(
                        exchangeInfo -> exchangeInfo.addBackendMessage(new CxfExhangeMessage(ExchangeMessage.Type.REQUEST, loggingMessage))
                );
                return null;
            }
            @Override
            protected void log(Logger logger, String message) {
            }
        });
        // Incoming backend reply (normal and fault paths): record the response.
        LoggingInInterceptor inInterceptor = new LoggingInInterceptor() {
            @Override
            protected String formatLoggingMessage(LoggingMessage loggingMessage) {
                Collector.getCurrent().ifPresent(
                        exchangeInfo -> exchangeInfo.addBackendMessage(new CxfExhangeMessage(ExchangeMessage.Type.RESPONSE, loggingMessage))
                );
                return null;
            }
            @Override
            protected void log(Logger logger, String message) {
            }
        };
        client.getInInterceptors().add(inInterceptor);
        client.getInFaultInterceptors().add(inInterceptor);
    }
}
| kantega/respiro | plugins/core/message-collector/src/main/java/org/kantega/respiro/collector/cxf/MessageCollectorCustomizer.java | Java | apache-2.0 | 4,695 |
/* Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package framework
import (
"encoding/json"
"errors"
"fmt"
"sort"
"strconv"
"strings"
"sync"
utils "github.com/elodina/go-mesos-utils"
)
// Cluster is the scheduler's in-memory task registry plus the persistence
// hooks (Save/Load) that survive framework restarts.
type Cluster struct {
	frameworkID string          // Mesos framework ID, restored from storage on Load
	storage     utils.Storage   // backing store (file or zookeeper) for Save/Load
	tasks       map[string]Task // task ID -> task; guarded by taskLock
	taskLock    sync.Mutex      // guards tasks
}
// NewCluster builds a Cluster backed by the storage named in Config.Storage.
// Panics when the storage specification cannot be parsed, since the framework
// cannot run without persistence.
func NewCluster() *Cluster {
	backing, err := NewStorage(Config.Storage)
	if err != nil {
		panic(err)
	}
	cluster := &Cluster{
		storage: backing,
		tasks:   map[string]Task{},
	}
	return cluster
}
// Exists reports whether a task with the given ID is registered.
func (c *Cluster) Exists(id string) bool {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	_, found := c.tasks[id]
	return found
}
// Add registers a task under its own ID, replacing any existing entry with
// the same ID, and logs the addition.
// Fixed: removed a leftover debug fmt.Println of task.Data() — the task is
// already reported via Logger.Infof below.
func (c *Cluster) Add(task Task) {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	c.tasks[task.Data().ID] = task
	Logger.Infof("Added task:\n%s", task)
}
// Remove deletes the task with the given ID from the registry and logs it.
// Removing an unknown ID is a no-op (delete on a missing key is safe).
func (c *Cluster) Remove(id string) {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	delete(c.tasks, id)
	Logger.Infof("Removed task %s", id)
}
// Get returns the task registered under id, or the zero Task value if absent.
// Fixed: now takes taskLock like every other accessor of c.tasks — the
// previous unguarded map read raced with concurrent Add/Remove.
func (c *Cluster) Get(id string) Task {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	return c.tasks[id]
}
// GetAllTasks returns a snapshot slice of every registered task, in map
// iteration order (i.e. unspecified order).
func (c *Cluster) GetAllTasks() []Task {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	snapshot := make([]Task, 0, len(c.tasks))
	for _, t := range c.tasks {
		snapshot = append(snapshot, t)
	}
	return snapshot
}
// GetConstrained returns every task's data viewed through the
// utils.Constrained interface, for placement-constraint matching.
func (c *Cluster) GetConstrained() []utils.Constrained {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	result := make([]utils.Constrained, 0, len(c.tasks))
	for _, t := range c.tasks {
		result = append(result, t.Data())
	}
	return result
}
// GetTasksWithState returns a snapshot of all tasks currently in the given state.
func (c *Cluster) GetTasksWithState(state TaskState) []Task {
	c.taskLock.Lock()
	defer c.taskLock.Unlock()
	matching := make([]Task, 0)
	for _, t := range c.tasks {
		if t.Data().State != state {
			continue
		}
		matching = append(matching, t)
	}
	return matching
}
// IsReconciling reports whether any task is still in the reconciling state.
func (c *Cluster) IsReconciling() bool {
	reconciling := c.GetTasksWithState(TaskStateReconciling)
	return len(reconciling) != 0
}
// ExpandIDs expands a comma-separated list of ID range expressions into a
// sorted list of task ID strings. A "*" term expands to the IDs of all known
// tasks and returns immediately (any terms after it are ignored — original
// behavior kept); other terms are parsed via utils.ParseRange.
// Returns an error for an empty expression or an unparsable range.
// Fixed: removed the redundant else-after-return branch (indent-error-flow).
func (c *Cluster) ExpandIDs(expr string) ([]string, error) {
	if expr == "" {
		return nil, errors.New("ID expression cannot be empty")
	}
	ids := make([]string, 0)
	for _, rangeExpr := range strings.Split(expr, ",") {
		if rangeExpr == "*" {
			for _, task := range c.GetAllTasks() {
				ids = append(ids, task.Data().ID)
			}
			sort.Strings(ids)
			return ids, nil
		}
		rng, err := utils.ParseRange(rangeExpr)
		if err != nil {
			return nil, err
		}
		for _, value := range rng.Values() {
			ids = append(ids, strconv.Itoa(value))
		}
	}
	sort.Strings(ids)
	return ids, nil
}
// Save serializes the framework ID and all known tasks as a JSON document and
// writes it to the configured storage backend. Panics if marshalling fails.
func (c *Cluster) Save() {
	state := map[string]interface{}{
		"frameworkID": c.frameworkID,
		"tasks":       c.GetAllTasks(),
	}
	encoded, err := json.Marshal(state)
	if err != nil {
		panic(err)
	}
	c.storage.Save(encoded)
}
// Load restores the framework ID and task registry from persistent storage.
// Missing or unreadable state is treated as a fresh start (warn and return);
// malformed state panics, since continuing with partial scheduler state would
// be unsafe.
func (c *Cluster) Load() {
	js, err := c.storage.Load()
	if err != nil || js == nil {
		Logger.Warnf("Could not load cluster state from %s, assuming no cluster state available...", c.storage)
		return
	}
	// Decode in two stages: first split the document into raw JSON sections,
	// then unmarshal each section into its concrete type. This avoids relying
	// on interface{}-based decoding for the heterogeneous task list.
	jsonMap := make(map[string]json.RawMessage)
	err = json.Unmarshal(js, &jsonMap)
	if err != nil {
		panic(err)
	}
	err = json.Unmarshal(jsonMap["frameworkID"], &c.frameworkID)
	if err != nil {
		panic(err)
	}
	rawTasks := make([]map[string]json.RawMessage, 0)
	err = json.Unmarshal(jsonMap["tasks"], &rawTasks)
	if err != nil {
		panic(err)
	}
	for _, rawTask := range rawTasks {
		// Decode the common payload, then dispatch on the "type" tag to
		// rebuild the concrete task implementation.
		taskData := &TaskData{}
		err = json.Unmarshal(rawTask["data"], taskData)
		if err != nil {
			panic(err)
		}
		var taskType string
		// Error deliberately ignored: a missing/invalid tag leaves taskType
		// empty and falls through to the panic in the default case below.
		json.Unmarshal(rawTask["type"], &taskType)
		switch taskType {
		case TaskTypeMirrorMaker:
			c.Add(&MirrorMakerTask{&CommonTask{TaskData: taskData}})
		case TaskTypeConsumer:
			c.Add(&ConsumerTask{&CommonTask{TaskData: taskData}})
		default:
			panic(fmt.Errorf("Unknown task type %s", taskType))
		}
	}
}
// NewStorage parses a "scheme:location" storage specification and returns the
// matching backend: "file:<path>" or "zk:<connect-string>". Any other form
// yields an error.
func NewStorage(storage string) (utils.Storage, error) {
	parts := strings.SplitN(storage, ":", 2)
	if len(parts) != 2 {
		return nil, fmt.Errorf("Unsupported storage")
	}
	scheme, location := parts[0], parts[1]
	switch scheme {
	case "file":
		return utils.NewFileStorage(location), nil
	case "zk":
		return utils.NewZKStorage(location)
	}
	return nil, fmt.Errorf("Unsupported storage")
}
| elodina/go-kafka-client-mesos | framework/cluster.go | GO | apache-2.0 | 4,921 |
package ru.job4j.lite.sort;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
/**
* Class SortUser.
*
* @author Aleksey Slivko
* @version $1.0$
* @since 02.06.2017
*/
public class SortUser {
    /**
     * Converts the list to a TreeSet ordered by User's natural ordering
     * (compareTo, i.e. by age per the class contract).
     * Note: TreeSet keeps only one element per compareTo-equal group, so users
     * comparing as equal (same age) are collapsed to a single entry.
     * @param list of users.
     * @return TreeSet of users ordered by age.
     */
    public Set<User> sort(List<User> list) {
        Set<User> users = new TreeSet<>();
        users.addAll(list);
        return users;
    }
    /**
     * Sorts the list in place by hashCode, ascending.
     * @param list of users.
     * @return the same list, sorted by hashCode.
     */
    public List<User> sortHash(List<User> list) {
        list.sort((o1, o2) -> Integer.compare(o1.hashCode(), o2.hashCode()));
        return list;
    }
    /**
     * Sorts the list in place by name length, ascending.
     * Uses Integer.compare instead of subtraction for consistency with
     * sortHash and to avoid the subtraction-comparator anti-pattern.
     * @param list of users.
     * @return the same list, sorted by name length.
     */
    public List<User> sortLength(List<User> list) {
        list.sort((o1, o2) -> Integer.compare(o1.getName().length(), o2.getName().length()));
        return list;
    }
}
| sllexa/junior | chapter_003/src/main/java/ru/job4j/lite/sort/SortUser.java | Java | apache-2.0 | 1,048 |
/**
* Copyright 2010 - 2022 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.exodus.util;
/**
* Non-concurrent replacement of java.util.Random.
*/
public class Random {
private static final long multiplier = 0x5DEECE66DL;
private static final long addend = 0xBL;
private static final long mask = (1L << 48) - 1;
private long seed;
public Random() {
reset();
}
public Random(long seed) {
setSeed(seed);
}
public int nextInt() {
return next(32);
}
public int nextInt(int n) {
if (n <= 0)
throw new IllegalArgumentException("n must be positive");
if ((n & -n) == n) // i.e., n is a power of 2
return (int) ((n * (long) next(31)) >> 31);
int bits, val;
do {
bits = next(31);
val = bits % n;
} while (bits - val + (n - 1) < 0);
return val;
}
public long nextLong() {
// it's okay that the bottom word remains signed.
return ((long) (next(32)) << 32) + next(32);
}
public boolean nextBoolean() {
return next(1) != 0;
}
public float nextFloat() {
return next(24) / ((float) (1 << 24));
}
public double nextDouble() {
return (((long) (next(26)) << 27) + next(27)) / (double) (1L << 53);
}
public void nextBytes(byte[] bytes) {
int i = 0;
int len = bytes.length;
while (i < len)
for (int rnd = nextInt(),
n = Math.min(len - i, Integer.SIZE / Byte.SIZE);
n-- > 0; rnd >>= Byte.SIZE)
bytes[i++] = (byte) rnd;
}
public void setSeed(long seed) {
this.seed = (seed ^ multiplier) & mask;
for (int i = 0; i < 10; ++i) {
nextInt();
}
}
public void reset() {
setSeed(System.currentTimeMillis());
}
public int next(int bits) {
seed = (seed * multiplier + addend) & mask;
return (int) (seed >>> (48 - bits));
}
}
| JetBrains/xodus | utils/src/main/java/jetbrains/exodus/util/Random.java | Java | apache-2.0 | 2,585 |
package com.suscipio_solutions.consecro_mud.Abilities.Prayers;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.MendingSkill;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Wearable;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
@SuppressWarnings("rawtypes")
/**
 * "Rock Flesh" prayer: a restoration/mending ability that dispels a
 * flesh-to-stone affliction (Spell_FleshStone or Prayer_FleshRock) from a
 * target MOB. Only affects targets that currently carry one of those effects.
 */
public class Prayer_RockFlesh extends Prayer implements MendingSkill
{
	@Override public String ID() { return "Prayer_RockFlesh"; }
	private final static String localizedName = CMLib.lang().L("Rock Flesh");
	@Override public String name() { return localizedName; }
	// Classified as a restoration-domain prayer.
	@Override public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_RESTORATION;}
	@Override protected int canTargetCode(){return CAN_ITEMS;}
	@Override public long flags(){return Ability.FLAG_HOLY;}
	@Override public int abstractQuality(){ return Ability.QUALITY_INDIFFERENT;}
	/**
	 * A target is "mendable" by this prayer only when it is a MOB that
	 * currently carries one of the two petrification effects.
	 */
	@Override
	public boolean supportsMending(Physical item)
	{
		if(!(item instanceof MOB)) return false;
		return (item.fetchEffect("Spell_FleshStone")!=null)||(item.fetchEffect("Prayer_FleshRock")!=null);
	}
	/**
	 * Finds an uninvokable petrification effect on the target, runs the usual
	 * invocation/proficiency checks, and on success announces the prayer and
	 * removes (unInvokes) the effect. Returns whether the dispel succeeded.
	 */
	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final Physical target=getTarget(mob,mob.location(),givenTarget,commands,Wearable.FILTER_UNWORNONLY);
		if(target==null) return false;
		// Locate the first revocable petrification effect on the target.
		Ability revokeThis=null;
		for(int a=0;a<target.numEffects();a++) // personal affects
		{
			final Ability A=target.fetchEffect(a);
			if((A!=null)&&(A.canBeUninvoked())
			&&((A.ID().equalsIgnoreCase("Spell_FleshStone"))
				||(A.ID().equalsIgnoreCase("Prayer_FleshRock"))))
			{
				revokeThis=A;
				break;
			}
		}
		if(revokeThis==null)
		{
			if(auto)
				mob.tell(L("Nothing happens."));
			else
				mob.tell(mob,target,null,L("<T-NAME> can not be affected by this prayer."));
			return false;
		}
		// Standard ability invocation gate (mana, targeting, etc.).
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			// Announce the casting; if the room accepts the message, remove the effect.
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> @x1 to dispel @x2 from <T-NAMESELF>.^?",prayForWord(mob),revokeThis.name()));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				revokeThis.unInvoke();
			}
		}
		else
			beneficialWordsFizzle(mob,target,L("<S-NAME> @x1 on <T-YOUPOSS> behalf, but flub(s) it.",prayWord(mob)));
		// return whether it worked
		return success;
	}
}
| ConsecroMUD/ConsecroMUD | com/suscipio_solutions/consecro_mud/Abilities/Prayers/Prayer_RockFlesh.java | Java | apache-2.0 | 2,809 |
// Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;
namespace Alachisoft.NCache.Client
{
    /// <summary>
    /// Specifies the reason an item was removed from the <see cref="ICache"/>.
    /// </summary>
    /// <remarks>
    /// This enumeration works in concert with the <see cref="CacheItemRemovedCallback"/> delegate to
    /// notify your applications when and why an object was removed from the <see cref="ICache"/>.</remarks>
    ///<requirements>
    /// <constraint>This member is not available in SessionState edition.</constraint>
    /// </requirements>
    [Serializable]
    public enum CacheItemRemovedReason
    {
        /// <summary>
        /// The item is removed from the cache because it expired.
        /// </summary>
        Expired,
        /// <summary>
        /// The item is removed from the cache by a <see cref="ICache.Remove"/> method call or by an
        /// insert call that specified the same key.
        /// </summary>
        Removed,
        /// <summary>
        /// The item is removed from the cache because the system evicted it to free memory.
        /// </summary>
        Underused
    }
}
| Alachisoft/NCache | Src/NCClient/Web/Caching/CacheItemRemovedReason.cs | C# | apache-2.0 | 1,628 |
# Copyright (c) 2012 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
from setuptools import setup
def get_static_files(path):
    """Return package_data glob patterns for every directory under *path*.

    Walks the tree rooted at *path* (expected to live inside the ``luigi``
    package directory, e.g. ``luigi/static``) and emits one glob pattern per
    known static-asset extension per directory, relative to the package.

    Fix: strip only the leading ``luigi`` path component with
    ``os.path.relpath``. The previous ``dirpath.replace("luigi/", "")``
    removed *every* occurrence of ``"luigi/"`` anywhere in the path, and
    never matched on Windows where ``os.walk`` yields backslash separators.
    """
    extensions = ["*.html", "*.js", "*.css", "*.png",
                  "*.eot", "*.svg", "*.ttf", "*.woff", "*.woff2"]
    return [os.path.join(os.path.relpath(dirpath, "luigi"), ext)
            for (dirpath, dirnames, filenames) in os.walk(path)
            for ext in extensions]
# Glob patterns for all static web assets and templates bundled with the package.
luigi_package_data = sum(map(get_static_files, ["luigi/static", "luigi/templates"]), [])
# Prepended to the README so the PyPI long description links back to GitHub.
readme_note = """\
.. note::
   For the latest source, discussion, etc, please visit the
   `GitHub repository <https://github.com/spotify/luigi>`_\n\n
"""
with open('README.rst') as fobj:
    long_description = readme_note + fobj.read()
# Core runtime dependencies.
install_requires = [
    'tornado>=4.0,<5',
    'python-daemon<3.0',
]
# On Read the Docs the dependency set is adjusted so the docs build succeeds.
if os.environ.get('READTHEDOCS', None) == 'True':
    # So that we can build documentation for luigi.db_task_history and luigi.contrib.sqla
    install_requires.append('sqlalchemy')
    # readthedocs don't like python-daemon, see #1342
    install_requires.remove('python-daemon<3.0')
    install_requires.append('sphinx>=1.4.4')  # Value mirrored in doc/conf.py
# Package metadata. The console_scripts entry points install the luigi CLI
# tools; package_data ships the static web assets used by the luigid UI.
setup(
    name='luigi',
    version='2.3.3patched1',
    description='Workflow mgmgt + task scheduling + dependency resolution',
    long_description=long_description,
    author='Erik Bernhardsson',
    url='https://github.com/spotify/luigi',
    license='Apache License 2.0',
    packages=[
        'luigi',
        'luigi.contrib',
        'luigi.contrib.hdfs',
        'luigi.tools'
    ],
    package_data={
        'luigi': luigi_package_data
    },
    entry_points={
        'console_scripts': [
            'luigi = luigi.cmdline:luigi_run',
            'luigid = luigi.cmdline:luigid',
            'luigi-grep = luigi.tools.luigi_grep:main',
            'luigi-deps = luigi.tools.deps:main',
            'luigi-deps-tree = luigi.tools.deps_tree:main',
            'luigi-migrate = luigi.tools.migrate:main'
        ]
    },
    install_requires=install_requires,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: System :: Monitoring',
    ],
)
| Viktor-Evst/fixed-luigi | setup.py | Python | apache-2.0 | 3,100 |
<?php
/**
 * Czech (cs) language strings for the core template layer.
 * Keys are L_* template placeholders; values are the localized strings and
 * must stay in Czech — do not translate the values.
 * NOTE(review): 'Homepage', 'Slug', 'Text' and 'OK' are left untranslated —
 * confirm that is intentional.
 */
$tpl->assign(array(
	'L_INDEX' => 'Homepage',
	'L_SITEMAP' => 'Sitemap',
	'L_ACP' => 'Administrace',
	'L_ACP_INDEX' => 'Úvod administrace',
	'L_NO' => 'Ne',
	'L_YES' => 'Ano',
	'L_BACK' => 'Zpět na předchozí stránku',
	'L_NEXT' => 'Další »',
	'L_PREV' => '« Předchozí',
	'L_CONFIG' => 'Nastavení',
	'L_CONFIG_SUBMIT' => 'Změnit nastavení',
	'L_HEADER' => 'Nadpis',
	'L_SLUG' => 'Slug',
	'L_SLUG_GENERATE' => 'Vygenerovat automaticky <em>(doporučeno!)</em>',
	'L_SLUG_GENERATE_NOREC' => 'Vygenerovat automaticky z nadpisu',
	'L_SLUG_KEEP_ORIGINAL' => 'Ponechat původní <em>(doporučeno!)</em>',
	'L_SLUG_INPUT' => 'Zadat vlastní',
	'L_DATE' => 'Datum',
	'L_VALUE' => 'Hodnota',
	'L_TEXT' => 'Text',
	'L_LINK' => 'Odkaz',
	'L_MOVE' => 'Posun',
	'L_UP' => 'Nahoru',
	'L_DOWN' => 'Dolu',
	'L_SEARCHED_TERM' => 'Hledaný termín',
	'L_MODULE_LINK' => 'Strana modulu',
	'L_OVERVIEW' => 'Přehled',
	'L_ERROR' => 'Chyba',
	'L_SUCCESS' => 'OK',
	'L_EDIT' => 'Upravit',
	'L_DELETE' => 'Smazat',
	'L_SHOW' => 'Zobrazit',
	'L_HIDE' => 'Skrýt',
	'L_SHOWING' => 'Zobrazení',
	// PERMISSIONS
	'L_PERMISSIONS_VALUE.read' => 'Číst',
	'L_PERMISSIONS_VALUE.edit' => 'Upravovat',
	'L_PERMISSIONS_VALUE.delete' => 'Mazat',
	'L_PERMISSIONS_VALUE.add' => 'Přidávat'
));
?> | jankuca/geecms | langs/cs/core.lang.php | PHP | apache-2.0 | 1,361 |
// Copyright 2017 The Ray Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ray/core_worker/store_provider/memory_store/memory_store.h"
#include "absl/synchronization/mutex.h"
#include "gtest/gtest.h"
#include "ray/common/test_util.h"
namespace ray {
namespace core {
inline std::shared_ptr<ray::LocalMemoryBuffer> MakeBufferFromString(const uint8_t *data,
size_t data_size) {
auto metadata = const_cast<uint8_t *>(data);
auto meta_buffer =
std::make_shared<ray::LocalMemoryBuffer>(metadata, data_size, /*copy_data=*/true);
return meta_buffer;
}
// Convenience overload: copies the contents of a std::string into a new
// LocalMemoryBuffer.
inline std::shared_ptr<ray::LocalMemoryBuffer> MakeLocalMemoryBufferFromString(
    const std::string &str) {
  const auto *bytes = reinterpret_cast<const uint8_t *>(str.data());
  return MakeBufferFromString(bytes, str.size());
}
// Verifies that the memory store's unhandled-error callback fires only when an
// error object is deleted without ever having been retrieved (sync or async).
TEST(TestMemoryStore, TestReportUnhandledErrors) {
  std::vector<std::shared_ptr<RayObject>> results;
  WorkerContext context(WorkerType::WORKER, WorkerID::FromRandom(), JobID::FromInt(0));
  int unhandled_count = 0;
  // The lambda is the unhandled-error hook under test; it just counts calls.
  std::shared_ptr<CoreWorkerMemoryStore> provider =
      std::make_shared<CoreWorkerMemoryStore>(
          nullptr, nullptr, nullptr, [&](const RayObject &obj) { unhandled_count++; });
  RayObject obj1(rpc::ErrorType::TASK_EXECUTION_EXCEPTION);
  RayObject obj2(rpc::ErrorType::TASK_EXECUTION_EXCEPTION);
  auto id1 = ObjectID::FromRandom();
  auto id2 = ObjectID::FromRandom();
  // Check basic put and get.
  ASSERT_TRUE(provider->GetIfExists(id1) == nullptr);
  RAY_CHECK(provider->Put(obj1, id1));
  RAY_CHECK(provider->Put(obj2, id2));
  ASSERT_TRUE(provider->GetIfExists(id1) != nullptr);
  ASSERT_EQ(unhandled_count, 0);
  // Check delete without get.
  // NOTE(review): two error objects are deleted unseen but only one callback
  // is expected here — presumably the store coalesces; confirm against the
  // CoreWorkerMemoryStore implementation.
  provider->Delete({id1, id2});
  ASSERT_EQ(unhandled_count, 1);
  unhandled_count = 0;
  // Check delete after get: a retrieved error counts as handled.
  RAY_CHECK(provider->Put(obj1, id1));
  RAY_CHECK(provider->Put(obj1, id2));
  RAY_UNUSED(provider->Get({id1}, 1, 100, context, false, &results));
  RAY_UNUSED(provider->Get({id2}, 1, 100, context, false, &results));
  provider->Delete({id1, id2});
  ASSERT_EQ(unhandled_count, 0);
  // Check delete after async get: callbacks registered before or after the Put
  // also mark the error as handled.
  provider->GetAsync({id2}, [](std::shared_ptr<RayObject> obj) {});
  RAY_CHECK(provider->Put(obj1, id1));
  RAY_CHECK(provider->Put(obj2, id2));
  provider->GetAsync({id1}, [](std::shared_ptr<RayObject> obj) {});
  provider->Delete({id1, id2});
  ASSERT_EQ(unhandled_count, 0);
}
// Verifies GetMemoryStoreStatisticalData() against an independent recount of
// the store's internal object map, across puts and deletes.
TEST(TestMemoryStore, TestMemoryStoreStats) {
  /// Simple validation for test memory store stats.
  std::shared_ptr<CoreWorkerMemoryStore> provider =
      std::make_shared<CoreWorkerMemoryStore>(nullptr, nullptr, nullptr, nullptr);
  // Iterate through the memory store and compare the values that are obtained by
  // GetMemoryStoreStatisticalData.
  auto fill_expected_memory_stats = [&](MemoryStoreStats &expected_item) {
    {
      // Reaches into the store's internals (mu_, objects_) — this test is a
      // friend of / has access to CoreWorkerMemoryStore.
      absl::MutexLock lock(&provider->mu_);
      for (const auto &it : provider->objects_) {
        if (it.second->IsInPlasmaError()) {
          expected_item.num_in_plasma += 1;
        } else {
          expected_item.num_local_objects += 1;
          expected_item.used_object_store_memory += it.second->GetSize();
        }
      }
    }
  };
  RayObject obj1(rpc::ErrorType::OBJECT_IN_PLASMA);
  RayObject obj2(rpc::ErrorType::TASK_EXECUTION_EXCEPTION);
  RayObject obj3(rpc::ErrorType::TASK_EXECUTION_EXCEPTION);
  auto id1 = ObjectID::FromRandom();
  auto id2 = ObjectID::FromRandom();
  auto id3 = ObjectID::FromRandom();
  RAY_CHECK(provider->Put(obj1, id1));
  RAY_CHECK(provider->Put(obj2, id2));
  RAY_CHECK(provider->Put(obj3, id3));
  provider->Delete({id3});
  // Stats after one delete.
  MemoryStoreStats expected_item;
  fill_expected_memory_stats(expected_item);
  MemoryStoreStats item = provider->GetMemoryStoreStatisticalData();
  ASSERT_EQ(item.num_in_plasma, expected_item.num_in_plasma);
  ASSERT_EQ(item.num_local_objects, expected_item.num_local_objects);
  ASSERT_EQ(item.used_object_store_memory, expected_item.used_object_store_memory);
  // Delete all other objects and see if stats are recorded correctly.
  provider->Delete({id1, id2});
  MemoryStoreStats expected_item2;
  fill_expected_memory_stats(expected_item2);
  item = provider->GetMemoryStoreStatisticalData();
  ASSERT_EQ(item.num_in_plasma, expected_item2.num_in_plasma);
  ASSERT_EQ(item.num_local_objects, expected_item2.num_local_objects);
  ASSERT_EQ(item.used_object_store_memory, expected_item2.used_object_store_memory);
  // Re-populate the store and check the stats recover.
  RAY_CHECK(provider->Put(obj1, id1));
  RAY_CHECK(provider->Put(obj2, id2));
  RAY_CHECK(provider->Put(obj3, id3));
  MemoryStoreStats expected_item3;
  fill_expected_memory_stats(expected_item3);
  item = provider->GetMemoryStoreStatisticalData();
  ASSERT_EQ(item.num_in_plasma, expected_item3.num_in_plasma);
  ASSERT_EQ(item.num_local_objects, expected_item3.num_local_objects);
  ASSERT_EQ(item.used_object_store_memory, expected_item3.used_object_store_memory);
}
/// Tracks how many bytes of test-buffer memory are currently outstanding,
/// mocking a manager that is aware of memory pressure.
/// NOTE(review): "GetBuferPressureInBytes" is misspelled ("Bufer") but the
/// name is kept because callers in this file use it.
class MockBufferManager {
 public:
  /// Number of bytes acquired and not yet released.
  int64_t GetBuferPressureInBytes() const { return outstanding_bytes_; }

  /// Record an allocation of `sz` bytes.
  void AcquireMemory(int64_t sz) { outstanding_bytes_ += sz; }

  /// Record a release of `sz` bytes.
  void ReleaseMemory(int64_t sz) { outstanding_bytes_ -= sz; }

 private:
  int64_t outstanding_bytes_ = 0;
};
// A Buffer implementation backed by a std::string, bound to the
// MockBufferManager that accounted for its allocation.
class TestBuffer : public Buffer {
 public:
  explicit TestBuffer(MockBufferManager &manager, std::string data)
      : manager_(manager), data_(std::move(data)) {}
  // Buffer::Data() returns a mutable pointer, while the bytes live inside a
  // std::string — hence the const_cast.
  uint8_t *Data() const override {
    return reinterpret_cast<uint8_t *>(const_cast<char *>(data_.data()));
  }
  size_t Size() const override { return data_.size(); }
  // The string member owns the bytes.
  bool OwnsData() const override { return true; }
  bool IsPlasmaBuffer() const override { return false; }
  const MockBufferManager &GetBufferManager() const { return manager_; }
 private:
  MockBufferManager &manager_;
  std::string data_;
};
// Verifies that a custom object allocator passed to the memory store is used
// for every Put: each stored object's data is rebuilt through TestBuffer, and
// the MockBufferManager observes the total bytes acquired.
TEST(TestMemoryStore, TestObjectAllocator) {
  MockBufferManager mock_buffer_manager;
  auto my_object_allocator = [&mock_buffer_manager](const ray::RayObject &object,
                                                    const ObjectID &object_id) {
    auto buf = object.GetData();
    // Account for the allocation before handing out the lazily-built copy.
    mock_buffer_manager.AcquireMemory(buf->Size());
    auto data_factory = [&mock_buffer_manager, object]() -> std::shared_ptr<ray::Buffer> {
      auto buf = object.GetData();
      std::string data(reinterpret_cast<char *>(buf->Data()), buf->Size());
      return std::make_shared<TestBuffer>(mock_buffer_manager, data);
    };
    return std::make_shared<ray::RayObject>(object.GetMetadata(), object.GetNestedRefs(),
                                            std::move(data_factory), /*copy_data=*/true);
  };
  std::shared_ptr<CoreWorkerMemoryStore> memory_store =
      std::make_shared<CoreWorkerMemoryStore>(nullptr, nullptr, nullptr, nullptr,
                                              std::move(my_object_allocator));
  const int32_t max_rounds = 1000;
  const std::string hello = "hello";
  // Put max_rounds distinct objects; each should go through the allocator once.
  for (auto i = 0; i < max_rounds; ++i) {
    auto hello_buffer = MakeLocalMemoryBufferFromString(hello);
    std::vector<rpc::ObjectReference> nested_refs;
    auto hello_object =
        std::make_shared<ray::RayObject>(hello_buffer, nullptr, nested_refs, true);
    memory_store->Put(*hello_object, ObjectID::FromRandom());
  }
  // Every Put acquired hello.size() bytes and nothing was released.
  ASSERT_EQ(max_rounds * hello.size(), mock_buffer_manager.GetBuferPressureInBytes());
}
} // namespace core
} // namespace ray
// Standalone gtest entry point for the memory store tests.
int main(int argc, char **argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
| ray-project/ray | src/ray/core_worker/test/memory_store_test.cc | C++ | apache-2.0 | 8,128 |
//>>built
// Generated (built) dojo nls bundle: Italian strings for the dojox editor
// Blockquote plugin. Do not edit by hand — regenerate via the dojo build.
define("dojox/editor/plugins/nls/it/Blockquote",{blockquote:"Blockquote"});
//# sourceMappingURL=Blockquote.js.map | Caspar12/zh.sw | zh.web.site.admin/src/main/resources/static/js/dojo/dojox/editor/plugins/nls/it/Blockquote.js | JavaScript | apache-2.0 | 124 |
package base.widget;
import android.content.Context;
import android.util.AttributeSet;
import io.vov.vitamio.widget.VideoView;
/**
* Created by shsun on 17/2/25.
*/
/**
 * A Vitamio {@link VideoView} that measures itself so the displayed video
 * keeps its intrinsic aspect ratio: whichever dimension would distort the
 * picture is shrunk to fit.
 */
public class XBaseVitamioVideoView extends VideoView {

    public XBaseVitamioVideoView(Context context) {
        super(context);
    }

    public XBaseVitamioVideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public XBaseVitamioVideoView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Start from the default measurement, then correct for aspect ratio.
        int width = getDefaultSize(getVideoWidth(), widthMeasureSpec);
        int height = getDefaultSize(getVideoHeight(), heightMeasureSpec);
        if (getVideoWidth() > 0 && getVideoHeight() > 0) {
            // Cross-multiplied comparison avoids integer-division rounding:
            // videoW/videoH vs width/height.
            if (getVideoWidth() * height > width * getVideoHeight()) {
                // Too tall for the video — reduce the height.
                height = width * getVideoHeight() / getVideoWidth();
            } else if (getVideoWidth() * height < width * getVideoHeight()) {
                // Too wide for the video — reduce the width.
                width = height * getVideoWidth() / getVideoHeight();
            }
        }
        setMeasuredDimension(width, height);
    }
}
| shsun/AndroidMediaCache | shsunframework/src/main/java/base/widget/XBaseVitamioVideoView.java | Java | apache-2.0 | 1,373 |
<?php
namespace Mtg\Controller;
use Mtg\Model\Repository\RuleRepository;
use Zend\Mvc\Controller\AbstractRestfulController;
use Zend\View\Model\JsonModel;
class RuleController extends AbstractRestfulController
{
    /**
     * @var RuleRepository
     */
    protected $ruleRepository;

    /**
     * @param RuleRepository $ruleRepository rule persistence gateway
     */
    public function __construct(RuleRepository $ruleRepository)
    {
        $this->ruleRepository = $ruleRepository;
    }

    /**
     * GET /rule/:id — returns a single rule as JSON, or an error payload
     * when the id is unknown.
     */
    public function get($id)
    {
        /* @var $rule \Mtg\Model\Rule */
        $rule = $this->ruleRepository->find($id);
        if (!$rule) {
            return new JsonModel(array(
                'error' => sprintf('Can\'t find rule "%s"', $id),
            ));
        }
        return new JsonModel($this->ruleToArray($rule));
    }

    /**
     * GET .../search/:token — full-text search over the rules. The token must
     * be present and at least 4 characters long.
     */
    public function searchAction()
    {
        $token = $this->params('token');
        if (!$token) {
            return new JsonModel(array(
                'error' => 'Search token can\'t be empty',
            ));
        } elseif (strlen($token) < 4) {
            return new JsonModel(array(
                'error' => 'Search token must be at least 4 characters long',
            ));
        }
        $result = array();
        $rules = $this->ruleRepository->findByFulltextSearch($token);
        /* @var $rule \Mtg\Model\Rule */
        foreach ($rules as $rule) {
            $result[] = $this->ruleToArray($rule);
        }
        return new JsonModel($result);
    }

    /**
     * Flattens a rule entity into the array shape shared by both endpoints.
     * Extracted to remove the duplicated child-id-list construction that was
     * inlined in both get() and searchAction().
     *
     * @param \Mtg\Model\Rule $rule
     * @return array
     */
    private function ruleToArray($rule)
    {
        $childIdList = array();
        foreach ($rule->getChildRules() as $childRule) {
            $childIdList[] = $childRule->getId();
        }
        return array(
            'id' => $rule->getId(),
            'parent_id' => $rule->getParentRule()->getId(),
            'sub_id' => $rule->getSubId(),
            'child_rules' => $childIdList,
            'ruletext' => $rule->getRuletext(),
        );
    }
}
| BreyndotEchse/mtg-bot | module/Mtg/src/Mtg/Controller/RuleController.php | PHP | apache-2.0 | 2,291 |
<?php
// $Id$
require_once(TANGRA_MAIN_DIR.'web_site/redirect_composer_local.class.php');
require_once(DIR_INC.'modules/jabba_ajax_form_ctrl/jabba_ajax_form_ctrl_with_object.class.php');
require_once(DIR_INC.'modules/admin_panel/classes/admin_user/admin_user_dbc.class.php');
abstract class Site_Admin_Ajax_Form_Ctrl_With_Object_UA extends Jabba_Ajax_Form_Ctrl_With_Object {
private $ua_config;
protected $user;
function __construct($tpl_file = '', $ua_config) {
parent::__construct($tpl_file);
$this->set_ua_config($ua_config);
}
	/**
	 * Stores the user-auth configuration array used by _run().
	 */
	public function set_ua_config($ua_config) {
		$this->ua_config = $ua_config;
	}
	/**
	 * Returns the user-auth configuration array set via the constructor.
	 */
	public function get_ua_config() {
		return $this->ua_config;
	}
	/**
	 * Entry point: runs the controller only when an authenticated User object
	 * is found in the session variable named by the config; in every other
	 * case an AJAX redirect-to-login command is sent instead.
	 */
	public function _run() {
		$user_auth_config = $this->get_ua_config();
		$svm = $this->get_svm();
		if ($svm->is_global_var_registered($user_auth_config['session_var_name'])) {
			$user = $svm->get_global_var($user_auth_config['session_var_name']);
			if ($user instanceof User) {
				// Authenticated: remember the user, expose transit vars, run.
				$this->user = $user;
				$_transit_vars_prepared = $this->prepare_transit_vars();
				$this->export('_transit_vars', $_transit_vars_prepared, true);
				$this->run();
			} else {
				// Session var exists but holds something other than a User.
				$this->send_redirect_command();
			}
		} else {
			$this->send_redirect_command();
		}
	}
	/**
	 * Returns the authenticated User captured by _run(), or null before it ran.
	 */
	public function get_user() {
		return $this->user;
	}
	/**
	 * Emits an AJAX "redirect" response pointing the client at the configured
	 * login page (resolved through Redirect_Composer_Local).
	 */
	private function send_redirect_command() {
		$user_auth_config = $this->get_ua_config();
		$redir_composer = new Redirect_Composer_Local($this->get__context(), $user_auth_config['login_page']);
		$context = $this->get__context();
		$location = $redir_composer->get_target_address($context);
		$rez = array('result_code' => Ajax_Ctrl::STATUS_OK, 'command' => Ajax_Ctrl::COMMAND_REDIRECT, 'command_value' => $location);
		$this->send_response($rez);
	}
} | ogrebgr/forge-server-skeleton-php | hidden/inc/modules/admin_panel/site_admin_ajax_form_ctrl_with_object_ua.class.php | PHP | apache-2.0 | 1,781 |
// ReSpec configuration for the Project ontology specification document.
// (ED = Editor's Draft; localBiblio supplies references not in SpecRef.)
var respecConfig = {
    specStatus: "ED",
    editors: [{
      name: "Simon Cox",
      company: "CSIRO",
      companyURL: "http://www.csiro.au/",
      w3cid: 1796
    }],
    otherLinks: [{
      key: "Contributors",
      data: [
        { value: "Peter Brenton" },
      ]
    }],
    shortName: "vocab-project",
    edDraftURI: "http://dr-shorthair.github.io/project-ont/docs/",
    // Working-group fields kept for reference; this draft is not on a WG track.
    // wg: "Data eXchange Working Group",
    // wgURI: "https://www.w3.org/2017/dxwg/",
    // wgPublicList: "public-dxwg-comments",
    // wgPatentURI: "https://www.w3.org/2004/01/pp-impl/75471/status",
    noRecTrack: true,
    overrideCopyright: "<p class='copyright'><a href='https://www.w3.org/Consortium/Legal/ipr-notice#Copyright'>Copyright</a> © 2017 <a href='http://www.csiro.au'><abbr title='Commonwealth Scientific and Industrial Organisation'>CSIRO</abbr></a> & <a href='https://www.w3.org/'> <abbr title='World Wide Web Consortium'>W3C</abbr> </a><sup>®</sup> , <abbr title='World Wide Web Consortium'>W3C</abbr> <a href='https://www.w3.org/Consortium/Legal/ipr-notice#Legal_Disclaimer'>liability</a>, <a href='https://www.w3.org/Consortium/Legal/ipr-notice#W3C_Trademarks'>trademark</a> and <a href='https://www.w3.org/Consortium/Legal/copyright-documents'>document use</a> rules apply.</p>",
    // Bibliography entries cited in the document but absent from SpecRef.
    localBiblio: {
      "schema-org":{
        href:"https://schema.org/",
        title:"Schema.org"
      },
      "dbpedia-ont":{
        href:"http://dbpedia.org/ontology/",
        title:"DBPedia ontology"
      },
      "doap":{
        href:"https://github.com/ewilderj/doap/wiki",
        title:"Description of a Project",
        authors: ["Edd Wilder-James"]
      },
      "frapo":{
        href:"http://www.sparontologies.net/ontologies/frapo",
        title:"FRAPO, the Funding, Research Administration and Projects Ontology",
        authors: ["David Shotton"],
        date: "04 September 2017"
      },
      "obo":{
        href:"http://www.obofoundry.org/",
        title:"The OBO Foundry"
      },
      "pdo":{
        href:"http://vocab.deri.ie/pdo",
        title:"Project Documents Ontology",
        authors: ["Pradeep Varma"],
        date: "09 July 2010"
      },
      "vivo-isf":{
        href:"http://github.com/vivo-isf/vivo-isf",
        title:"VIVO-ISF Data Standard"
      }
    },
    issueBase: "https://github.com/dr-shorthair/project-ont/issues"
};
| dr-shorthair/ont | docs/projconfig.js | JavaScript | apache-2.0 | 2,270 |
namespace CloudFoundry.Net.Vmc.Cli
{
using System;
using System.Collections.Generic;
using System.IO;
static partial class Program
{
static bool Push(IList<string> unparsed)
{
// TODO match ruby argument parsing
if (unparsed.Count < 3 || unparsed.Count > 4)
{
Console.Error.WriteLine("Usage: vmc push <appname> <path> <url> [service] --instances N --mem MB"); // TODO usage statement standardization
return false;
}
string appname = unparsed[0];
string path = unparsed[1];
string fqdn = unparsed[2];
string[] serviceNames = null;
if (unparsed.Count == 4)
{
serviceNames = new[] { unparsed[3] };
}
DirectoryInfo di = null;
if (Directory.Exists(path))
{
di = new DirectoryInfo(path);
}
else
{
Console.Error.WriteLine(String.Format("Directory '{0}' does not exist."));
return false;
}
IVcapClient vc = new VcapClient();
VcapClientResult rv = vc.Push(appname, fqdn, instances, di, memoryMB, serviceNames);
if (false == rv.Success)
{
Console.Error.WriteLine(rv.Message);
}
return rv.Success;
}
}
} | ademar/ironfoundry | src/CloudFoundry.Net.Vmc.Cli/Push.cs | C# | apache-2.0 | 1,501 |
/*
* Copyright (c) 2018-2020, FusionAuth, All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package io.fusionauth.der;
import java.util.Objects;
/**
* This object models a ASN.1 DER Tag.
*
* @author Daniel DeGroff
*/
public class Tag {
  /**
   * Bit String Tag
   */
  public static final int BitString = 3;

  /**
   * Integer Tag
   */
  public static final int Integer = 2;

  /**
   * Null Tag
   */
  public static final int Null = 5;

  /**
   * Object Identifier Tag
   */
  public static final int ObjectIdentifier = 6;

  /**
   * Octet String Tag
   */
  public static final int OctetString = 4;

  /**
   * PrintableString Tag
   * <p>
   * 19 decimal, 0x13 hex
   * </p>
   */
  public static final int PrintableString = 19;

  /**
   * Sequence Tag
   * <p>
   * 16 decimal, 0x10 hex, 0b00010000 binary
   * </p>
   * Because the Sequence tag is always in a constructed form (not primitive), the tag will present as <code>0x30</code> because
   * the 6th bit is a <code>1</code> indicating a constructed form. So the raw sequence of <code>0b00010000</code> becomes
   * <code>0b00110000</code> which is <code>48</code> decimal.
   */
  public static final int Sequence = 48;

  /**
   * Set and Set of
   * <p>
   * 17 decimal, 0x11 hex
   * </p>
   * NOTE(review): unlike {@link #Sequence}, this constant does not include the
   * constructed-form bit, so a Set read off the wire has a raw byte of 0x31 and
   * will not match this value in {@link #getName()} — confirm whether intended.
   */
  public static final int Set = 17;

  /**
   * UTCTime Tag
   * <p>
   * 23 decimal, 0x17 hex
   * </p>
   */
  public static final int UTCTime = 23;

  /**
   * True if this Tag is primitive. False if this Tag is constructed.
   */
  public final boolean primitive;

  /**
   * The raw byte read from the DER encoded array. This byte includes the class, form and tag number.
   */
  public final byte rawByte;

  /**
   * The class of this tag read from bits 8 and 7 of the raw byte.
   */
  public final TagClass tagClass;

  /**
   * The tag value in decimal. This value will only represent the decimal value of bits 5 to 1.
   *
   * <p>
   * For example, if this is a sequence tag, this value will be <code>16</code> and you should expect <code>primitive</code>
   * to be false. If you want the raw byte which will be <code>48</code> or <code>0x30</code> you can read <code>rawByte</code>.
   * </p>
   */
  public final int value;

  /**
   * Construct a new tag from the tag byte in the DER byte array. The following depicts the layout of the tag byte.
   *
   * <pre>
   *  ---------------------------------------------------------
   * |   b8   |   b7   |   b6   |  b5  |  b4 |  b3 |  b2 |  b1 |
   *  ---------------------------------------------------------
   * |______|      |        |___________________________|
   *     |         |                      |
   *     |         |-- [0] Primitive      |
   *     |         |-- [1] Constructed    |
   *     |                     Tag Number (value)
   *     | Class
   *     |---------------------------
   *     |-- 0 0 Universal
   *     |-- 0 1 Application
   *     |-- 1 0 Context Specific
   *     |-- 1 1 Private
   * </pre>
   *
   * @param value the tag value from the DER byte array
   */
  public Tag(int value) {
    // Keep the whole identifier octet (class + form + number) for equality,
    // hashing and name lookup.
    rawByte = (byte) value;
    tagClass = resolveTagClass(value);
    // Bit 6 clear means primitive form; set means constructed.
    primitive = (rawByte & 0b00100000) == 0;
    // The low 5 bits are the tag number itself.
    this.value = value & 0b00011111;
  }

  /**
   * Formats an int as an upper-case two-digit hex string, e.g. {@code 0x30}.
   */
  static String hexString(int value) {
    return "0x" + String.format("%02x", value).toUpperCase();
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof Tag)) {
      return false;
    }
    Tag tag = (Tag) o;
    // Two tags are equal iff their full identifier octets are equal.
    return rawByte == tag.rawByte;
  }

  /**
   * A human readable name for this tag, derived from the raw identifier byte.
   *
   * @return the tag name, or "Other" when the raw byte is not one of the known constants.
   */
  public String getName() {
    switch (rawByte) {
      case Integer:
        return "Integer";
      case BitString:
        return "Bit String";
      case Null:
        return "Null";
      case ObjectIdentifier:
        return "Object Identifier";
      case OctetString:
        return "Octet String";
      case PrintableString:
        return "PrintableString";
      case Sequence:
        return "Sequence";
      case Set:
        return "Set";
      case UTCTime:
        return "UTCTime";
      default:
        return "Other";
    }
  }

  @Override
  public int hashCode() {
    return Objects.hash(rawByte);
  }

  /**
   * @param tag a tag
   * @return true if this tag has the same value as requested (only the low 5 bits are compared)
   */
  public boolean is(int tag) {
    return value == (tag & 0b00011111);
  }

  public boolean isConstructed() {
    return !primitive;
  }

  public boolean isPrimitive() {
    return primitive;
  }

  @Override
  public String toString() {
    if (tagClass == TagClass.ContextSpecific) {
      return "[" + value + "]";
    }
    return value + " [" + getName() + ", " + hexString() + "]";
  }

  /**
   * Resolves the tag class from bits 8 and 7 of the identifier octet.
   * (Renamed from {@code setTagClass}: it returns a value and mutates nothing.)
   *
   * @param value the full identifier octet
   * @return the matching tag class
   * @throws IllegalArgumentException if no class matches (cannot occur for the four 2-bit patterns, kept for safety)
   */
  private TagClass resolveTagClass(int value) {
    for (TagClass tc : TagClass.values()) {
      if ((value & 0b11000000) == tc.value) {
        return tc;
      }
    }
    throw new IllegalArgumentException("Invalid tag value " + value + ", the tag does not appear to fit into one of the expected classes");
  }

  /**
   * Hex form of the masked tag number; delegates to the static formatter
   * instead of duplicating it.
   */
  String hexString() {
    return hexString(value);
  }
}
| inversoft/prime-jwt | src/main/java/io/fusionauth/der/Tag.java | Java | apache-2.0 | 5,877 |
import {LogManager} from 'aurelia-framework';
import {HttpClient} from 'aurelia-fetch-client';
import {Router} from 'aurelia-router';
import {Configure} from 'aurelia-configuration';
import 'fetch';
import {Authentication} from './authentication';
let logger = LogManager.getLogger('openbelapi-client');

/**
 * Fetch-based HTTP client for the OpenBEL API. Attaches a Bearer token to
 * every request and redirects to authentication when the backend answers 401.
 */
export class OpenbelApiClient {
    client;
    openbelApiUrl;

    static inject = [Authentication, Router, Configure];

    constructor(auth, router, config) {
        this.auth = auth;
        this.router = router;
        this.config = config;
        this.selectedOpenbelApiUrl = this.getApiUrl();
        this.client = this.configureClient(this.selectedOpenbelApiUrl);
    }

    /**
     * Builds the HttpClient with base URL, default headers and the
     * auth/401-handling interceptor.
     */
    configureClient(selectedOpenbelApiUrl){
        let self = this;
        let client = new HttpClient().configure(config => {
            config
                .withBaseUrl(selectedOpenbelApiUrl.api)
                .withDefaults({
                    credentials: 'same-origin',
                    headers: {
                        'Accept': 'application/hal+json',
                        'X-Requested-With': 'Fetch'
                    }
                })
                .rejectErrorResponses()
                .withInterceptor({
                    request(req) {
                        logger.debug(`Requesting ${req.method} ${req.url}`);
                        // NOTE(review): a block here used to parse id_token et
                        // al. out of location.href into an unused local; it had
                        // no effect and was removed (see TODOs below).
                        // TODO - rework this to check for id_token first
                        // TODO - update the location directly instead of redirecting in Aurelia
                        let token = self.auth.getToken();
                        req.headers.append('Authorization', 'Bearer ' + token);
                        return req; // you can return a modified Request, or you can short-circuit the request by returning a Response
                    },
                    response(resp) {
                        logger.debug(`Received ${resp.status} ${resp.url}`);
                        if (resp.status === 401) {
                            let rejection = Promise.reject(resp);
                            return rejection;
                        }
                        return resp; // you can return a modified Response
                    },
                    responseError(resp) {
                        if (resp.status === 401) {
                            logger.info('Backend returned HTTP 401, redirecting to loginUrl.');
                            // window.location.href = window.location.origin;
                            self.auth.authenticate(window.location.protocol, window.location.host, window.location.pathname, window.location.hash);
                        }
                        logger.debug(`Received ${resp.status} ${resp.url}`);
                        let rejection = Promise.reject(resp);
                        return rejection;
                    }
                });
        });
        return client;
    }

    /**
     * Returns the persisted API selection from localStorage, seeding it with
     * the first configured URL on first use.
     */
    getApiUrl() {
        let openbelApiUrls = this.config.get('openbelApiUrls');
        let selectedOpenbelApiUrl = JSON.parse(localStorage.getItem('selectedAPI'));
        if (! selectedOpenbelApiUrl) {
            localStorage.setItem('selectedAPI', JSON.stringify(openbelApiUrls[0]));
            return openbelApiUrls[0];
        }
        return selectedOpenbelApiUrl;
    }
}
| OpenBEL/belmgr | plugin/src/resources/openbelapi-client.js | JavaScript | apache-2.0 | 3,666 |
/*
* Copyright 2008 Web Cohesion
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webcohesion.ofx4j.domain.data.signup;
import com.webcohesion.ofx4j.domain.data.TransactionWrappedResponseMessage;
import com.webcohesion.ofx4j.meta.Aggregate;
import com.webcohesion.ofx4j.meta.ChildAggregate;
/**
* @author Ryan Heaton
*/
@Aggregate ( "ACCTINFOTRNRS" )
public class AccountInfoResponseTransaction extends TransactionWrappedResponseMessage<AccountInfoResponse> {
  // The wrapped OFX response payload; null when the server sent none.
  private AccountInfoResponse message;
  /**
   * The wrapped message.
   *
   * @return The wrapped message.
   */
  @ChildAggregate ( required = false, order = 30 )
  public AccountInfoResponse getMessage() {
    return message;
  }
  /**
   * The wrapped message.
   *
   * @param message The wrapped message.
   */
  public void setMessage(AccountInfoResponse message) {
    this.message = message;
  }
  // Inherited. Delegates through the virtual getter so subclasses overriding
  // getMessage() keep consistent behavior.
  public AccountInfoResponse getWrappedMessage() {
    return getMessage();
  }
} | stoicflame/ofx4j | src/main/java/com/webcohesion/ofx4j/domain/data/signup/AccountInfoResponseTransaction.java | Java | apache-2.0 | 1,497 |
<?php
namespace Aruna\Webmention;
/**
* Class VerifyWebmentionRequest
* @author yourname
*/
class VerifyWebmentionRequest
{
    /**
     * Validates an incoming webmention payload and returns it unchanged when
     * acceptable.
     *
     * @param array $mention expects 'source' and 'target' URL keys
     * @return array the validated mention
     * @throws \Exception when source/target are missing, identical, malformed,
     *                    or when the target is not a local (j4y.co) page
     */
    public function __invoke(array $mention)
    {
        if (!isset($mention['source']) || !isset($mention['target'])) {
            $m = "Missing source or target";
            throw new \Exception($m);
        }
        if ($mention['source'] == $mention['target']) {
            $m = "source and target are the same url";
            throw new \Exception($m);
        }
        if (!$this->validateUrl($mention['source'])) {
            $m = sprintf("source url is not valid [%s]", $mention['source']);
            throw new \Exception($m);
        }
        if (!$this->validateUrl($mention['target'])) {
            $m = sprintf("target url is not valid [%s]", $mention['target']);
            throw new \Exception($m);
        }
        if (!$this->isLocalTarget($mention['target'])) {
            $m = sprintf("target url is not valid [%s]", $mention['target']);
            throw new \Exception($m);
        }
        return $mention;
    }

    /**
     * True when the target URL lives on this site — webmentions may only
     * target local pages. (Replaces the confusingly-inverted validateTarget(),
     * which returned true for INVALID targets; behavior is unchanged.)
     */
    private function isLocalTarget($url)
    {
        $parts = parse_url($url);
        return isset($parts['host']) && $parts['host'] == "j4y.co";
    }

    /**
     * True when the URL parses and is an absolute http(s) URL with a host.
     */
    private function validateUrl($url)
    {
        $url = parse_url($url);
        if (!isset($url['scheme'])) {
            return false;
        }
        if (!isset($url['host'])) {
            return false;
        }
        if ($url['scheme'] != "http" && $url['scheme'] != "https") {
            return false;
        }
        return true;
    }
}
| j4y-funabashi/aruna | app/src/Webmention/VerifyWebmentionRequest.php | PHP | apache-2.0 | 1,624 |
#!/usr/bin/env python
import unittest
from socket import AF_INET6
from framework import VppTestCase, VppTestRunner
from vpp_sub_interface import VppSubInterface, VppDot1QSubint
from vpp_pg_interface import is_ipv6_misc
from vpp_ip_route import VppIpRoute, VppRoutePath, find_route, VppIpMRoute, \
VppMRoutePath, MRouteItfFlags, MRouteEntryFlags
from vpp_neighbor import find_nbr, VppNeighbor
from scapy.packet import Raw
from scapy.layers.l2 import Ether, Dot1Q
from scapy.layers.inet6 import IPv6, UDP, ICMPv6ND_NS, ICMPv6ND_RS, \
ICMPv6ND_RA, ICMPv6NDOptSrcLLAddr, getmacbyip6, ICMPv6MRD_Solicitation, \
ICMPv6NDOptMTU, ICMPv6NDOptSrcLLAddr, ICMPv6NDOptPrefixInfo, \
ICMPv6ND_NA, ICMPv6NDOptDstLLAddr, ICMPv6DestUnreach, icmp6types
from util import ppp
from scapy.utils6 import in6_getnsma, in6_getnsmac, in6_ptop, in6_islladdr, \
in6_mactoifaceid, in6_ismaddr
from scapy.utils import inet_pton, inet_ntop
def mk_ll_addr(mac):
    """Return the IPv6 link-local (fe80::/64) address derived from a MAC."""
    iface_id = in6_mactoifaceid(mac)
    return "fe80::" + iface_id
class TestIPv6ND(VppTestCase):
    """Shared helpers for validating IPv6 Neighbor Discovery replies."""

    def validate_ra(self, intf, rx, dst_ip=None):
        """Assert rx is a well-formed Router Advertisement from intf.

        :param intf: pg interface the RA was received on.
        :param rx: received scapy packet.
        :param dst_ip: expected IPv6 destination; defaults to intf.remote_ip6.
        """
        if not dst_ip:
            dst_ip = intf.remote_ip6
        # unicasted packets must come to the unicast mac
        self.assertEqual(rx[Ether].dst, intf.remote_mac)
        # and from the router's MAC
        self.assertEqual(rx[Ether].src, intf.local_mac)
        # the rx'd RA should be addressed to the sender's source
        self.assertTrue(rx.haslayer(ICMPv6ND_RA))
        self.assertEqual(in6_ptop(rx[IPv6].dst),
                         in6_ptop(dst_ip))
        # and come from the router's link local
        self.assertTrue(in6_islladdr(rx[IPv6].src))
        self.assertEqual(in6_ptop(rx[IPv6].src),
                         in6_ptop(mk_ll_addr(intf.local_mac)))

    def validate_na(self, intf, rx, dst_ip=None, tgt_ip=None):
        """Assert rx is a well-formed Neighbor Advertisement from intf.

        :param intf: pg interface the NA was received on.
        :param rx: received scapy packet.
        :param dst_ip: expected IPv6 destination; defaults to intf.remote_ip6.
        :param tgt_ip: expected NA target/source; defaults to intf.local_ip6.
        """
        if not dst_ip:
            dst_ip = intf.remote_ip6
        if not tgt_ip:
            # BUG FIX: this previously assigned intf.local_ip6 to dst_ip,
            # clobbering the destination default and leaving tgt_ip as None
            # (which would then fail in the in6_ptop(tgt_ip) comparison below).
            tgt_ip = intf.local_ip6
        # unicasted packets must come to the unicast mac
        self.assertEqual(rx[Ether].dst, intf.remote_mac)
        # and from the router's MAC
        self.assertEqual(rx[Ether].src, intf.local_mac)
        # the rx'd NA should be addressed to the sender's source
        self.assertTrue(rx.haslayer(ICMPv6ND_NA))
        self.assertEqual(in6_ptop(rx[IPv6].dst),
                         in6_ptop(dst_ip))
        # and come from the target address
        self.assertEqual(in6_ptop(rx[IPv6].src), in6_ptop(tgt_ip))
        # Dest link-layer options should have the router's MAC
        dll = rx[ICMPv6NDOptDstLLAddr]
        self.assertEqual(dll.lladdr, intf.local_mac)

    def send_and_expect_ra(self, intf, pkts, remark, dst_ip=None,
                           filter_out_fn=is_ipv6_misc):
        """Send pkts and expect exactly one Router Advertisement back on intf."""
        # NOTE(review): the stream is added both to intf and unconditionally to
        # pg0 — when intf is pg0 that is a duplicate add; confirm intended.
        intf.add_stream(pkts)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = intf.get_capture(1, filter_out_fn=filter_out_fn)
        self.assertEqual(len(rx), 1)
        rx = rx[0]
        self.validate_ra(intf, rx, dst_ip)

    def send_and_assert_no_replies(self, intf, pkts, remark):
        """Send pkts and assert that nothing at all is captured on intf."""
        intf.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        intf.assert_nothing_captured(remark=remark)
class TestIPv6(TestIPv6ND):
""" IPv6 Test Case """
    @classmethod
    def setUpClass(cls):
        # No class-wide fixtures beyond what VppTestCase provides.
        super(TestIPv6, cls).setUpClass()
    def setUp(self):
        """
        Perform test setup before test case.

        **Config:**
            - create 3 pg interfaces
                - untagged pg0 interface
                - Dot1Q subinterface on pg1
                - Dot1AD subinterface on pg2
            - setup interfaces:
                - put it into UP state
                - set IPv6 addresses
                - resolve neighbor address using NDP
            - configure 200 fib entries

        :ivar list interfaces: pg interfaces and subinterfaces.
        :ivar dict flows: IPv4 packet flows in test.
        :ivar list pg_if_packet_sizes: packet sizes in test.

        *TODO:* Create AD sub interface
        """
        super(TestIPv6, self).setUp()
        # create 3 pg interfaces
        self.create_pg_interfaces(range(3))
        # create 2 subinterfaces for p1 and pg2
        self.sub_interfaces = [
            VppDot1QSubint(self, self.pg1, 100),
            VppDot1QSubint(self, self.pg2, 200)
            # TODO: VppDot1ADSubint(self, self.pg2, 200, 300, 400)
        ]
        # packet flows mapping pg0 -> pg1.sub, pg2.sub, etc.
        # (each interface forwards to the other two)
        self.flows = dict()
        self.flows[self.pg0] = [self.pg1.sub_if, self.pg2.sub_if]
        self.flows[self.pg1.sub_if] = [self.pg0, self.pg2.sub_if]
        self.flows[self.pg2.sub_if] = [self.pg0, self.pg1.sub_if]
        # packet sizes (sub-interface sizes allow 4 bytes for the VLAN tag)
        self.pg_if_packet_sizes = [64, 512, 1518, 9018]
        self.sub_if_packet_sizes = [64, 512, 1518 + 4, 9018 + 4]
        self.interfaces = list(self.pg_interfaces)
        self.interfaces.extend(self.sub_interfaces)
        # setup all interfaces: admin-up, IPv6 address, NDP resolution
        for i in self.interfaces:
            i.admin_up()
            i.config_ip6()
            i.resolve_ndp()
        # config 2M FIB entries
        self.config_fib_entries(200)
    def tearDown(self):
        """Run standard test teardown and log ``show ip6 neighbors``."""
        # undo the per-test sub-interface configuration before base teardown
        for i in self.sub_interfaces:
            i.unconfig_ip6()
            i.ip6_disable()
            i.admin_down()
            i.remove_vpp_config()
        super(TestIPv6, self).tearDown()
        if not self.vpp_dead:
            self.logger.info(self.vapi.cli("show ip6 neighbors"))
            # info(self.vapi.cli("show ip6 fib"))  # many entries
    def config_fib_entries(self, count):
        """For each interface add to the FIB table *count* routes to
        "fd02::1/128" destination with interface's local address as next-hop
        address.

        :param int count: Number of FIB entries.

        - *TODO:* check if the next-hop address shouldn't be remote address
          instead of local address.
        """
        n_int = len(self.interfaces)
        percent = 0
        # float accumulator so `counter / count` below is true division
        # (this file is Python-2 era code where int / int truncates)
        counter = 0.0
        dest_addr = inet_pton(AF_INET6, "fd02::1")
        dest_addr_len = 128
        for i in self.interfaces:
            next_hop_address = i.local_ip6n
            # spread the routes evenly over the interfaces
            for j in range(count / n_int):
                self.vapi.ip_add_del_route(
                    dest_addr, dest_addr_len, next_hop_address, is_ipv6=1)
                counter += 1
                # log progress at most once per percent
                if counter / count * 100 > percent:
                    self.logger.info("Configure %d FIB entries .. %d%% done" %
                                     (count, percent))
                    percent += 1
    def create_stream(self, src_if, packet_sizes):
        """Create input packet stream for defined interface.

        Builds 257 UDP-in-IPv6 packets, alternating the destination between
        the two interfaces mapped to *src_if* in ``self.flows``, and cycling
        through *packet_sizes*.

        :param VppInterface src_if: Interface to create packet stream for.
        :param list packet_sizes: Required packet sizes.
        :returns: list of scapy packets ready to be sent.
        """
        pkts = []
        for i in range(0, 257):
            # Alternate destinations so traffic exercises both flow targets.
            dst_if = self.flows[src_if][i % 2]
            info = self.create_packet_info(src_if, dst_if)
            payload = self.info_to_payload(info)
            p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
                 IPv6(src=src_if.remote_ip6, dst=dst_if.remote_ip6) /
                 UDP(sport=1234, dport=1234) /
                 Raw(payload))
            # Remember the un-tagged packet so verify_capture can compare.
            info.data = p.copy()
            if isinstance(src_if, VppSubInterface):
                # Sub-interfaces need their VLAN tag(s) pushed on.
                p = src_if.add_dot1_layer(p)
            size = packet_sizes[(i // 2) % len(packet_sizes)]
            self.extend_packet(p, size)
            pkts.append(p)
        return pkts
def verify_capture(self, dst_if, capture):
"""Verify captured input packet stream for defined interface.
:param VppInterface dst_if: Interface to verify captured packet stream
for.
:param list capture: Captured packet stream.
"""
self.logger.info("Verifying capture on interface %s" % dst_if.name)
last_info = dict()
for i in self.interfaces:
last_info[i.sw_if_index] = None
is_sub_if = False
dst_sw_if_index = dst_if.sw_if_index
if hasattr(dst_if, 'parent'):
is_sub_if = True
for packet in capture:
if is_sub_if:
# Check VLAN tags and Ethernet header
packet = dst_if.remove_dot1_layer(packet)
self.assertTrue(Dot1Q not in packet)
try:
ip = packet[IPv6]
udp = packet[UDP]
payload_info = self.payload_to_info(str(packet[Raw]))
packet_index = payload_info.index
self.assertEqual(payload_info.dst, dst_sw_if_index)
self.logger.debug(
"Got packet on port %s: src=%u (id=%u)" %
(dst_if.name, payload_info.src, packet_index))
next_info = self.get_next_packet_info_for_interface2(
payload_info.src, dst_sw_if_index,
last_info[payload_info.src])
last_info[payload_info.src] = next_info
self.assertTrue(next_info is not None)
self.assertEqual(packet_index, next_info.index)
saved_packet = next_info.data
# Check standard fields
self.assertEqual(ip.src, saved_packet[IPv6].src)
self.assertEqual(ip.dst, saved_packet[IPv6].dst)
self.assertEqual(udp.sport, saved_packet[UDP].sport)
self.assertEqual(udp.dport, saved_packet[UDP].dport)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
for i in self.interfaces:
remaining_packet = self.get_next_packet_info_for_interface2(
i.sw_if_index, dst_sw_if_index, last_info[i.sw_if_index])
self.assertTrue(remaining_packet is None,
"Interface %s: Packet expected from interface %s "
"didn't arrive" % (dst_if.name, i.name))
    def test_fib(self):
        """ IPv6 FIB test

        Test scenario:

            - Create IPv6 stream for pg0 interface
            - Create IPv6 tagged streams for pg1's and pg2's subinterface.
            - Send and verify received packets on each interface.
        """
        # Queue untagged traffic on pg0 and tagged traffic on each sub-if.
        pkts = self.create_stream(self.pg0, self.pg_if_packet_sizes)
        self.pg0.add_stream(pkts)
        for i in self.sub_interfaces:
            pkts = self.create_stream(i, self.sub_if_packet_sizes)
            i.parent.add_stream(pkts)
        # Transmit everything and verify each interface received its flows.
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        pkts = self.pg0.get_capture()
        self.verify_capture(self.pg0, pkts)
        for i in self.sub_interfaces:
            pkts = i.parent.get_capture()
            self.verify_capture(i, pkts)
    def test_ns(self):
        """ IPv6 Neighbour Solicitation Exceptions

        Test scenario:

            - Send an NS Sourced from an address not covered by the link sub-net
            - Send an NS to an mcast address the router has not joined
            - Send NS for a target address the router does not own.
        """
        #
        # An NS from a non link source address
        #
        nsma = in6_getnsma(inet_pton(AF_INET6, self.pg0.local_ip6))
        d = inet_ntop(AF_INET6, nsma)
        p = (Ether(dst=in6_getnsmac(nsma)) /
             IPv6(dst=d, src="2002::2") /
             ICMPv6ND_NS(tgt=self.pg0.local_ip6) /
             ICMPv6NDOptSrcLLAddr(lladdr=self.pg0.remote_mac))
        pkts = [p]
        self.send_and_assert_no_replies(
            self.pg0, pkts,
            "No response to NS source by address not on sub-net")
        #
        # An NS for sent to a solicited mcast group the router is
        # not a member of FAILS
        # (disabled with "if 0" -- kept for reference; re-enable once the
        # behaviour is fixed)
        #
        if 0:
            nsma = in6_getnsma(inet_pton(AF_INET6, "fd::ffff"))
            d = inet_ntop(AF_INET6, nsma)
            p = (Ether(dst=in6_getnsmac(nsma)) /
                 IPv6(dst=d, src=self.pg0.remote_ip6) /
                 ICMPv6ND_NS(tgt=self.pg0.local_ip6) /
                 ICMPv6NDOptSrcLLAddr(lladdr=self.pg0.remote_mac))
            pkts = [p]
            self.send_and_assert_no_replies(
                self.pg0, pkts,
                "No response to NS sent to unjoined mcast address")
        #
        # An NS whose target address is one the router does not own
        #
        nsma = in6_getnsma(inet_pton(AF_INET6, self.pg0.local_ip6))
        d = inet_ntop(AF_INET6, nsma)
        p = (Ether(dst=in6_getnsmac(nsma)) /
             IPv6(dst=d, src=self.pg0.remote_ip6) /
             ICMPv6ND_NS(tgt="fd::ffff") /
             ICMPv6NDOptSrcLLAddr(lladdr=self.pg0.remote_mac))
        pkts = [p]
        self.send_and_assert_no_replies(self.pg0, pkts,
                                        "No response to NS for unknown target")
        #
        # A neighbor entry that has no associated FIB-entry
        #
        self.pg0.generate_remote_hosts(4)
        nd_entry = VppNeighbor(self,
                               self.pg0.sw_if_index,
                               self.pg0.remote_hosts[2].mac,
                               self.pg0.remote_hosts[2].ip6,
                               af=AF_INET6,
                               is_no_fib_entry=1)
        nd_entry.add_vpp_config()
        #
        # check we have the neighbor, but no route
        #
        self.assertTrue(find_nbr(self,
                                 self.pg0.sw_if_index,
                                 self.pg0._remote_hosts[2].ip6,
                                 inet=AF_INET6))
        self.assertFalse(find_route(self,
                                    self.pg0._remote_hosts[2].ip6,
                                    128,
                                    inet=AF_INET6))
    def validate_ra(self, intf, rx, dst_ip=None, mtu=9000, pi_opt=None):
        """Validate a received Router Advertisement.

        :param VppInterface intf: interface the RA was received on.
        :param rx: the received (scapy) packet.
        :param dst_ip: expected IPv6 destination; defaults to the peer's
            address on *intf*.
        :param int mtu: expected value of the RA's MTU option.
        :param pi_opt: expected prefix-information option(s): None (no PI
            option allowed), a single option, or a list of options.
        """
        if not dst_ip:
            dst_ip = intf.remote_ip6
        # unicasted packets must come to the unicast mac
        self.assertEqual(rx[Ether].dst, intf.remote_mac)
        # and from the router's MAC
        self.assertEqual(rx[Ether].src, intf.local_mac)
        # the rx'd RA should be addressed to the sender's source
        self.assertTrue(rx.haslayer(ICMPv6ND_RA))
        self.assertEqual(in6_ptop(rx[IPv6].dst),
                         in6_ptop(dst_ip))
        # and come from the router's link local
        self.assertTrue(in6_islladdr(rx[IPv6].src))
        self.assertEqual(in6_ptop(rx[IPv6].src),
                         in6_ptop(mk_ll_addr(intf.local_mac)))
        # it should contain the links MTU
        ra = rx[ICMPv6ND_RA]
        self.assertEqual(ra[ICMPv6NDOptMTU].mtu, mtu)
        # it should contain the source's link layer address option
        sll = ra[ICMPv6NDOptSrcLLAddr]
        self.assertEqual(sll.lladdr, intf.local_mac)
        if not pi_opt:
            # the RA should not contain prefix information
            self.assertFalse(ra.haslayer(ICMPv6NDOptPrefixInfo))
        else:
            raos = rx.getlayer(ICMPv6NDOptPrefixInfo, 1)
            # the options are nested in the scapy packet in a way that I cannot
            # decipher how to decode. this 1st layer of option always returns
            # nested classes, so a direct obj1=obj2 comparison always fails.
            # however, the getlayer(.., 2) does give one instance.
            # so we cheat here and construct a new opt instance for comparison
            # NOTE(review): the list branch compares the reconstructed "rd"
            # while the scalar branch compares "raos" directly -- confirm the
            # asymmetry is intentional.
            rd = ICMPv6NDOptPrefixInfo(prefixlen=raos.prefixlen,
                                       prefix=raos.prefix,
                                       L=raos.L,
                                       A=raos.A)
            if type(pi_opt) is list:
                for ii in range(len(pi_opt)):
                    self.assertEqual(pi_opt[ii], rd)
                    rd = rx.getlayer(ICMPv6NDOptPrefixInfo, ii+2)
            else:
                self.assertEqual(pi_opt, raos)
def send_and_expect_ra(self, intf, pkts, remark, dst_ip=None,
filter_out_fn=is_ipv6_misc,
opt=None):
intf.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
rx = intf.get_capture(1, filter_out_fn=filter_out_fn)
self.assertEqual(len(rx), 1)
rx = rx[0]
self.validate_ra(intf, rx, dst_ip, pi_opt=opt)
    def test_rs(self):
        """ IPv6 Router Solicitation Exceptions

        Test scenario: exercise RA generation in response to RSs from
        various sources (on/off-link, link-local, multicast, unspecified)
        and verify the prefix-information options as the RA prefix
        configuration is changed step by step.
        """
        #
        # Before we begin change the IPv6 RA responses to use the unicast
        # address - that way we will not confuse them with the periodic
        # RAs which go to the mcast address
        # Sit and wait for the first periodic RA.
        #
        # TODO
        #
        self.pg0.ip6_ra_config(send_unicast=1)
        #
        # An RS from a link source address
        #  - expect an RA in return
        #
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IPv6(dst=self.pg0.local_ip6, src=self.pg0.remote_ip6) /
             ICMPv6ND_RS())
        pkts = [p]
        self.send_and_expect_ra(self.pg0, pkts, "Genuine RS")
        #
        # For the next RS sent the RA should be rate limited
        #
        self.send_and_assert_no_replies(self.pg0, pkts, "RA rate limited")
        #
        # When we reconfigure the IPv6 RA config, we reset the RA rate
        # limiting, so we need to do this before each test below so as not to
        # drop packets for rate limiting reasons. Test this works here.
        #
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, pkts, "Rate limit reset RS")
        #
        # An RS sent from a non-link local source
        #
        self.pg0.ip6_ra_config(send_unicast=1)
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IPv6(dst=self.pg0.local_ip6, src="2002::ffff") /
             ICMPv6ND_RS())
        pkts = [p]
        self.send_and_assert_no_replies(self.pg0, pkts,
                                        "RS from non-link source")
        #
        # Source an RS from a link local address
        #
        self.pg0.ip6_ra_config(send_unicast=1)
        ll = mk_ll_addr(self.pg0.remote_mac)
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IPv6(dst=self.pg0.local_ip6, src=ll) /
             ICMPv6ND_RS())
        pkts = [p]
        self.send_and_expect_ra(self.pg0, pkts,
                                "RS sourced from link-local",
                                dst_ip=ll)
        #
        # Send the RS multicast
        #
        self.pg0.ip6_ra_config(send_unicast=1)
        dmac = in6_getnsmac(inet_pton(AF_INET6, "ff02::2"))
        ll = mk_ll_addr(self.pg0.remote_mac)
        p = (Ether(dst=dmac, src=self.pg0.remote_mac) /
             IPv6(dst="ff02::2", src=ll) /
             ICMPv6ND_RS())
        pkts = [p]
        self.send_and_expect_ra(self.pg0, pkts,
                                "RS sourced from link-local",
                                dst_ip=ll)
        #
        # Source from the unspecified address ::. This happens when the RS
        # is sent before the host has a configured address/sub-net,
        # i.e. auto-config. Since the sender has no IP address, the reply
        # comes back mcast - so the capture needs to not filter this.
        # If we happen to pick up the periodic RA at this point then so be it,
        # it's not an error.
        #
        self.pg0.ip6_ra_config(send_unicast=1, suppress=1)
        p = (Ether(dst=dmac, src=self.pg0.remote_mac) /
             IPv6(dst="ff02::2", src="::") /
             ICMPv6ND_RS())
        pkts = [p]
        self.send_and_expect_ra(self.pg0, pkts,
                                "RS sourced from unspecified",
                                dst_ip="ff02::1",
                                filter_out_fn=None)
        #
        # Configure The RA to announce the links prefix
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len)
        #
        # RAs should now contain the prefix information option
        #
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                    prefix=self.pg0.local_ip6,
                                    L=1,
                                    A=1)
        self.pg0.ip6_ra_config(send_unicast=1)
        ll = mk_ll_addr(self.pg0.remote_mac)
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IPv6(dst=self.pg0.local_ip6, src=ll) /
             ICMPv6ND_RS())
        self.send_and_expect_ra(self.pg0, p,
                                "RA with prefix-info",
                                dst_ip=ll,
                                opt=opt)
        #
        # Change the prefix info to not off-link
        #  L-flag is clear
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len,
                               off_link=1)
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                    prefix=self.pg0.local_ip6,
                                    L=0,
                                    A=1)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix info with L-flag=0",
                                dst_ip=ll,
                                opt=opt)
        #
        # Change the prefix info to not off-link, no-autoconfig
        #  L and A flag are clear in the advert
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len,
                               off_link=1,
                               no_autoconfig=1)
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                    prefix=self.pg0.local_ip6,
                                    L=0,
                                    A=0)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix info with A & L-flag=0",
                                dst_ip=ll,
                                opt=opt)
        #
        # Change the flag settings back to the defaults
        #  L and A flag are set in the advert
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len)
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                    prefix=self.pg0.local_ip6,
                                    L=1,
                                    A=1)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix info",
                                dst_ip=ll,
                                opt=opt)
        #
        # Change the prefix info to not off-link, no-autoconfig
        #  L and A flag are clear in the advert
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len,
                               off_link=1,
                               no_autoconfig=1)
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                    prefix=self.pg0.local_ip6,
                                    L=0,
                                    A=0)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix info with A & L-flag=0",
                                dst_ip=ll,
                                opt=opt)
        #
        # Use the reset to defaults option to revert to defaults
        #  L and A flag are set again in the advert
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len,
                               use_default=1)
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                    prefix=self.pg0.local_ip6,
                                    L=1,
                                    A=1)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix reverted to defaults",
                                dst_ip=ll,
                                opt=opt)
        #
        # Advertise Another prefix. With no L-flag/A-flag
        #
        self.pg0.ip6_ra_prefix(self.pg1.local_ip6n,
                               self.pg1.local_ip6_prefix_len,
                               off_link=1,
                               no_autoconfig=1)
        opt = [ICMPv6NDOptPrefixInfo(prefixlen=self.pg0.local_ip6_prefix_len,
                                     prefix=self.pg0.local_ip6,
                                     L=1,
                                     A=1),
               ICMPv6NDOptPrefixInfo(prefixlen=self.pg1.local_ip6_prefix_len,
                                     prefix=self.pg1.local_ip6,
                                     L=0,
                                     A=0)]
        self.pg0.ip6_ra_config(send_unicast=1)
        ll = mk_ll_addr(self.pg0.remote_mac)
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IPv6(dst=self.pg0.local_ip6, src=ll) /
             ICMPv6ND_RS())
        self.send_and_expect_ra(self.pg0, p,
                                "RA with multiple Prefix infos",
                                dst_ip=ll,
                                opt=opt)
        #
        # Remove the first prefix-info - expect the second is still in the
        # advert
        #
        self.pg0.ip6_ra_prefix(self.pg0.local_ip6n,
                               self.pg0.local_ip6_prefix_len,
                               is_no=1)
        opt = ICMPv6NDOptPrefixInfo(prefixlen=self.pg1.local_ip6_prefix_len,
                                    prefix=self.pg1.local_ip6,
                                    L=0,
                                    A=0)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix reverted to defaults",
                                dst_ip=ll,
                                opt=opt)
        #
        # Remove the second prefix-info - expect no prefix-info in the adverts
        #
        self.pg0.ip6_ra_prefix(self.pg1.local_ip6n,
                               self.pg1.local_ip6_prefix_len,
                               is_no=1)
        self.pg0.ip6_ra_config(send_unicast=1)
        self.send_and_expect_ra(self.pg0, p,
                                "RA with Prefix reverted to defaults",
                                dst_ip=ll)
        #
        # Reset the periodic advertisements back to default values
        #
        self.pg0.ip6_ra_config(no=1, suppress=1, send_unicast=0)
class IPv6NDProxyTest(TestIPv6ND):
    """ IPv6 ND ProxyTest Case

    pg0 carries the configured subnet; pg1 and pg2 are IPv6-enabled but
    unnumbered links for which the router proxies ND on behalf of selected
    hosts from pg0's subnet.
    """
    def setUp(self):
        super(IPv6NDProxyTest, self).setUp()
        # create 3 pg interfaces
        self.create_pg_interfaces(range(3))
        # pg0 is the master interface, with the configured subnet
        self.pg0.admin_up()
        self.pg0.config_ip6()
        self.pg0.resolve_ndp()
        # pg1/pg2 get IPv6 processing but no addresses of their own
        self.pg1.ip6_enable()
        self.pg2.ip6_enable()
    def tearDown(self):
        super(IPv6NDProxyTest, self).tearDown()
    def test_nd_proxy(self):
        """ IPv6 Proxy ND """
        #
        # Generate some hosts in the subnet that we are proxying
        #
        self.pg0.generate_remote_hosts(8)
        nsma = in6_getnsma(inet_pton(AF_INET6, self.pg0.local_ip6))
        d = inet_ntop(AF_INET6, nsma)
        #
        # Send an NS for one of those remote hosts on one of the proxy links
        # expect no response since it's from an address that is not
        # on the link that has the prefix configured
        #
        ns_pg1 = (Ether(dst=in6_getnsmac(nsma), src=self.pg1.remote_mac) /
                  IPv6(dst=d, src=self.pg0._remote_hosts[2].ip6) /
                  ICMPv6ND_NS(tgt=self.pg0.local_ip6) /
                  ICMPv6NDOptSrcLLAddr(lladdr=self.pg0._remote_hosts[2].mac))
        self.send_and_assert_no_replies(self.pg1, ns_pg1, "Off link NS")
        #
        # Add proxy support for the host
        #
        self.vapi.ip6_nd_proxy(
            inet_pton(AF_INET6, self.pg0._remote_hosts[2].ip6),
            self.pg1.sw_if_index)
        #
        # try that NS again. this time we expect an NA back
        #
        self.pg1.add_stream(ns_pg1)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg1.get_capture(1)
        self.validate_na(self.pg1, rx[0],
                         dst_ip=self.pg0._remote_hosts[2].ip6,
                         tgt_ip=self.pg0.local_ip6)
        #
        # ... and that we have an entry in the ND cache
        #
        self.assertTrue(find_nbr(self,
                                 self.pg1.sw_if_index,
                                 self.pg0._remote_hosts[2].ip6,
                                 inet=AF_INET6))
        #
        # ... and we can route traffic to it
        #
        t = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IPv6(dst=self.pg0._remote_hosts[2].ip6,
                  src=self.pg0.remote_ip6) /
             UDP(sport=10000, dport=20000) /
             Raw('\xa5' * 100))
        self.pg0.add_stream(t)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg1.get_capture(1)
        rx = rx[0]
        self.assertEqual(rx[Ether].dst, self.pg0._remote_hosts[2].mac)
        self.assertEqual(rx[Ether].src, self.pg1.local_mac)
        self.assertEqual(rx[IPv6].src, t[IPv6].src)
        self.assertEqual(rx[IPv6].dst, t[IPv6].dst)
        #
        # Test we proxy for the host on the main interface
        #
        ns_pg0 = (Ether(dst=in6_getnsmac(nsma), src=self.pg0.remote_mac) /
                  IPv6(dst=d, src=self.pg0.remote_ip6) /
                  ICMPv6ND_NS(tgt=self.pg0._remote_hosts[2].ip6) /
                  ICMPv6NDOptSrcLLAddr(lladdr=self.pg0.remote_mac))
        self.pg0.add_stream(ns_pg0)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg0.get_capture(1)
        self.validate_na(self.pg0, rx[0],
                         tgt_ip=self.pg0._remote_hosts[2].ip6,
                         dst_ip=self.pg0.remote_ip6)
        #
        # Setup and resolve proxy for another host on another interface
        #
        ns_pg2 = (Ether(dst=in6_getnsmac(nsma), src=self.pg2.remote_mac) /
                  IPv6(dst=d, src=self.pg0._remote_hosts[3].ip6) /
                  ICMPv6ND_NS(tgt=self.pg0.local_ip6) /
                  ICMPv6NDOptSrcLLAddr(lladdr=self.pg0._remote_hosts[2].mac))
        self.vapi.ip6_nd_proxy(
            inet_pton(AF_INET6, self.pg0._remote_hosts[3].ip6),
            self.pg2.sw_if_index)
        self.pg2.add_stream(ns_pg2)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg2.get_capture(1)
        self.validate_na(self.pg2, rx[0],
                         dst_ip=self.pg0._remote_hosts[3].ip6,
                         tgt_ip=self.pg0.local_ip6)
        self.assertTrue(find_nbr(self,
                                 self.pg2.sw_if_index,
                                 self.pg0._remote_hosts[3].ip6,
                                 inet=AF_INET6))
        #
        # hosts can communicate. pg2->pg1
        #
        t2 = (Ether(dst=self.pg2.local_mac,
                    src=self.pg0.remote_hosts[3].mac) /
              IPv6(dst=self.pg0._remote_hosts[2].ip6,
                   src=self.pg0._remote_hosts[3].ip6) /
              UDP(sport=10000, dport=20000) /
              Raw('\xa5' * 100))
        self.pg2.add_stream(t2)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg1.get_capture(1)
        rx = rx[0]
        self.assertEqual(rx[Ether].dst, self.pg0._remote_hosts[2].mac)
        self.assertEqual(rx[Ether].src, self.pg1.local_mac)
        self.assertEqual(rx[IPv6].src, t2[IPv6].src)
        self.assertEqual(rx[IPv6].dst, t2[IPv6].dst)
        #
        # remove the proxy configs
        #
        self.vapi.ip6_nd_proxy(
            inet_pton(AF_INET6, self.pg0._remote_hosts[2].ip6),
            self.pg1.sw_if_index,
            is_del=1)
        self.vapi.ip6_nd_proxy(
            inet_pton(AF_INET6, self.pg0._remote_hosts[3].ip6),
            self.pg2.sw_if_index,
            is_del=1)
        self.assertFalse(find_nbr(self,
                                  self.pg2.sw_if_index,
                                  self.pg0._remote_hosts[3].ip6,
                                  inet=AF_INET6))
        self.assertFalse(find_nbr(self,
                                  self.pg1.sw_if_index,
                                  self.pg0._remote_hosts[2].ip6,
                                  inet=AF_INET6))
        #
        # no longer proxy-ing...
        #
        self.send_and_assert_no_replies(self.pg0, ns_pg0, "Proxy unconfigured")
        self.send_and_assert_no_replies(self.pg1, ns_pg1, "Proxy unconfigured")
        self.send_and_assert_no_replies(self.pg2, ns_pg2, "Proxy unconfigured")
        #
        # no longer forwarding. traffic generates NS out of the glean/main
        # interface
        #
        self.pg2.add_stream(t2)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg0.get_capture(1)
        self.assertTrue(rx[0].haslayer(ICMPv6ND_NS))
class TestIPNull(VppTestCase):
    """ IPv6 routes via NULL

    Verifies that drop routes (unreach/prohibit) reply with the correct
    ICMPv6 Destination Unreachable codes.
    """
    def setUp(self):
        super(TestIPNull, self).setUp()
        # create 1 pg interface (range(1) yields only pg0)
        self.create_pg_interfaces(range(1))
        for i in self.pg_interfaces:
            i.admin_up()
            i.config_ip6()
            i.resolve_ndp()
    def tearDown(self):
        super(TestIPNull, self).tearDown()
        for i in self.pg_interfaces:
            i.unconfig_ip6()
            i.admin_down()
    def test_ip_null(self):
        """ IP NULL route """
        p = (Ether(src=self.pg0.remote_mac,
                   dst=self.pg0.local_mac) /
             IPv6(src=self.pg0.remote_ip6, dst="2001::1") /
             UDP(sport=1234, dport=1234) /
             Raw('\xa5' * 100))
        #
        # A route via IP NULL that will reply with ICMP unreachables
        #
        ip_unreach = VppIpRoute(self, "2001::", 64, [], is_unreach=1, is_ip6=1)
        ip_unreach.add_vpp_config()
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg0.get_capture(1)
        rx = rx[0]
        icmp = rx[ICMPv6DestUnreach]
        # 0 = "No route to destination"
        self.assertEqual(icmp.code, 0)
        # ICMP is rate limited. pause a bit
        self.sleep(1)
        #
        # A route via IP NULL that will reply with ICMP prohibited
        # (the /128 is more specific than the /64 above, so it wins)
        #
        ip_prohibit = VppIpRoute(self, "2001::1", 128, [],
                                 is_prohibit=1, is_ip6=1)
        ip_prohibit.add_vpp_config()
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg0.get_capture(1)
        rx = rx[0]
        icmp = rx[ICMPv6DestUnreach]
        # 1 = "Communication with destination administratively prohibited"
        self.assertEqual(icmp.code, 1)
class TestIPDisabled(VppTestCase):
    """ IPv6 disabled

    Verifies that an interface without IPv6 enabled drops unicast and
    multicast IPv6 traffic, and forwards it once IPv6 is configured.
    """
    def setUp(self):
        super(TestIPDisabled, self).setUp()
        # create 2 pg interfaces
        self.create_pg_interfaces(range(2))
        # PG0 is IP enabled
        self.pg0.admin_up()
        self.pg0.config_ip6()
        self.pg0.resolve_ndp()
        # PG 1 is not IP enabled
        self.pg1.admin_up()
    def tearDown(self):
        super(TestIPDisabled, self).tearDown()
        for i in self.pg_interfaces:
            # NOTE(review): this is an IPv6 test but unconfig_ip4() is called
            # here -- looks like it should be unconfig_ip6(); confirm against
            # the interface framework before changing.
            i.unconfig_ip4()
            i.admin_down()
    def send_and_assert_no_replies(self, intf, pkts, remark):
        # Send *pkts* on *intf* and assert nothing is captured anywhere.
        intf.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        for i in self.pg_interfaces:
            i.get_capture(0)
            i.assert_nothing_captured(remark=remark)
    def test_ip_disabled(self):
        """ IP Disabled """
        #
        # An (S,G).
        # one accepting interface, pg0, 2 forwarding interfaces
        #
        route_ff_01 = VppIpMRoute(
            self,
            "::",
            "ffef::1", 128,
            MRouteEntryFlags.MFIB_ENTRY_FLAG_NONE,
            [VppMRoutePath(self.pg1.sw_if_index,
                           MRouteItfFlags.MFIB_ITF_FLAG_ACCEPT),
             VppMRoutePath(self.pg0.sw_if_index,
                           MRouteItfFlags.MFIB_ITF_FLAG_FORWARD)],
            is_ip6=1)
        route_ff_01.add_vpp_config()
        # unicast test packet towards pg0's peer
        pu = (Ether(src=self.pg1.remote_mac,
                    dst=self.pg1.local_mac) /
              IPv6(src="2001::1", dst=self.pg0.remote_ip6) /
              UDP(sport=1234, dport=1234) /
              Raw('\xa5' * 100))
        # multicast test packet matching the (S,G) above
        pm = (Ether(src=self.pg1.remote_mac,
                    dst=self.pg1.local_mac) /
              IPv6(src="2001::1", dst="ffef::1") /
              UDP(sport=1234, dport=1234) /
              Raw('\xa5' * 100))
        #
        # PG1 does not forward IP traffic
        #
        self.send_and_assert_no_replies(self.pg1, pu, "IPv6 disabled")
        self.send_and_assert_no_replies(self.pg1, pm, "IPv6 disabled")
        #
        # IP enable PG1
        #
        self.pg1.config_ip6()
        #
        # Now we get packets through
        #
        self.pg1.add_stream(pu)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg0.get_capture(1)
        self.pg1.add_stream(pm)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg0.get_capture(1)
        #
        # Disable PG1
        #
        self.pg1.unconfig_ip6()
        #
        # PG1 does not forward IP traffic
        #
        self.send_and_assert_no_replies(self.pg1, pu, "IPv6 disabled")
        self.send_and_assert_no_replies(self.pg1, pm, "IPv6 disabled")
# Allow running this test module directly with the VPP-aware test runner.
if __name__ == '__main__':
    unittest.main(testRunner=VppTestRunner)
| milanlenco/vpp | test/test_ip6.py | Python | apache-2.0 | 38,904 |
<?php
/**
* Created by PhpStorm.
* User: liangxz@szljfkj.com
* Date: 2017/3/15
* Time: 15:44
* 加载配置文件操作
*/
namespace bootstrap;
class Configuration
{
    /**
     * Fetch a configuration value by key from config/common.php.
     *
     * @param string $key top-level configuration key
     * @return mixed the configured value, or false when the key is empty
     *               or cannot be resolved
     */
    public static function env($key = "")
    {
        if(empty($key))
            return false;
        return self::getName($key);
    }
    /**
     * Resolve $key against the array returned by config/common.php.
     * The config file must return a one-dimensional array; only top-level
     * keys are supported.
     *
     * @param string $key
     * @return mixed|bool value for $key, or false on any failure
     */
    private static function getName($key)
    {
        $fileName = __DIR__.'/../config/common.php';
        if(!file_exists($fileName))
            return false;
        $commonArr = require $fileName;
        // isset() avoids an "undefined index" notice for missing keys
        // (the previous "!$commonArr[$key]" check also wrongly rejected
        // legitimate falsy values such as 0, "" or false).
        if(!$commonArr || !is_array($commonArr) || !isset($commonArr[$key]))
            return false;
        return $commonArr[$key];
    }
}
} | X-Sanji/Le-framework | bootstrap/configuration.php | PHP | apache-2.0 | 863 |
/*Copyright 2010 George Karagoulis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include "gutil_exception.h"
#include <QTcpServer>
#include <QTcpSocket>
#include <QProcess>
using namespace GUtil;
using namespace DataAccess;
// Constructs either the server or the client side of the demo.
// Server mode: listens on loopback and spawns a second copy of this
// executable (passing the chosen port as argv[1]) to act as the client.
// Client mode: connects to the port given on the command line.
MainWindow::MainWindow(bool server, QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow),
    sock(0),
    srv(0)
{
    ui->setupUi(this);
    if(server)
    {
        ui->lblServer->setText("Server");
        srv = new QTcpServer(this);
        // Accept incoming connections in new_connection()
        connect(srv, SIGNAL(newConnection()),
                this, SLOT(new_connection()));
        srv->listen(QHostAddress("127.0.0.1"));
        // Start a duplicate process, that will connect to the server
        QProcess *p = new QProcess(qApp);
        p->start(qApp->argv()[0], QStringList(QString("%1").arg(srv->serverPort())));
    }
    else
    {
        ui->lblServer->setText("Client");
        // Wrap the raw QTcpSocket in the GUtil IO-device adapter
        sock = new GTcpSocketIODevice(new QTcpSocket(this));
        connect(sock, SIGNAL(ReadyRead()), this, SLOT(read_sock()));
        // Port comes from the command line (argv[1], set by the server side)
        sock->Socket().connectToHost("127.0.0.1", QString(qApp->argv()[1]).toUInt());
        sock->Socket().waitForConnected(5000);
        if(!sock->IsConnected())
            throw Exception<>("Not Connected");
    }
}
// Releases the UI and whichever of the socket/server objects were created
// for this mode (the other remains null).
MainWindow::~MainWindow()
{
    delete ui;
    // delete on a null pointer is a no-op, so no guards are needed
    delete sock;
    delete srv;
}
// Slot: display whatever data just arrived on the socket.
void MainWindow::read_sock()
{
    ui->lblReceived->setText(sock->ReceiveData());
}
// Slot: send the text-box contents to the peer over the socket.
void MainWindow::send_data()
{
    sock->Write(ui->txtSend->toPlainText().toAscii());
}
// Slot: server side accepted a connection; wrap it and hook up reads.
// NOTE(review): if a second connection ever arrives, the previous "sock"
// is overwritten without being deleted -- a leak for this demo; confirm
// only one client is expected.
void MainWindow::new_connection()
{
    if(srv->hasPendingConnections())
    {
        sock = new GTcpSocketIODevice(srv->nextPendingConnection());
        connect(sock, SIGNAL(ReadyRead()), this, SLOT(read_sock()));
    }
}
| karagog/gutil | src/qt/data_access/test/tcp_socket_transport/mainwindow.cpp | C++ | apache-2.0 | 2,291 |
package org.ebookdroid.core.models;
import org.ebookdroid.core.PageIndex;
import org.ebookdroid.core.events.CurrentPageListener;
import org.ebookdroid.core.events.ListenerProxy;
import org.ebookdroid.core.log.LogContext;
import org.ebookdroid.utils.CompareUtils;
/**
 * Holds the currently displayed page index and notifies registered
 * {@link CurrentPageListener}s when it changes.
 */
public class CurrentPageModel extends ListenerProxy {

    protected static final LogContext LCTX = LogContext.ROOT.lctx("DocModel");

    /** Index of the page currently shown; starts at the first page. */
    protected PageIndex currentIndex = PageIndex.FIRST;

    public CurrentPageModel() {
        super(CurrentPageListener.class);
    }

    /**
     * Updates the current page and fires {@code currentPageChanged} if the
     * index actually changed.
     *
     * @param newIndex the page index to switch to
     */
    public void setCurrentPageIndex(final PageIndex newIndex) {
        if (!CompareUtils.equals(currentIndex, newIndex)) {
            if (LCTX.isDebugEnabled()) {
                // Log the old index value (the literal "currentIndex" was
                // previously concatenated here by mistake).
                LCTX.d("Current page changed: " + currentIndex + " -> " + newIndex);
            }
            final PageIndex oldIndex = this.currentIndex;
            this.currentIndex = newIndex;
            this.<CurrentPageListener>getListener().currentPageChanged(oldIndex, newIndex);
        }
    }

    public PageIndex getCurrentIndex() {
        return this.currentIndex;
    }

    public int getCurrentViewPageIndex() {
        return this.currentIndex.viewIndex;
    }

    public int getCurrentDocPageIndex() {
        return this.currentIndex.docIndex;
    }
}
| hk0792/UsefulClass | EBookDroid/src/org/ebookdroid/core/models/CurrentPageModel.java | Java | apache-2.0 | 1,284 |
// (C) Copyright 2015 Martin Dougiamas
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
angular.module('mm.core.login')

/**
 * Controller to handle input of user credentials.
 *
 * @module mm.core.login
 * @ngdoc controller
 * @name mmLoginCredentialsCtrl
 */
.controller('mmLoginCredentialsCtrl', function($scope, $stateParams, $mmSitesManager, $mmUtil, $ionicHistory, $mmApp,
            $q, $mmLoginHelper, $mmContentLinksDelegate, $mmContentLinksHelper, $translate) {

    $scope.siteurl = $stateParams.siteurl;
    $scope.credentials = {
        username: $stateParams.username,
        password: $stateParams.password
    };
    $scope.siteChecked = false;

    var urlToOpen = $stateParams.urltoopen,
        siteConfig = $stateParams.siteconfig;

    treatSiteConfig(siteConfig);

    // Function to check if a site uses local_mobile, requires SSO login, etc.
    // This should be used only if a fixed URL is set, otherwise this check is already performed in mmLoginSiteCtrl.
    function checkSite(siteurl) {
        // If the site is configured with http:// protocol we force that one, otherwise we use default mode.
        var checkmodal = $mmUtil.showModalLoading(),
            protocol = siteurl.indexOf('http://') === 0 ? 'http://' : undefined;
        return $mmSitesManager.checkSite(siteurl, protocol).then(function(result) {

            $scope.siteChecked = true;
            $scope.siteurl = result.siteurl;

            treatSiteConfig(result.config);

            if (result && result.warning) {
                $mmUtil.showErrorModal(result.warning, true, 4000);
            }

            if ($mmLoginHelper.isSSOLoginNeeded(result.code)) {
                // SSO. User needs to authenticate in a browser.
                $scope.isBrowserSSO = true;
                // Check that there's no SSO authentication ongoing and the view hasn't changed.
                if (!$mmApp.isSSOAuthenticationOngoing() && !$scope.$$destroyed) {
                    $mmLoginHelper.confirmAndOpenBrowserForSSOLogin(
                        result.siteurl, result.code, result.service, result.config && result.config.launchurl);
                }
            } else {
                $scope.isBrowserSSO = false;
            }

        }).catch(function(error) {
            $mmUtil.showErrorModal(error);
            return $q.reject();
        }).finally(function() {
            checkmodal.dismiss();
        });
    }

    // Treat the site's config, setting scope variables.
    function treatSiteConfig(siteConfig) {
        if (siteConfig) {
            $scope.sitename = siteConfig.sitename;
            $scope.logourl = siteConfig.logourl || siteConfig.compactlogourl;
            $scope.authInstructions = siteConfig.authinstructions || $translate.instant('mm.login.loginsteps');
            $scope.canSignup = siteConfig.registerauth == 'email' && !$mmLoginHelper.isEmailSignupDisabled(siteConfig);
        } else {
            $scope.sitename = null;
            $scope.logourl = null;
            $scope.authInstructions = null;
            $scope.canSignup = false;
        }
    }

    if ($mmLoginHelper.isFixedUrlSet()) {
        // Fixed URL, we need to check if it uses browser SSO login.
        checkSite($scope.siteurl);
    } else {
        $scope.siteChecked = true;
    }

    $scope.login = function() {
        $mmApp.closeKeyboard();

        // Get input data.
        var siteurl = $scope.siteurl,
            username = $scope.credentials.username,
            password = $scope.credentials.password;

        if (!$scope.siteChecked) {
            // Site wasn't checked (it failed), let's check again.
            return checkSite(siteurl).then(function() {
                if (!$scope.isBrowserSSO) {
                    // Site doesn't use browser SSO, throw app's login again.
                    return $scope.login();
                }
            });
        } else if ($scope.isBrowserSSO) {
            // A previous check determined that browser SSO is needed. Let's check again, maybe site was updated.
            return checkSite(siteurl);
        }

        if (!username) {
            $mmUtil.showErrorModal('mm.login.usernamerequired', true);
            return;
        }
        if (!password) {
            $mmUtil.showErrorModal('mm.login.passwordrequired', true);
            return;
        }

        var modal = $mmUtil.showModalLoading();

        // Start the authentication process.
        return $mmSitesManager.getUserToken(siteurl, username, password).then(function(data) {
            return $mmSitesManager.newSite(data.siteurl, data.token, data.privatetoken).then(function() {
                delete $scope.credentials; // Delete username and password from the scope.
                $ionicHistory.nextViewOptions({disableBack: true});

                if (urlToOpen) {
                    // There's a content link to open.
                    return $mmContentLinksDelegate.getActionsFor(urlToOpen, undefined, username).then(function(actions) {
                        // "var" added: "action" was previously assigned without
                        // declaration, leaking an implicit global (and throwing
                        // in strict mode).
                        var action = $mmContentLinksHelper.getFirstValidAction(actions);
                        if (action && action.sites.length) {
                            // Action should only have 1 site because we're filtering by username.
                            action.action(action.sites[0]);
                        } else {
                            return $mmLoginHelper.goToSiteInitialPage();
                        }
                    });
                } else {
                    return $mmLoginHelper.goToSiteInitialPage();
                }
            });
        }).catch(function(error) {
            $mmLoginHelper.treatUserTokenError(siteurl, error);
        }).finally(function() {
            modal.dismiss();
        });
    };
});
| balirwa/logs | www/core/components/login/controllers/credentials.js | JavaScript | apache-2.0 | 6,343 |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.prism;
import com.evolveum.midpoint.prism.path.ItemPath;
/**
* @author semancik
*
*/
/**
 * Result of a partial item-path resolution: holds the deepest {@link Item}
 * that could be resolved together with the remaining (unresolved) path
 * suffix. Instances are simple mutable value holders with value-based
 * equality over both fields.
 *
 * @author semancik
 *
 */
public class PartiallyResolvedValue<V extends PrismValue> {

	private Item<V> item;
	private ItemPath residualPath;

	public PartiallyResolvedValue(Item<V> item, ItemPath residualPath) {
		super();
		this.item = item;
		this.residualPath = residualPath;
	}

	/** Returns the item that was resolved (may be null). */
	public Item<V> getItem() {
		return item;
	}

	public void setItem(Item<V> item) {
		this.item = item;
	}

	/** Returns the path suffix that could not be resolved (may be null). */
	public ItemPath getResidualPath() {
		return residualPath;
	}

	public void setResidualPath(ItemPath residualPath) {
		this.residualPath = residualPath;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((item == null) ? 0 : item.hashCode());
		result = prime * result + ((residualPath == null) ? 0 : residualPath.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		// Use a wildcard instead of the raw type to avoid an unchecked
		// raw-type reference; field comparison does not need V.
		PartiallyResolvedValue<?> other = (PartiallyResolvedValue<?>) obj;
		if (item == null) {
			if (other.item != null)
				return false;
		} else if (!item.equals(other.item))
			return false;
		if (residualPath == null) {
			if (other.residualPath != null)
				return false;
		} else if (!residualPath.equals(other.residualPath))
			return false;
		return true;
	}

	@Override
	public String toString() {
		return "PartiallyResolvedValue(item=" + item + ", residualPath=" + residualPath + ")";
	}

}
| sabriarabacioglu/engerek | infra/prism/src/main/java/com/evolveum/midpoint/prism/PartiallyResolvedValue.java | Java | apache-2.0 | 2,164 |
package com.gufengxiachen.designpattern.Structural.adapter.objectadapter;
import java.util.Enumeration;
public class MessageApplication {

    /**
     * Prints every element supplied by the given enumeration to standard
     * output, one element per line, in enumeration order. The enumeration
     * is fully consumed.
     *
     * @param enum1 enumeration of message objects to display
     */
    public void showAllMessage(Enumeration enum1) {
        while (enum1.hasMoreElements()) {
            System.out.println(enum1.nextElement());
        }
    }
}
| wustrive2008/java-designpattern | src/main/java/com/gufengxiachen/designpattern/Structural/adapter/objectadapter/MessageApplication.java | Java | apache-2.0 | 357 |
# Copyright 2011-2015, The Trustees of Indiana University and Northwestern
# University. Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# --- END LICENSE_HEADER BLOCK ---
# Mixin that lets a controller class declare named partials grouped into
# lists, each with a config hash, via +add_conditional_partial+. The
# registered configs are stored in the +conditional_partials+ class
# attribute as a hash of {partial_list_name => {partial_name => config}}.
module ConditionalPartials
  extend ActiveSupport::Concern

  included do
    # Class-level registry only; instance accessors/predicates are disabled.
    class_attribute :conditional_partials, instance_accessor: false, instance_predicate: false
    self.conditional_partials = {}
  end

  module ClassMethods
    # Registers partial +name+ under +partial_list_name+ with options +opts+.
    # The opts hash itself is stored (and mutated to carry :name); a block,
    # if given, receives the config hash for further customization.
    #
    # NOTE(review): this mutates the hash held by class_attribute in place,
    # so subclasses share the same registry unless they assign their own
    # copy -- confirm that this sharing across subclasses is intended.
    def add_conditional_partial partial_list_name, name, opts={}
      conditional_partials[partial_list_name] ||= {}
      config = opts
      config[:name] = name
      if block_given?
        yield config
      end
      conditional_partials[partial_list_name][name] = config
    end
  end
end
| uvalib/avalon | app/controllers/concerns/conditional_partials.rb | Ruby | apache-2.0 | 1,214 |
package uk.co.harrymartland.multijmx.domain.optionvalue.orderconnection;
import com.google.inject.Inject;
import org.apache.commons.cli.Option;
import uk.co.harrymartland.multijmx.domain.optionvalue.AbstractSingleOptionValue;
import uk.co.harrymartland.multijmx.service.commandline.CommandLineService;
/**
 * Command-line option ("-c" / "--order-connectionarg") that, when present,
 * requests ordering of the results by connection. The value is a simple
 * boolean: true if the flag was supplied.
 */
public class OrderConnectionOptionValueImpl extends AbstractSingleOptionValue<Boolean> implements OrderConnectionOptionValue {

    @Inject
    public OrderConnectionOptionValueImpl(CommandLineService commandLineService) {
        super(commandLineService);
    }

    // NOTE(review): "connectionarg" in the messages below may be a typo for
    // "connection arg", but it matches the long option name -- confirm the
    // intended wording against sibling option classes before changing.
    @Override
    protected String getMultipleArgumentError() {
        return "Order by connectionarg is specified twice";
    }

    /** Builds the commons-cli Option definition for this flag (no argument). */
    @Override
    public Option lazyLoadOption() {
        return Option.builder(getArg())
                .longOpt("order-connectionarg")
                .desc("Order the results by connectionarg")
                .build();
    }

    /** Short option letter used on the command line. */
    @Override
    public String getArg() {
        return "c";
    }

    /** True when the flag was passed on the command line. */
    @Override
    public Boolean lazyLoadValue() {
        return hasOption();
    }
}
| HarryEMartland/multi-jmx | src/main/java/uk/co/harrymartland/multijmx/domain/optionvalue/orderconnection/OrderConnectionOptionValueImpl.java | Java | apache-2.0 | 1,089 |
// -----------------------------------------------------------------------
// <copyright file="ICacheProvider.cs" company="">
// Copyright (c) 2014-2015 OSky. All rights reserved.
// </copyright>
// <last-editor>Lmf</last-editor>
// <last-date>2015-03-22 15:36</last-date>
// -----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OSky.Core.Caching
{
    /// <summary>
    /// Cache provider contract, used to create and manage cache objects.
    /// </summary>
    public interface ICacheProvider
    {
        /// <summary>
        /// Gets the cache object for the given region.
        /// </summary>
        /// <param name="regionName">Name of the cache region.</param>
        /// <returns>The cache associated with the region.</returns>
        ICache GetCache(string regionName);
    }
}
--TEST--
RFC: DateTime and Daylight Saving Time Transitions (zone type 3, bd2)
--CREDITS--
Daniel Convissor <danielc@php.net>
--XFAIL--
Still not quite right
--FILE--
<?php
// Exercises DateTime::diff() across the backward (fall-back) DST transition
// in America/New_York on 2010-11-07, using zone type 3 (tz identifier)
// objects. Comments only; output must keep matching the --EXPECT-- section.

date_default_timezone_set('America/New_York');

$date_format = 'Y-m-d H:i:s T e';
$interval_format = 'P%dDT%hH';

/*
 * For backward transitions, must create objects with zone type 2
 * where specifying Daylight or Standard time is required
 * then converting them to zone type 3.
 */

$tz = new DateTimeZone('America/New_York');

/*
 * Backward Transitions, diff().
 */

// bd0: end after the transition, start before it; seconds included.
$end = new DateTime('2010-11-07 05:30:00');
$end->setTimeZone($tz);
$start = new DateTime('2010-11-06 04:30:59');
echo 'bd0 ' . $end->format($date_format) . ' - ' . $start->format($date_format)
    . ' = ' . $start->diff($end)->format('P%dDT%hH%iM%sS') . "\n";

// bd5/bd6: 01:30 occurs twice on transition day; disambiguate with EST/EDT.
$end = new DateTime('2010-11-07 01:30:00 EST');
$end->setTimeZone($tz);
$start = new DateTime('2010-11-06 04:30:00');
echo 'bd5 ' . $end->format($date_format) . ' - ' . $start->format($date_format)
    . ' = ' . $start->diff($end)->format($interval_format) . "\n";

$end = new DateTime('2010-11-07 01:30:00 EDT');
$end->setTimeZone($tz);
$start = new DateTime('2010-11-06 04:30:00');
echo 'bd6 ' . $end->format($date_format) . ' - ' . $start->format($date_format)
    . ' = ' . $start->diff($end)->format($interval_format) . "\n";

// bd8: same wall-clock time on both days; elapsed time includes extra hour.
$end = new DateTime('2010-11-07 01:30:00 EST');
$end->setTimeZone($tz);
$start = new DateTime('2010-11-06 01:30:00');
echo 'bd8 ' . $end->format($date_format) . ' - ' . $start->format($date_format)
    . ' = ' . $start->diff($end)->format($interval_format) . "\n";

echo "\n";
?>
--EXPECT--
bd0 2010-11-07 05:30:00 EST America/New_York - 2010-11-06 04:30:59 EDT America/New_York = P1DT1H59M1S
bd5 2010-11-07 01:30:00 EST America/New_York - 2010-11-06 04:30:00 EDT America/New_York = P0DT22H
bd6 2010-11-07 01:30:00 EDT America/New_York - 2010-11-06 04:30:00 EDT America/New_York = P0DT21H
bd8 2010-11-07 01:30:00 EST America/New_York - 2010-11-06 01:30:00 EDT America/New_York = P1DT1H
| jphp-compiler/jphp | exts/jphp-zend-ext/src/main/tests/resources/ext/date/rfc-datetime_and_daylight_saving_time-type3-bd2.phpt | PHP | apache-2.0 | 2,021 |
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing Melange Django settings.
"""
__authors__ = [
'"Madhusudan.C.S" <madhusudancs@gmail.com>',
'"Augie Fackler" <durin42@gmail.com>',
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
'"Lennard de Rijk" <ljvderijk@gmail.com>',
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
import os
# Debug flag True only on App Engine development environment (dev_appserver.py)
# dev_appserver sets SERVER_SOFTWARE to 'Development/1.0'
# Use .get() so this module can be imported outside App Engine (tests,
# tooling) where SERVER_SOFTWARE is not set, instead of raising KeyError.
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
# 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_ENGINE = 'dummy'
# None of the following are used with appengine
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
# Set to empty string for localhost. Not used with sqlite3.
DATABASE_HOST = ''
# Set to empty string for default. Not used with sqlite3.
DATABASE_PORT = ''
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
# The order of the middleware is as follows because:
# - The ValueStore middleware should be before any other middleware
# so that the value store is available to it.
# - The ExceptionHandler should be the outermost handler (after the
# ValueStore) so as to catch as many errors as possible.
# - The Profile middleware should be as outmost as possible, so that
# as many function calls as possible, but it cannot be before the
# ExceptionHandler (so as to catch exceptions thrown by it).
# - The MaintenanceMiddleware should be after the Profiler, since we
# do want it's actions profiled.
MIDDLEWARE_CLASSES = (
'google.appengine.ext.appstats.recording.AppStatsDjangoMiddleware',
'soc.middleware.value_store.ValueStoreMiddleware',
# 'soc.middleware.exception_handler.ExceptionHandlerMiddleware',
# 'soc.middleware.profiler.ProfileMiddleware',
'soc.middleware.maintenance.MaintenanceMiddleware',
'soc.middleware.blobstore.BlobStoreMiddleware',
'soc.middleware.xsrf.XsrfMiddleware',
# 'django.middleware.common.CommonMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.middleware.doc.XViewMiddleware',
)
ROOT_URLCONF = 'urls'
ROOT_PATH = os.path.dirname(__file__)
TEMPLATE_DIRS = (
# TODO(proto): customize the template search directories
os.path.join(ROOT_PATH, 'soc', 'templates'),
os.path.join(ROOT_PATH, 'shell', 'templates'),
)
INSTALLED_APPS = (
'soc.views.helper',
'soc.modules.gsoc.views.helper',
'soc.modules.gci.views.helper',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.sites',
)
GCI_TASK_QUOTA_LIMIT_ENABLED = False
MODULE_FMT = 'soc.modules.%s.callback'
MODULES = ['gsoc', 'statistic', 'gci']
| SRabbelier/Melange | app/settings.py | Python | apache-2.0 | 4,946 |
"""Base actions for the players to take."""
from csrv.model.actions import action
from csrv.model import cost
from csrv.model import errors
from csrv.model import events
from csrv.model import game_object
from csrv.model import parameters
class PlayOperationAction(action.Action):
  """Corp action: play an operation card from HQ, then trash it to archives."""

  DESCRIPTION = '[click]: Play an operation'
  COST_CLASS = cost.OperationCost

  def resolve(self, response=None, ignore_clicks=False, ignore_all_costs=False):
    """Pay the action's costs, play the operation, and move it to archives.

    Order matters: costs are paid first (via the base class), the card is
    removed from HQ and turned face up before its play() effect runs, and
    only afterwards is it placed in archives.
    """
    action.Action.resolve(
        self,
        ignore_clicks=ignore_clicks,
        ignore_all_costs=ignore_all_costs)
    self.player.hq.remove(self.card)
    self.card.is_faceup = True
    self.card.play()
    self.card.log('The corp plays %s' % self.card)
    self.player.archives.add(self.card)

  @property
  def description(self):
    # Human-readable label shown to the player for this specific card.
    return 'Play %s' % self.card.NAME
| mrroach/CentralServer | csrv/model/actions/play_operation_action.py | Python | apache-2.0 | 825 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from collections import defaultdict
import ray
import ray.cloudpickle as cloudpickle
# This string should be identical to the name of the signal sent upon
# detecting that an actor died.
# This constant is also used in NodeManager::PublishActorStateTransition()
# in node_manager.cc
ACTOR_DIED_STR = "ACTOR_DIED_SIGNAL"
logger = logging.getLogger(__name__)
class Signal(object):
    """Common base type for all Ray signals."""


class ErrorSignal(Signal):
    """Carries the exception raised inside a task or actor method."""

    def __init__(self, error):
        self.error = error


class ActorDiedSignal(Signal):
    """Emitted when an actor is detected to have died."""

    def __init__(self):
        pass
def _get_task_id(source):
    """Return the task id associated to the generic source of the signal.

    Args:
        source: source of the signal, it can be either an object id returned
            by a task, a task id, or an actor handle.

    Returns:
        - If source is an object id, return id of task which created object.
        - If source is an actor handle, return the handle's actor id.
        - If source is a task id, return same task id.
    """
    if type(source) is ray.actor.ActorHandle:
        # NOTE(review): returns the actor id itself; the Redis stream for an
        # actor is keyed on the actor id (see send()) -- confirm callers rely
        # on exactly this key.
        return source._actor_id
    else:
        if type(source) is ray.TaskID:
            return source
        else:
            # source is an object id: derive the id of the task that produced it.
            return ray._raylet.compute_task_id(source)
def send(signal):
    """Send signal.

    The signal has a unique identifier that is computed from (1) the id
    of the actor or task sending this signal (i.e., the actor or task calling
    this function), and (2) an index that is incremented every time this
    source sends a signal. This index starts from 1.

    Args:
        signal: Signal to be sent.
    """
    # The Redis stream is keyed by the sender: the actor id when running
    # inside an actor, otherwise the current task id.
    if hasattr(ray.worker.global_worker, "actor_creation_task_id"):
        source_key = ray.worker.global_worker.actor_id.hex()
    else:
        # No actors; this function must have been called from a task
        source_key = ray.worker.global_worker.current_task_id.hex()

    # Pickle the signal and hex-encode it so it is safe to embed in the
    # textual Redis command. "*" asks XADD to auto-generate the entry id.
    encoded_signal = ray.utils.binary_to_hex(cloudpickle.dumps(signal))
    ray.worker.global_worker.redis_client.execute_command(
        "XADD " + source_key + " * signal " + encoded_signal)
def receive(sources, timeout=None):
    """Get all outstanding signals from sources.

    A source can be either (1) an object ID returned by the task (we want
    to receive signals from), or (2) an actor handle.

    When invoked by the same entity E (where E can be an actor, task or
    driver), for each source S in sources, this function returns all signals
    generated by S since the last receive() was invoked by E on S. If this is
    the first call on S, this function returns all past signals generated by S
    so far. Note that different actors, tasks or drivers that call receive()
    on the same source S will get independent copies of the signals generated
    by S.

    Args:
        sources: List of sources from which the caller waits for signals.
            A source is either an object ID returned by a task (in this case
            the object ID is used to identify that task), or an actor handle.
            If the user passes the IDs of multiple objects returned by the
            same task, this function returns a copy of the signals generated
            by that task for each object ID.
        timeout: Maximum time (in seconds) this function waits to get a signal
            from a source in sources. If None, the timeout is infinite.

    Returns:
        A list of pairs (S, sig), where S is a source in the sources argument,
        and sig is a signal generated by S since the last time receive()
        was called on S. Thus, for each S in sources, the return list can
        contain zero or multiple entries.
    """
    # If None, initialize the timeout to a huge value (i.e., over 30,000 years
    # in this case) to "approximate" infinity.
    if timeout is None:
        timeout = 10**12

    if timeout < 0:
        raise ValueError("The 'timeout' argument cannot be less than 0.")

    # Per-worker cursor of the last Redis stream id seen for each source task;
    # b"0" means "read from the beginning of the stream".
    if not hasattr(ray.worker.global_worker, "signal_counters"):
        ray.worker.global_worker.signal_counters = defaultdict(lambda: b"0")

    signal_counters = ray.worker.global_worker.signal_counters

    # Map the ID of each source task to the source itself.
    task_id_to_sources = defaultdict(lambda: [])
    for s in sources:
        task_id_to_sources[_get_task_id(s).hex()].append(s)

    if timeout < 1e-3:
        logger.warning("Timeout too small. Using 1ms minimum")
        timeout = 1e-3

    timeout_ms = int(1000 * timeout)

    # Construct the redis query.
    query = "XREAD BLOCK "
    # redis expects ms.
    query += str(timeout_ms)
    query += " STREAMS "
    query += " ".join([task_id for task_id in task_id_to_sources])
    query += " "
    # One cursor per stream, in the same order as the stream keys above.
    query += " ".join([
        ray.utils.decode(signal_counters[ray.utils.hex_to_binary(task_id)])
        for task_id in task_id_to_sources
    ])

    answers = ray.worker.global_worker.redis_client.execute_command(query)
    if not answers:
        return []

    results = []
    # Decoding is a little bit involved. Iterate through all the answers:
    for i, answer in enumerate(answers):
        # Make sure the answer corresponds to a source, s, in sources.
        task_id = ray.utils.decode(answer[0])
        task_source_list = task_id_to_sources[task_id]
        # The list of results for source s is stored in answer[1]
        for r in answer[1]:
            for s in task_source_list:
                if r[1][1].decode("ascii") == ACTOR_DIED_STR:
                    results.append((s, ActorDiedSignal()))
                else:
                    # Now it gets tricky: r[0] is the redis internal sequence
                    # id
                    signal_counters[ray.utils.hex_to_binary(task_id)] = r[0]
                    # r[1] contains a list with elements (key, value), in our
                    # case we only have one key "signal" and the value is the
                    # signal.
                    signal = cloudpickle.loads(
                        ray.utils.hex_to_binary(r[1][1]))
                    results.append((s, signal))
    return results
def forget(sources):
    """Ignore all previous signals associated with each source S in sources.

    The index of the next expected signal from S is set to the index of
    the last signal that S sent plus 1. This means that the next receive()
    on S will only get the signals generated after this function was invoked.

    Args:
        sources: list of sources whose past signals are forgotten.
    """
    # Just read all signals sent by all sources so far (non-blocking).
    # This results in those signals being ignored by future receive() calls.
    receive(sources, timeout=0)
def reset():
    """
    Reset the worker state associated with any signals that this worker
    has received so far.

    If the worker calls receive() on a source next, it will get all the
    signals generated by that source starting with index = 1.
    """
    # Dropping the cursors (resetting them to b"0") makes the next receive()
    # read each stream from the beginning again.
    if hasattr(ray.worker.global_worker, "signal_counters"):
        ray.worker.global_worker.signal_counters = defaultdict(lambda: b"0")
| ujvl/ray-ng | python/ray/experimental/signal.py | Python | apache-2.0 | 7,355 |
# -*- encoding: utf-8 -*-
require "bundler"
require "bundler/definition"
require "bundler/lockfile_parser"
require "pathname"
module RubyScaffolding
  # Thin read-only wrapper around a Gemfile.lock: exposes the locked Ruby
  # version and per-gem lookup without resolving anything.
  class GemfileParser
    # lockfile_path - path to an existing Gemfile.lock; raises if missing.
    def initialize(lockfile_path)
      @lockfile_path = Pathname(lockfile_path)

      if !@lockfile_path.file?
        raise "Lockfile not found: #{@lockfile_path}"
      end
    end

    # Ruby version pinned in the lockfile, with any patch-level suffix
    # (e.g. "p123") stripped; nil when the lockfile pins no Ruby version.
    def ruby_version
      version = locked_gems.ruby_version
      version && version.sub(/p\d+/, "")
    end

    # True when the named gem appears in the lockfile's resolved specs.
    def has_gem?(name)
      specs.key?(name)
    end

    # Locked version of the named gem; raises KeyError (via fetch) when
    # the gem is not present.
    def gem_version(name)
      specs.fetch(name).version
    end

    private

    # Memoized parse of the lockfile contents via Bundler.
    def locked_gems
      @locked_gems ||= begin
        contents = Bundler.read_file(@lockfile_path)
        Bundler::LockfileParser.new(contents)
      end
    end

    # Memoized name -> spec index over the lockfile's resolved specs.
    def specs
      @specs ||= locked_gems.specs.
        each_with_object({}) { |spec, hash| hash[spec.name] = spec }
    end
  end
end
| be-plans/be | scaffolding-ruby/lib/ruby_scaffolding/gemfile_parser.rb | Ruby | apache-2.0 | 909 |
<?php
/**
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Amazon\Payment\Exception;
/**
 * Thrown when an Amazon Pay capture is still in the pending state.
 *
 * The previous explicit constructor was a pure passthrough that duplicated
 * \Exception::__construct's defaults (and needlessly narrowed $previous
 * from Throwable to \Exception), so it has been removed; the inherited
 * constructor provides identical behavior.
 */
class CapturePendingException extends \Exception
{
}
| Smith-and-Associates/amazon-payments-magento-2-plugin | src/Payment/Exception/CapturePendingException.php | PHP | apache-2.0 | 833 |
/**
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.cli.commands;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.internal.Lists;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import java.util.List;
import org.kitesdk.data.Datasets;
import org.kitesdk.data.URIBuilder;
import org.kitesdk.data.View;
import org.kitesdk.data.spi.DatasetRepositories;
import org.kitesdk.data.spi.DatasetRepository;
import org.slf4j.Logger;
/**
 * Base class for CLI commands that operate on a dataset: holds the shared
 * storage-selection flags (--use-local/--use-hdfs/--use-hive/--use-hbase),
 * and helpers to build repository/dataset URIs and load views.
 */
abstract class BaseDatasetCommand extends BaseCommand {

  @Parameter(names = {"-d", "--directory"}, hidden = true,
      description = "Storage directory for datasets, required for HDFS")
  String directory = null;

  @Parameter(names = {"--namespace"}, hidden = true,
      description = "Namespace for datasets")
  String namespace = "default";

  @Parameter(names = {"--use-local"}, hidden = true,
      description = "Store data in local files")
  boolean local = false;

  @Parameter(names = {"--use-hdfs"}, hidden = true,
      description = "Store data in HDFS files")
  boolean hdfs = false;

  @Parameter(names = {"--use-hive"}, hidden = true,
      description = "Store data in Hive managed tables (default)")
  boolean hive = false;

  @Parameter(names = {"--use-hbase"}, hidden = true,
      description = "Store data in HBase tables")
  boolean hbase = false;

  @Parameter(names = {"--zookeeper", "--zk"}, hidden = true,
      description = "ZooKeeper host list as host or host:port")
  List<String> zookeeper = Lists.newArrayList("localhost");

  @VisibleForTesting
  @Parameter(names = {"-r", "--repo"}, hidden = true,
      description="The repository URI to open")
  String repoURI = null;

  // Logger used for user-facing console output.
  protected final Logger console;

  // Lazily opened repository; see getDatasetRepository().
  private DatasetRepository repo = null;

  public BaseDatasetCommand(Logger console) {
    this.console = console;
  }

  /**
   * Opens (once) and returns the repository described by the storage flags.
   */
  protected DatasetRepository getDatasetRepository() {
    if (repo == null) {
      this.repo = DatasetRepositories.repositoryFor(buildRepoURI());
    }
    return repo;
  }

  /** True when the argument is already a dataset: or view: URI. */
  protected boolean isDataUri(String uriOrName) {
    return (uriOrName.startsWith("dataset:") || uriOrName.startsWith("view:"));
  }

  /** True when the argument is a repo: URI. */
  protected boolean isRepoUri(String uriOrName) {
    return uriOrName.startsWith("repo:");
  }

  /**
   * Loads a view either directly from a dataset/view URI, or by name from
   * the configured repository and namespace.
   */
  protected <E> View<E> load(String uriOrName, Class<E> type) {
    if (isDataUri(uriOrName)) {
      return Datasets.<E, View<E>>load(uriOrName, type);
    } else {
      return getDatasetRepository().load(namespace, uriOrName);
    }
  }

  /**
   * Builds the repository URI from the flags. Precedence: an explicit
   * --repo wins; otherwise exactly one of local/hdfs/hbase may be chosen
   * (each validating its own required flags); Hive is the default.
   */
  @VisibleForTesting
  public String buildRepoURI() {
    if (repoURI != null) {
      if (repoURI.startsWith("repo:")) {
        return repoURI;
      } else {
        return "repo:" + repoURI;
      }
    }
    String uri;
    if (local) {
      Preconditions.checkArgument(!(hdfs || hive || hbase),
          "Only one storage implementation can be selected");
      Preconditions.checkArgument(directory != null,
          "--directory is required when using local files");
      uri = "repo:file:" + directory;
    } else if (hdfs) {
      Preconditions.checkArgument(!(hive || hbase),
          "Only one storage implementation can be selected");
      Preconditions.checkArgument(directory != null,
          "--directory is required when using HDFS");
      uri = "repo:hdfs:" + directory;
    } else if (hbase) {
      Preconditions.checkArgument(!hive,
          "Only one storage implementation can be selected");
      Preconditions.checkArgument(zookeeper != null && !zookeeper.isEmpty(),
          "--zookeeper is required when using HBase");
      uri = "repo:hbase:" + Joiner.on(",").join(zookeeper);
    } else {
      // Default: Hive managed tables, optionally rooted at --directory.
      uri = "repo:hive" + (directory != null ? ":" + directory : "");
    }
    console.trace("Repository URI: " + uri);
    return uri;
  }

  /**
   * Returns the argument unchanged when it is already a dataset/view URI,
   * otherwise builds a dataset URI from the repo URI, namespace and name.
   */
  String buildDatasetUri(String uriOrName) {
    if (isDataUri(uriOrName)) {
      return uriOrName;
    }
    return new URIBuilder(buildRepoURI(), namespace, uriOrName).build().toString();
  }
}
| dlanza1/kite | kite-tools-parent/kite-tools/src/main/java/org/kitesdk/cli/commands/BaseDatasetCommand.java | Java | apache-2.0 | 4,580 |
package net.onrc.onos.core.metrics.web.serializers;
import com.codahale.metrics.json.MetricsModule;
import com.fasterxml.jackson.databind.ObjectMapper;
import net.onrc.onos.core.metrics.web.MetricsObjectResource;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.SerializerProvider;
import org.codehaus.jackson.map.ser.std.SerializerBase;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
/**
* JSON serializer for the Metrics resource.
*/
/**
 * JSON serializer for the Metrics resource. Each Metric kind (timer, gauge,
 * counter, meter, histogram) is emitted as an array of objects with a
 * "name" field plus the Codahale-serialized metric body.
 */
public class MetricsObjectSerializer extends SerializerBase<MetricsObjectResource> {

    /**
     * Public constructor - just calls its super class constructor.
     */
    public MetricsObjectSerializer() {
        super(MetricsObjectResource.class);
    }

    /**
     * Convenience method to serialize a Metrics field.
     *
     * @param jsonGenerator generator to use for serialization
     * @param fieldName name of the top level field
     * @param serializedObjectJSON JSON representation from the Metrics serializer
     * @param object internal resource for the Metric
     * @throws IOException if JSON generation fails.
     */
    private void serializeItem(final JsonGenerator jsonGenerator,
                               final String fieldName,
                               final String serializedObjectJSON,
                               final MetricsObjectResource.BaseMetricObject object)
            throws IOException {
        jsonGenerator.writeStartObject();

        jsonGenerator.writeStringField("name", object.getName());

        // If you write the JSON for the Metric using a StringField, the
        // generator applies an extra set of double quotes and breaks the
        // syntax. You have to use the raw JSON output to get it right.
        // NOTE: writeRaw bypasses the generator's validation, so
        // serializedObjectJSON must already be well-formed JSON.
        jsonGenerator.writeRaw(",\"" + fieldName + "\": " + serializedObjectJSON);

        jsonGenerator.writeEndObject();
    }

    /**
     * Serialize a MetricsObjectResource into JSON. For each kind of Metric,
     * his serializes common ONOS defined fields like name and
     * then calls the Metrics serializer to make the JSON string
     * for the actual Metric.
     *
     * @param metrics resource for all ONOS Metrics
     * @param jsonGenerator generator to use for the JSON output
     * @param serializerProvider unused, needed for Override
     * @throws IOException if any of the JSON serializations fail
     */
    @Override
    @SuppressWarnings("rawtypes")
    public void serialize(final MetricsObjectResource metrics,
                          final JsonGenerator jsonGenerator,
                          final SerializerProvider serializerProvider)
            throws IOException {

        // Rates are reported per second, durations in milliseconds,
        // per the MetricsModule configuration below.
        final ObjectMapper mapper = new ObjectMapper().registerModule(
                new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, false));

        jsonGenerator.writeStartObject();

        // serialize Timers
        jsonGenerator.writeArrayFieldStart("timers");

        for (final MetricsObjectResource.TimerObjectResource timer :
                metrics.getTimers()) {
            final String timerJSON = mapper.writeValueAsString(timer.getTimer());
            serializeItem(jsonGenerator, "timer", timerJSON, timer);
        }
        jsonGenerator.writeEndArray();

        // Serialize Gauges
        jsonGenerator.writeArrayFieldStart("gauges");

        for (final MetricsObjectResource.GaugeObjectResource gauge :
                metrics.getGauges()) {
            final String gaugeJSON = mapper.writeValueAsString(gauge.getGauge());
            serializeItem(jsonGenerator, "gauge", gaugeJSON, gauge);
        }
        jsonGenerator.writeEndArray();

        // Serialize Counters
        jsonGenerator.writeArrayFieldStart("counters");

        for (final MetricsObjectResource.CounterObjectResource counter :
                metrics.getCounters()) {
            final String counterJSON = mapper.writeValueAsString(counter.getCounter());
            serializeItem(jsonGenerator, "counter", counterJSON, counter);
        }
        jsonGenerator.writeEndArray();

        // Serialize Meters
        jsonGenerator.writeArrayFieldStart("meters");

        for (final MetricsObjectResource.MeterObjectResource meter :
                metrics.getMeters()) {
            final String meterJSON = mapper.writeValueAsString(meter.getMeter());
            serializeItem(jsonGenerator, "meter", meterJSON, meter);
        }
        jsonGenerator.writeEndArray();

        // Serialize Histograms
        jsonGenerator.writeArrayFieldStart("histograms");

        for (final MetricsObjectResource.HistogramObjectResource histogram :
                metrics.getHistograms()) {
            final String histogramJSON = mapper.writeValueAsString(histogram.getHistogram());
            serializeItem(jsonGenerator, "histogram", histogramJSON, histogram);
        }
        jsonGenerator.writeEndArray();

        jsonGenerator.writeEndObject();
    }
}
| opennetworkinglab/spring-open | src/main/java/net/onrc/onos/core/metrics/web/serializers/MetricsObjectSerializer.java | Java | apache-2.0 | 4,931 |
package alphasniper.com.github.cardview;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | alphasniper/android-examples | CardView/app/src/test/java/alphasniper/com/github/cardview/ExampleUnitTest.java | Java | apache-2.0 | 324 |
/*
* Copyright 2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.core.rest;
import java.util.Locale;

import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.converter.ConverterFactory;
import org.springframework.stereotype.Component;
/**
*
* {@link ConverterFactory} for converting String to Enums (case-insensitive).
*
* @see ConverterFactory
* @since 7.5
*/
@Component
@SuppressWarnings({ "rawtypes", "unchecked" })
public class StringToEnumConverterFactory implements ConverterFactory<String, Enum> {
private static class StringToEnumConverter<T extends Enum> implements Converter<String, T> {
private Class<T> enumType;
public StringToEnumConverter(Class<T> enumType) {
this.enumType = enumType;
}
public T convert(String source) {
return (T) Enum.valueOf(this.enumType, source.trim().toUpperCase());
}
}
@Override
public <T extends Enum> Converter<String, T> getConverter(Class<T> targetType) {
return new StringToEnumConverter(targetType);
}
}
| b2ihealthcare/snow-owl | core/com.b2international.snowowl.core.rest/src/com/b2international/snowowl/core/rest/StringToEnumConverterFactory.java | Java | apache-2.0 | 1,602 |
const assert = require('assert');
const Promise = require('bluebird');
const moment = require('moment');
const withV4 = require('../support/withV4');
const BucketUtility = require('../../lib/utility/bucket-util');
const checkError = require('../../lib/utility/checkError');
const changeObjectLock = require('../../../../utilities/objectLock-util');
const changeLockPromise = Promise.promisify(changeObjectLock);
const bucketName = 'lockenabledbucket';
const unlockedBucket = 'locknotenabledbucket';
const objectName = 'putobjectretentionobject';
const noRetentionObject = 'objectwithnoretention';
const retainDate = moment().add(1, 'days').toISOString();
const retentionConfig = {
Mode: 'GOVERNANCE',
RetainUntilDate: retainDate,
};
// aws sdk manipulates dates by removing milliseconds
// and converting date strings to date objects
function manipulateDate() {
const noMillis = `${retainDate.slice(0, 19)}.000Z`;
return new Date(noMillis);
}
const expectedConfig = {
Mode: 'GOVERNANCE',
RetainUntilDate: manipulateDate(),
};
const isCEPH = process.env.CI_CEPH !== undefined;
const describeSkipIfCeph = isCEPH ? describe.skip : describe;
describeSkipIfCeph('GET object retention', () => {
withV4(sigCfg => {
const bucketUtil = new BucketUtility('default', sigCfg);
const s3 = bucketUtil.s3;
const otherAccountBucketUtility = new BucketUtility('lisa', {});
const otherAccountS3 = otherAccountBucketUtility.s3;
let versionId;
beforeEach(() => {
process.stdout.write('Putting buckets and objects\n');
return s3.createBucket({
Bucket: bucketName,
ObjectLockEnabledForBucket: true,
}).promise()
.then(() => s3.createBucket({ Bucket: unlockedBucket }).promise())
.then(() => s3.putObject({ Bucket: unlockedBucket, Key: objectName }).promise())
.then(() => s3.putObject({ Bucket: bucketName, Key: noRetentionObject }).promise())
.then(() => s3.putObject({ Bucket: bucketName, Key: objectName }).promise())
.then(res => {
versionId = res.VersionId;
process.stdout.write('Putting object retention\n');
return s3.putObjectRetention({
Bucket: bucketName,
Key: objectName,
Retention: retentionConfig,
}).promise();
})
.catch(err => {
process.stdout.write('Error in beforeEach\n');
throw err;
});
});
afterEach(() => {
process.stdout.write('Removing object lock\n');
return changeLockPromise([{ bucket: bucketName, key: objectName, versionId }], '')
.then(() => {
process.stdout.write('Emptying and deleting buckets\n');
return bucketUtil.empty(bucketName);
})
.then(() => bucketUtil.empty(unlockedBucket))
.then(() => bucketUtil.deleteMany([bucketName, unlockedBucket]))
.catch(err => {
process.stdout.write('Error in afterEach');
throw err;
});
});
it('should return AccessDenied putting retention with another account',
done => {
otherAccountS3.getObjectRetention({
Bucket: bucketName,
Key: objectName,
}, err => {
checkError(err, 'AccessDenied', 403);
done();
});
});
it('should return NoSuchKey error if key does not exist', done => {
s3.getObjectRetention({
Bucket: bucketName,
Key: 'thiskeydoesnotexist',
}, err => {
checkError(err, 'NoSuchKey', 404);
done();
});
});
it('should return NoSuchVersion error if version does not exist', done => {
s3.getObjectRetention({
Bucket: bucketName,
Key: objectName,
VersionId: '000000000000',
}, err => {
checkError(err, 'NoSuchVersion', 404);
done();
});
});
it('should return MethodNotAllowed if object version is delete marker',
done => {
s3.deleteObject({ Bucket: bucketName, Key: objectName }, (err, res) => {
assert.ifError(err);
s3.getObjectRetention({
Bucket: bucketName,
Key: objectName,
VersionId: res.VersionId,
}, err => {
checkError(err, 'MethodNotAllowed', 405);
done();
});
});
});
it('should return InvalidRequest error getting retention to object ' +
'in bucket with no object lock enabled', done => {
s3.getObjectRetention({
Bucket: unlockedBucket,
Key: objectName,
}, err => {
checkError(err, 'InvalidRequest', 400);
done();
});
});
it('should return NoSuchObjectLockConfiguration if no retention set',
done => {
s3.getObjectRetention({
Bucket: bucketName,
Key: noRetentionObject,
}, err => {
checkError(err, 'NoSuchObjectLockConfiguration', 404);
done();
});
});
it('should get object retention', done => {
s3.getObjectRetention({
Bucket: bucketName,
Key: objectName,
}, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.Retention, expectedConfig);
changeObjectLock([
{ bucket: bucketName, key: objectName, versionId }], '', done);
});
});
});
});
| scality/S3 | tests/functional/aws-node-sdk/test/object/getRetention.js | JavaScript | apache-2.0 | 6,001 |
# Copyright 2022 The T5 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocessors for T5 Tasks."""
# TODO(adarob): Move some of the more general preprocessors to seqio.
import collections
import functools
import math
import re
from typing import Callable, Mapping, Optional, Sequence, Union
import uuid
from absl import logging
import babel
import gin
import seqio
import tensorflow.compat.v2 as tf
# We disable no-value-for-parameter since the seqio.map_over_dataset leads to
# a false positive when seeds are provided.
# pylint:disable=no-value-for-parameter

# Default parallelism for the tf.data `map` calls throughout this module.
AUTOTUNE = tf.data.experimental.AUTOTUNE

# Type alias for a single example: feature name -> tensor.
FeatureType = Mapping[str, tf.Tensor]

# Re-export common seqio preprocessors so Tasks can reference them from this
# module alongside the T5-specific preprocessors below.
rekey = seqio.preprocessors.rekey
tokenize = seqio.preprocessors.tokenize
@seqio.map_over_dataset
def translate(x, source_language, target_language):
  """Convert a translation dataset to a text2text pair.

  For example, say the dataset returns examples of this format:
    {'de': 'Das ist gut.', 'en': 'That is good.'}
  If source_language = 'de', target_language = 'en', then the outputs will have
  the format:
    {'inputs': 'translate German to English: Das ist gut.',
     'targets': 'That is good.'}

  Args:
    x: an example to process.
    source_language: source language code (e.g. 'en') to translate from.
    target_language: target language code (e.g. 'de') to translate to.

  Returns:
    A preprocessed example with the format listed above.
  """
  # Babel locales are keyed by bare 2-letter codes; extended codes such as
  # 'zh-cn' are truncated to their first two characters, with a warning so the
  # loss of detail is visible in logs.
  for language in (source_language, target_language):
    if language != language[:2]:
      logging.warning(
          'Extended language code %s not supported. Falling back on %s.',
          language, language[:2]
      )
  source_name = babel.Locale(source_language[:2]).english_name
  target_name = babel.Locale(target_language[:2]).english_name
  task_prefix = 'translate {}'.format(source_name)
  task_suffix = ' to {}: '.format(target_name)
  return {
      'inputs': tf.strings.join([task_prefix, task_suffix, x[source_language]]),
      'targets': x[target_language],
  }
@seqio.map_over_dataset
def summarize(x, article_key, summary_key):
  """Convert a summarization dataset to a text2text pair.

  For example, say the dataset returns examples of this format:
    {'article': <article>, 'highlights': <summary>}
  If article_key = 'article', summary_key = 'highlights', then the outputs will
  have the format:
    {'inputs': 'summarize': <article>, 'targets': <summary>}

  Args:
    x: an example to process.
    article_key: the feature key for the article to summarize.
    summary_key: the feature key for the target summary.

  Returns:
    A preprocessed example with the format listed above.
  """
  # Prefix the article with the task name, joined by a single space.
  inputs = tf.strings.join(['summarize:', x[article_key]], separator=' ')
  return {'inputs': inputs, 'targets': x[summary_key]}
# Unicode ranges for characters in non-spaced languages.
# https://en.wikipedia.org/wiki/Category:Writing_systems_without_word_boundaries
# https://en.wikipedia.org/wiki/Han_unification#Unicode_ranges
# https://linguistics.stackexchange.com/questions/6131
# Each entry is a regex character-class fragment ('<lo>-<hi>'); they are
# concatenated into a single character class by `pad_nonspaced_languages`.
NON_SPACED_LANGUAGE_RANGES = (
    '\u1000-\u104f',  # Burmese
    '\u4e00-\u9fff',  # CJK Unified Ideographs
    '\u3400-\u4dbf',  # CJK Unified Ideographs Extension A
    '\uf900-\ufaff',  # CJK Compatibility Ideographs
    '\u2e80-\u2eff',  # CJK Radicals Supplement
    '\u31c0-\u31ef',  # CJK Strokes
    '\u3000-\u303f',  # CJK Symbols and Punctuation
    '\u3040-\u309f',  # Japanese Hiragana
    '\u30a0-\u30ff',  # Japanese Katakana
    '\ua980-\ua9df',  # Javanese
    '\u1780-\u17ff',  # Khmer
    '\u19e0-\u19ff',  # Khmer Symbols
    '\u0e80-\u0eff',  # Lao
    '\u1980-\u19df',  # Tai Lue
    '\u1a20-\u1aaf',  # Tai Tham
    '\u0e00-\u0e7f',  # Thai
    '\u0f00-\u0fff',  # Tibetan
)
@seqio.map_over_dataset
def pad_nonspaced_languages(x, text_key='text'):
  """Pad non-spaced languages with spaces around each character.

  Args:
    x: an example to process.
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.

  Returns:
    A preprocessed example.
  """
  output = dict(x)
  # One regex character class covering every non-spaced-language range.
  char_class = '[{}]'.format(''.join(NON_SPACED_LANGUAGE_RANGES))
  # Surround each matching character with spaces.
  spaced = tf.strings.regex_replace(
      output[text_key], u'({})'.format(char_class), r' \1 ')
  # Collapse any run of whitespace down to a single space.
  output[text_key] = tf.strings.regex_replace(spaced, r'\s+', ' ')
  return output
def _pad_punctuation(text):
  """Surrounds every non-word character with spaces, collapsing whitespace."""
  spaced = tf.strings.regex_replace(text, r'(\W)', r' \1 ')
  return tf.strings.regex_replace(spaced, r'\s+', ' ')
def _string_join(lst):
  """Joins strings with single spaces, collapsing consecutive spaces."""
  joined = tf.strings.join(lst, separator=' ')
  return tf.strings.regex_replace(joined, r'\s+', ' ')
def trivia_qa(dataset):
  """Convert a TriviaQA example to multiple flattened examples.

  TriviaQA produces examples with this form:
    {'entity_pages': {dict of wiki entities},
     'search_results': <dict of web search results>,
     'answer': {dict of all answers}, 'question': <question>,
     'question_id': <question_id>, 'question_source': <question_source>}

  This function will return flattened examples of the format:
    {'inputs': 'question: <question> context: <article>'
     'targets': 'answer: <sampled answer>'}

  Args:
    dataset: a tf.data.Dataset to process.

  Returns:
    A preprocessed tf.data.Dataset with the format listed above.
  """
  def triviaqa_question_answer_context(x):
    """Extracts matched contexts and answers.

    Returns all matched (question-context, answer) pairs.

    Args:
      x: A tfds sample.

    Returns:
      Flattened samples: (question-context, answer).
    """
    # Pool all available evidence: wiki articles and/or web search snippets.
    contexts = []
    if 'entity_pages' in x:
      contexts.append(x['entity_pages']['wiki_context'])
    if 'search_results' in x:
      contexts.append(x['search_results']['search_context'])
    contexts = tf.concat(contexts, 0)
    q = _pad_punctuation(x['question'])
    answers = x['answer']['normalized_aliases']
    # One loop iteration per (context, answer) combination.  The TensorArrays
    # accumulate, per combination: whether the answer occurs in the context,
    # the (padded) answer string, and the joined question-context input.
    combination_size = tf.size(answers)*tf.size(contexts)
    find_answers = tf.TensorArray(
        tf.bool, size=combination_size, dynamic_size=True)
    selected_answers = tf.TensorArray(
        tf.string, size=combination_size, dynamic_size=True)
    join_q_c = tf.TensorArray(
        tf.string, size=combination_size, dynamic_size=True)
    def cond_fn(i, find_answers, selected_answers, join_q_c):
      del find_answers, selected_answers, join_q_c # Unused
      return tf.less(i, combination_size)
    def body_fn(i, find_answers, selected_answers, join_q_c):
      """Find answers from contexts and join."""
      # Decode the flat loop index i into a (context, answer) index pair.
      context_idx = tf.math.floordiv(i, tf.size(answers))
      answer_idx = tf.math.mod(i, tf.size(answers))
      a = _pad_punctuation(answers[answer_idx])
      a_ = tf.strings.join(['.*', a, '.*'])
      c = _pad_punctuation(contexts[context_idx])
      # Case-insensitive containment test via a full-match wildcard regex.
      find_a = tf.strings.regex_full_match(
          tf.strings.lower(c),
          tf.strings.lower(a_))
      find_answers = find_answers.write(i, find_a)
      selected_answers = selected_answers.write(i, a)
      join_q_c_str = _string_join(['question:', q, 'context:', c])
      join_q_c = join_q_c.write(i, join_q_c_str)
      return (i + 1, find_answers, selected_answers, join_q_c)
    _, find_answers, selected_answers, join_q_c = tf.while_loop(
        cond_fn,
        body_fn,
        loop_vars=[
            tf.constant(0), find_answers, selected_answers,
            join_q_c
        ])
    find_answers = find_answers.stack()
    selected_answers = selected_answers.stack()
    join_q_c = join_q_c.stack()
    # Keep only the combinations where the answer was actually found in the
    # context.
    selected_answers = tf.boolean_mask(selected_answers, find_answers)
    selected_join_q_c = tf.boolean_mask(join_q_c, find_answers)
    return selected_join_q_c, selected_answers
  def my_fn(x):
    """Create TriviaQA example."""
    join_q_c, a = triviaqa_question_answer_context(x)
    return {
        'inputs': join_q_c,
        'targets': a
    }
  dataset = dataset.map(my_fn, num_parallel_calls=AUTOTUNE)
  # Each input example produced a batch of matched pairs; flatten them into
  # individual examples.
  return dataset.unbatch()
@seqio.map_over_dataset
def squad(x, include_context=True):
  """Convert SQuAD examples to a text2text pair.

  SQuAD produces examples with this form:
    {'id': <id>, context': <article>, 'question': <question>,
     'answers': { 'text': [<n answers>] }}

  This function will return examples of the format:
    {'inputs': 'question: <question> context: <article>',
     'targets': '<answer_0>',
     'id': <id>, 'question': <question>, 'context': <context>,
     'answers': [<n answers>]},

  Args:
    x: an example to process.
    include_context: a boolean

  Returns:
    A preprocessed example with the format listed above.
  """
  # Pad punctuation on all text fields so tokenization is consistent.
  answers = _pad_punctuation(x['answers']['text'])
  question = _pad_punctuation(x['question'])
  context = _pad_punctuation(x['context'])
  if include_context:
    inputs = _string_join(['question:', question, 'context:', context])
  else:
    inputs = _string_join(['squad trivia question:', question])
  # The first answer becomes the target; all answers are kept for evaluation.
  return {
      'inputs': inputs,
      'targets': answers[0],
      'id': x['id'],
      'context': context,
      'question': question,
      'answers': answers
  }
def _span_answer(context, answer_text):
  """Finds start/end indices of answer_text in context after space tokenization.

  If answer_tokens is not a sublist of context_tokens, returns empty string.

  Args:
    context: 0-d string tensor
    answer_text: 0-d string

  Returns:
    A string tensor of the form 'start: <i> end: <j>' with inclusive,
    space-tokenized indices, or the empty string when the answer cannot be
    found in the context.
  """
  def space_tok(s):
    """Replace non-word chars with space then split on space."""
    s = tf.strings.regex_replace(s, r'\W', ' ')
    return tf.strings.split(input=[s], sep=' ').values
  def find_subseq(n, h):
    """Finds index of needle subsequence inside haystack.

    Args:
      n: 1-d tensor
      h: 1-d tensor same type as n

    Returns:
      Index of start of n if found; otherwise -1.
    """
    l_n = tf.size(n)
    l_h = tf.size(h)
    found = -1
    # Scan every candidate start position, including the final one at
    # l_h - l_n, so an answer ending at the last token of the context is
    # matched.  (The previous exclusive bound `l_h - l_n` missed that case.)
    for i in tf.range(0, l_h - l_n + 1):
      if tf.reduce_all(tf.equal(h[i:i+l_n], n)):
        found = i
        break
    return found
  answer_tokens = space_tok(answer_text)
  context_tokens = space_tok(context)
  start = find_subseq(answer_tokens, context_tokens)
  end = start + tf.size(answer_tokens) - 1
  # Just take the first candidate that matches exactly.
  if tf.equal(start, -1):
    return ''
  return tf.strings.format('start: {} end: {}', [start, end])
def squad_span_space_tokenized(dataset):
  """Convert SQuAD examples to a text2text pair with span output.

  SQuAD produces examples with this form:
    {'context': <article>, 'question': <question>,
     'answers': { 'text': [<all answers>] }}

  This function returns examples with the format
    {'inputs': 'context: <article> question: <question>',
     'targets': 'start: <start_index> end: <end_index>'}
  where <start_index> and <end_index> specify the space-tokenized span
  start/end indices. Both <start_index> and <end_index> are included in
  the answer. In the case where the tokenized answer is
  not found in the tokenized context, the example is skipped.

  Args:
    dataset: a tf.data.Dataset to process.

  Returns:
    A preprocessed tf.data.Dataset with the format listed above.
  """
  def add_span_targets(ex):
    """Replace the answer text with its space-tokenized span indices."""
    output = dict(ex)
    output['targets'] = _span_answer(ex['context'], ex['targets'])
    return output

  dataset = squad(dataset).map(add_span_targets, num_parallel_calls=AUTOTUNE)
  # _span_answer returns '' when the answer is not found; drop those examples.
  return dataset.filter(lambda ex: tf.strings.length(ex['targets']) > 0)
def random_split_text(dataset,
                      text_key='text',
                      min_words_per_segment=16,
                      max_words_per_segment=512,
                      max_words_total=8192):
  """Randomly split single-string examples into multiple examples each.

  Segment lengths are chosen according to a log-uniform distribution.
  Each incoming string is chopped into multiple equal-length examples
  with the last one possibly being shorter.

  If the input string is longer than max_words_total, then we use one random
  chunk and discard the rest. This may help with model stability.

  The intended use case is to break up long text examples for use in
  unsupervised transfer-learning.

  We don't really want to use this preprocessor for any dataset which has a
  well-defined evaluation procedure. If we apply this preprocessor e.g. in an
  MT component, then the evaluation job will randomly split text when
  evaluating and the BLEU will get funky.

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the key text_key
    text_key: a string
    min_words_per_segment: an integer
    max_words_per_segment: an integer
    max_words_total: an integer

  Returns:
    a dataset
  """
  def random_chunk(x, chunk_size, seed):
    """Pick a random chunk of a 1d Tensor.

    The tensor is divided into chunks of length chunk_size, with the last
    chunk being potentially smaller. A random chunk is returned.

    Args:
      x: a 1d tf.Tensor.
      chunk_size: an integer.
      seed: int32 [2]-Tensor, the random seed.

    Returns:
      a 1d tf.Tensor with length <= chunk_size.
    """
    size = tf.size(x)
    num_chunks = tf.maximum(1, (size - 1) // chunk_size + 1)
    chunk_num = tf.random.stateless_uniform(
        [],
        seed=seed,
        minval=0,
        maxval=num_chunks,
        dtype=tf.int32)
    # Slicing past the end is safe: the last chunk is simply shorter.
    return x[chunk_size * chunk_num:chunk_size * (chunk_num + 1)]
  @seqio.map_over_dataset(num_seeds=2)
  def my_fn(x, seeds):
    """Split one string into multiple strings.

    Args:
      x: a feature dictionary
      seeds: an int32 Tensor, shaped (2, 2), the random seeds.

    Returns:
      a feature dictionary
    """
    text = x[text_key]
    words = tf.strings.split([text]).values
    if max_words_total:
      # Cap total work by keeping a single random window of at most
      # max_words_total words and discarding the rest.
      words = random_chunk(words, max_words_total, seed=seeds[0])
    n_words = tf.size(words)
    # first pick a length (number of words per segment); log-uniform so both
    # short and long segment lengths are well represented.
    length = tf.cast(
        tf.exp(
            tf.random.stateless_uniform(
                [],
                minval=math.log(min_words_per_segment),
                maxval=math.log(max_words_per_segment),
                seed=seeds[1],
            )
        ),
        tf.int32)
    # Pad to a multiple of length, then use tf.reshape to split up the words
    # into num_segments segments each of the given length.
    num_segments = tf.cast(
        tf.math.ceil(
            tf.cast(n_words, tf.float32) / tf.cast(length, tf.float32)
        ),
        tf.int32)
    padding = num_segments * length - n_words
    words = tf.pad(words, [[0, padding]])
    words = tf.reshape(words, [-1, length])
    # Finally, join with spaces and strip. The padding turns into a bunch of
    # spaces that get stripped out.
    words = tf.strings.reduce_join(words, axis=1, separator=' ')
    return {text_key: tf.strings.strip(words)}
  # my_fn yields a batch of segments per input example; unbatch to one
  # example per segment.
  return my_fn(dataset).unbatch()
def split_text_to_words(dataset, text_key='text', min_num_words=2):
  """Adds a `words` feature (whitespace-split) and drops short examples."""
  def add_words(ex):
    output = dict(ex)
    output['words'] = tf.strings.split([ex[text_key]]).values
    return output

  dataset = dataset.map(add_words, num_parallel_calls=AUTOTUNE)
  return dataset.filter(lambda ex: tf.size(ex['words']) >= min_num_words)
def fill_in_the_blank(dataset,
                      text_key='text',
                      label='fill: '):
  """Create a dataset consisting of fill-in-the-blank text examples.

  The input examples should have a key text_key associated with a tf.string
  value.

  The output examples have keys 'inputs' and 'targets'.

  The input string is split on whitespace to form a sequence of words.
  This sequence is chopped randomly into segments of one or more words.
  Alternate segments are included in the inputs and targets, with a special
  word 'X' marking a missing segment.

  The given label is prepended to the inputs. Each input string produces two
  examples - one the inverse of the other. Inputs with less than two words
  are dropped.

  EXAMPLE:

  input:
  {
    'text': 'The fat cat sat on the mat.'
  }
  outputs:
  {
    'inputs': 'fill: The fat X the X'
    'targets': 'X cat sat on X mat.'
  }
  {
    'inputs': 'fill: X cat sat on X mat.'
    'targets': 'The fat X the X'
  }

  Args:
    dataset: a tf.data.Dataset
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    label: a string, the label to prepend to the inputs.

  Returns:
    a tf.data.Dataset
  """
  @seqio.map_over_dataset(num_seeds=3)
  def my_fn(x, seeds):
    """Generates two preprocessed examples that are roughly inverses.

    Args:
      x: an example dict with text pre-split in `words` feature.
      seeds: an int32 Tensor, shaped (3, 2), the random seeds.

    Returns:
      an example dict with two inputs and two targets, one for each resulting
      preprocessed example.
    """
    words = x['words']
    n_words = tf.size(words)
    # First select the break probability. We pick this on a log-uniform
    # distribution between 1/(n_words + 1) and 1/2. This means that some
    # sequences will be chopped roughly and others finely.
    min_log_p_break = -tf.math.log(tf.cast(n_words, tf.float32) + 2.0)
    max_log_p_break = -tf.math.log(2.0)
    p_break = tf.exp(
        tf.random.stateless_uniform(
            [],
            minval=min_log_p_break,
            maxval=max_log_p_break,
            seed=seeds[0])
    )
    # craffel@ says that there may be bugs in random.uniform making it not
    # really uniform. This doesn't seem horribly important here, but may
    # need another look.
    # A boolean per inter-word gap: True means a new segment starts there.
    breaks = tf.less(
        tf.random.stateless_uniform([n_words - 1], seed=seeds[1]),
        p_break)
    def one_random_break():
      # Fallback when no gap was sampled: force exactly one break so that
      # both segments are non-empty.
      pos = tf.random.stateless_uniform(
          [],
          minval=0,
          maxval=n_words - 1,
          dtype=tf.int32,
          seed=seeds[2])
      return tf.one_hot(pos, n_words - 1,
                        dtype=tf.bool, on_value=True, off_value=False)
    breaks = tf.cond(
        tf.math.reduce_any(breaks), lambda: breaks, one_random_break)
    # Prepend True so the first word always starts a segment; segment ids then
    # alternate 0/1 via a cumulative break count taken mod 2.
    breaks = tf.concat([[True], breaks], axis=0)
    word_to_seq_id = tf.math.mod(tf.math.cumsum(tf.cast(breaks, tf.int32)), 2)
    # separators:
    #   if in your segment: ' '
    #   if break to other segment: ' X'
    #   else: ''
    results = []
    for seq_id in [0, 1]:
      in_my_seq = tf.equal(word_to_seq_id, seq_id)
      separator_strings = tf.where(
          in_my_seq,
          ' ',
          tf.where(breaks, ' X', '')
      )
      word_strings = tf.where(in_my_seq, words, '')
      all_strings = tf.stack([separator_strings, word_strings], axis=1)
      # Join everything into one string, then drop the single leading space.
      results.append(tf.strings.substr(
          tf.strings.reduce_join(all_strings), 1, tf.int32.max))
    inputs = tf.stack([tf.strings.join([label, results[0]]),
                       tf.strings.join([label, results[1]])])
    targets = tf.stack([results[1], results[0]])
    return {'inputs': inputs, 'targets': targets}
  dataset = split_text_to_words(dataset, text_key, min_num_words=2)
  # Each input yields a pair of mutually-inverse examples; unbatch them.
  return my_fn(dataset).unbatch()
def fill_in_the_blank_sized(
    dataset,
    size_bins=(1, 2, 4, 8, 16, 32, 64, 128, 256, 512),
    text_key='text',
    label='fill: '):
  """Fill in the blank preprocessor that labels blank with a binned size.

  The actual blank size is sampled uniformly from the inclusive range of the
  min and max bin. The blank is then filled in with the closest bin size to
  the actual blank size.

  Args:
    dataset: a tf.data.Dataset, the dataset to preprocess.
    size_bins: a list, a list of blank sizes to select from when labelling the
      blank.
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    label: a string, the label to prepend to the inputs.

  Returns:
    a tf.data.Dataset
  """
  bins = sorted(size_bins)
  @seqio.map_over_dataset(num_seeds=2)
  def my_fn(x, seeds):
    """Apply transformation."""
    words = x['words']
    n_words = tf.size(words)
    # Sample the actual blank length, capped by the example length and the
    # largest bin.
    blank_size = tf.random.stateless_uniform(
        [],
        minval=bins[0],
        maxval=tf.math.minimum(n_words, bins[-1]),
        dtype=tf.dtypes.int32,
        seed=seeds[0])
    # Label the blank with the bin closest to the sampled size.
    bin_delta = tf.math.abs(bins - blank_size)
    bin_ = tf.gather(bins, tf.argmin(bin_delta))
    # Choose a uniformly-random start position for the blank.
    blank_start = tf.random.stateless_uniform(
        [],
        minval=0,
        maxval=tf.math.maximum(0, n_words-blank_size) + 1,
        dtype=tf.dtypes.int32,
        seed=seeds[1])
    pre_blank = tf.strings.reduce_join(words[0:blank_start], separator=' ')
    post_blank = tf.strings.reduce_join(
        words[blank_start+blank_size:], separator=' ')
    # The blank marker encodes the binned size, e.g. '_32_'.
    blank = tf.strings.format('_{}_', bin_)
    # We strip to handle cases where blank is at beginning or end.
    input_ = tf.strings.strip(
        tf.strings.join([pre_blank, blank, post_blank], ' '))
    input_ = tf.strings.join([label, input_])
    target = tf.strings.reduce_join(
        words[blank_start:blank_start+blank_size], separator=' ')
    return {
        'inputs': tf.strings.strip(input_),
        'targets': tf.strings.strip(target)}
  dataset = split_text_to_words(dataset, text_key, min_num_words=2)
  # Filter out examples with fewer words than the minimum.
  dataset = dataset.filter(lambda x: tf.size(x['words']) >= bins[0])
  return my_fn(dataset)
def neighboring_pairs(dataset, text_key='text', reuse_sentences=True):
  """Create a dataset consisting of neighboring sentence pairs.

  The input examples should have a key text_key associated with a tf.string
  value.

  The output examples have keys 'first' and 'second'.

  We only take sentence pairs from within the same line since lines seem to
  represent paragraph-like structures in our text datasets. Empty lines and
  1-sentence lines will thus be ignored.

  The argument reuse_sentences determines whether a sentence can be used as
  both the first and last element in the pair. For example, the input with
  sentences A,B,C,D will return (A,B),(B,C),(C,D) if reuse_sentences is True
  and (A,B),(C,D) if reuse_sentences is False.

  Args:
    dataset: a tf.data.Dataset
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    reuse_sentences: a boolean

  Returns:
    a tf.data.Dataset
  """
  def split_by_lines(dataset):
    """Splits text in dataset by line, removing empty lines."""
    def my_fn(text):
      lines = tf.strings.split([text], sep='\n').values
      return tf.strings.strip(lines)
    dataset = dataset.map(my_fn, num_parallel_calls=AUTOTUNE)
    dataset = dataset.unbatch()
    return dataset.filter(lambda x: tf.strings.length(x) > 0)
  def split_into_pairs(line):
    """Split a given text example into pairs of neighboring sentences."""
    # TODO(mmatena): Use better sentence segmentation.
    # A random UUID is used as the sentence separator so it cannot collide
    # with text that already occurs in the line.
    sep = str(uuid.uuid4())
    # Insert the separator after each run of sentence-ending punctuation.
    sentences = tf.strings.regex_replace(line, r'((?:\.|\!|\?)+)', r'\1' + sep)
    sentences = tf.strings.strip(tf.strings.split([sentences], sep).values)
    if reuse_sentences:
      firsts = sentences[:-1]
      seconds = sentences[1:]
    else:
      # Non-overlapping pairs: (0,1), (2,3), ...
      firsts = sentences[:-1:2]
      seconds = sentences[1::2]
    return {
        'first': firsts,
        'second': seconds,
    }
  def example_len(x):
    # Length of the shorter member of the pair; zero means one side is empty.
    return tf.math.minimum(
        tf.strings.length(x['first']), tf.strings.length(x['second']))
  # Split by lines.
  dataset = dataset.map(lambda x: x[text_key], num_parallel_calls=AUTOTUNE)
  dataset = split_by_lines(dataset)
  # Get pairs of neighboring sentences.
  dataset = dataset.map(split_into_pairs, num_parallel_calls=AUTOTUNE)
  dataset = dataset.unbatch()
  # Remove examples with empty strings.
  dataset = dataset.filter(lambda x: example_len(x) > 0)
  return dataset
@seqio.map_over_dataset
def glue(x, benchmark_name, label_names, feature_names=None, id_key='idx'):
  """Convert a dataset from glue to text2text examples.

  This function uses the feature names from the dataset to unpack examples into
  a format amenable for a text2text problem. For example, consider the Quora
  Question Pairs (QQP) benchmark, which would suggest
  benchmark_name="qqp"
  label_names=['not_duplicate', 'duplicate']
  For QQP, a typical example might look like
  {
      "question1": "Why do I easily get bored of my friends?",
      "question2": "Why do I get bored of friends so quickly?",
      "label": 1,
      "idx": 10,
  }

  This example would be transformed to
  {
       "inputs": (
           "qqp question1: Why do I easily get bored of my friends? question2: "
           "Why do I get bored of my friends so quickly?"
       ),
       "targets": "duplicate",
      "idx": 10,
  }

  Args:
    x: an example to process.
    benchmark_name: the name of the GLUE benchmark for this dataset.
    label_names: a list of label names corresponding to class index.
    feature_names: an optional ordered list of feature names. If provided,
      features will be ordered in this way in the output. If not provided, all
      features (except 'idx' and 'label') will be used, sorted by name.
    id_key: str, key for id in the dataset. If not provided, 'idx' will be used.
      if None, no id will be added to the dataset.

  Returns:
    A preprocessed example.
  """
  # If an ordering is not provided, sort feature keys to ensure a consistent
  # order.
  feature_keys = (
      feature_names or sorted(set(x.keys()).difference(['label', 'idx'])))
  # Pack keys (formatted as " key: ") and corresponding text feature
  strs_to_join = []
  for key in feature_keys:
    strs_to_join.append('{}:'.format(key))
    strs_to_join.append(x[key])
  # Add benchmark name at the start
  strs_to_join.insert(0, benchmark_name)
  label_name = tf.cond(
      # When no label is provided (label == -1), use "<unk>"
      tf.equal(x['label'], -1),
      lambda: tf.constant('<unk>'),
      # Otherwise grab the label text from label_names
      lambda: tf.gather(label_names, x['label']),
  )
  joined = tf.strings.join(strs_to_join, separator=' ')
  ex = {}
  # benchmark_name is a Python string, so this branch is resolved at graph
  # construction time, not per example.
  if benchmark_name == 'multirc':
    # Remove HTML markup.
    joined = tf.strings.regex_replace(joined, '<br>', ' ')
    joined = tf.strings.regex_replace(joined, '<(/)?b>', '')
    # Store the data index in the returned example (used by eval)
    ex['idx/paragraph'] = x['idx']['paragraph']
    ex['idx/question'] = x['idx']['question']
    ex['idx/answer'] = x['idx']['answer']
  else:
    # Store the data index in the returned example (used by eval)
    if id_key:
      ex['idx'] = x[id_key]
  ex['inputs'] = joined
  ex['targets'] = label_name
  return ex
@seqio.map_over_dataset
def stsb(x):
  """Convert STSB examples to text2text format.

  STSB maps two sentences to a floating point number between 1 and 5
  representing their semantic similarity. Since we are treating all tasks as
  text-to-text tasks we need to convert this floating point number to a string.

  The vast majority of the similarity score labels in STSB are in the set
  [0, 0.2, 0.4, ..., 4.8, 5.0]. So, we first round the number to the closest
  entry in this set, and then we convert the result to a string (literally e.g.
  "3.4"). This converts STSB roughly into a 26-class classification dataset.

  For example, a typical example from STSB might look like
  {
      "sentence1": "Three more US soldiers killed in Afghanistan",
      "sentence2": "NATO Soldier Killed in Afghanistan",
      "label": 1.8,
  }

  This example would be transformed to
  {
       "inputs": (
           "stsb sentence1: Three more US soldiers killed in Afghanistan "
           "sentence2: NATO Soldier Killed in Afghanistan"
       ),
       "targets": "1.8",
  }

  Args:
    x: an example to process.

  Returns:
    A preprocessed example.
  """
  inputs = tf.strings.join(
      ['stsb sentence1:', x['sentence1'], 'sentence2:', x['sentence2']],
      separator=' ')
  # Snap the score to the nearest multiple of 0.2 and render it with exactly
  # one decimal place, e.g. 1.83 -> "1.8".
  targets = tf.as_string(tf.round(x['label'] * 5) / 5, precision=1)
  return {'inputs': inputs, 'targets': targets, 'idx': x['idx']}
@seqio.map_over_dataset
def wsc(x):
  """Convert WSC examples to text2text format.

  WSC includes a sentence along with 2 'spans': the first denoting a noun and
  the other a pronoun. The 'label' specifies whether or not the pronoun is
  referencing the noun. This preprocessor puts ' * ' around the noun and ' # '
  around the pronoun.

  For example, a typical example from WSC might look like
  {
      'text': 'This is a test sentence .',
      'span1_text': 'test',
      'span1_index': 3,
      'span2_text': 'This',
      'span2_index': 0,
      'label': 0
  }

  This example would be transformed to
  {
      'inputs': 'wsc text: # This # is a * test * sentence .',
      'targets': 'False'
  }

  Args:
    x: an example to process.

  Returns:
    A preprocessed example.
  """
  def _mark_span(text, span_str, span_idx, mark):
    # Build a regex that skips span_idx space-delimited tokens ('N'), then
    # captures the span text ('W'), and wrap the captured span in the mark
    # character on both sides.
    pattern_tmpl = r'^((?:\S+\s){N})(W)'
    pattern = tf.strings.regex_replace(pattern_tmpl, 'N',
                                       tf.as_string(span_idx))
    pattern = tf.strings.regex_replace(pattern, 'W', span_str)
    return tf.strings.regex_replace(text, pattern, r'\1{0} \2 {0}'.format(mark))
  text = x['text']
  text = _mark_span(text, x['span1_text'], x['span1_index'], '*')
  # Compensate for 2 added "words" added in previous step.
  span2_index = x['span2_index'] + 2 * tf.cast(
      x['span1_index'] < x['span2_index'], tf.int32)
  text = _mark_span(text, x['span2_text'], span2_index, '#')
  # Add benchmark name at the start
  strs_to_join = ['wsc', 'text:', text]
  label_name = tf.cond(
      # When no label is provided (label == -1), use "<unk>"
      tf.equal(x['label'], -1),
      lambda: tf.constant('<unk>'),
      # Otherwise use False/True.
      lambda: tf.gather(['False', 'True'], x['label']))
  joined = tf.strings.join(strs_to_join, separator=' ')
  return {'inputs': joined, 'targets': label_name, 'idx': x['idx']}
@gin.configurable
def record(dataset):
  """Convert ReCoRD examples to text2text examples.

  ReCoRD contains a passage, query containing a '@placeholder' string, and a
  set of entities that are the possible values of the placeholder. Each train
  and validation example will have a list of answers, any of which would be
  considered correct.

  For example, a typical example from ReCoRD might look like
  {
    'passage': 'This is the passage.',
    'query': 'A @placeholder is a bird.',
    'entities': ['penguin', 'potato', 'pigeon'],
    'answers': ['penguin', 'pigeon'],
  }
  which this preprocessor would turn into the following two examples:
  {
    'inputs': 'record query: A @placeholder is a bird. entities: penguin, '
              'potato, pigeon passage: This is the passage.',
    'targets': 'penguin',
  }
  and
  {
    'inputs': 'record query: A @placeholder is a bird. entities: penguin, '
              'potato, pigeon passage: This is the passage.',
    'targets': 'potato',
  }

  Args:
    dataset: a tf.data.Dataset to process.

  Returns:
    a tf.data.Dataset
  """

  def process_answers(x):
    """Helper fn to get one example per answer."""
    ex = x.copy()
    num_answers = tf.size(ex['answers'])

    # Tile every feature along a new leading dimension so that, after the
    # `unbatch` below, each answer yields its own example. Examples with no
    # answers (e.g. the test split) keep a single copy because n_duplicates
    # is clamped to at least 1.
    def duplicate_along_first_dim(t):
      n_duplicates = tf.math.maximum(num_answers, 1)
      return tf.broadcast_to(
          t, shape=tf.concat([[n_duplicates], tf.shape(t)], axis=0))

    for k, v in x.items():
      if k != 'idx':
        ex[k] = duplicate_along_first_dim(v)
    # When there are no answers, substitute a single '<unk>' target.
    ex['targets'] = tf.cond(
        tf.greater(num_answers, 0), lambda: x['answers'],
        lambda: tf.constant(['<unk>']))
    # 'idx' is a nested dict, so it is duplicated field by field.
    ex['idx'] = {
        'passage': duplicate_along_first_dim(x['idx']['passage']),
        'query': duplicate_along_first_dim(x['idx']['query']),
    }
    return ex

  def my_fn(x):
    """Converts the processed example to text2text strings."""
    passage = x['passage']
    # Convert '@highlight' markers into sentence separators, keeping any
    # terminal punctuation that precedes the marker.
    passage = tf.strings.regex_replace(passage,
                                       r'(\.|\?|\!|\"|\')\n@highlight\n',
                                       r'\1 ')
    passage = tf.strings.regex_replace(passage, r'\n@highlight\n', '. ')

    strs_to_join = [
        'record query:', x['query'], 'entities:',
        tf.strings.reduce_join(x['entities'], separator=', '), 'passage:',
        passage
    ]
    joined = tf.strings.join(strs_to_join, separator=' ')

    ex = {}

    # Store the data index in the returned example (used by eval)
    ex['idx/passage'] = x['idx']['passage']
    ex['idx/query'] = x['idx']['query']

    ex['inputs'] = joined
    # Note that "answers" has been converted to a single string by the
    # process_answers function.
    ex['targets'] = x['targets']
    # Pass-through full list of answers for eval
    ex['answers'] = x['answers']
    return ex

  dataset = dataset.map(process_answers, num_parallel_calls=AUTOTUNE)
  # Flatten the per-answer leading dimension added by process_answers.
  dataset = dataset.unbatch()
  return dataset.map(my_fn, num_parallel_calls=AUTOTUNE)
def multi_translate(dataset, source_language, target_language):
  """Convert a multi-translate dataset to a text2text pair.

  Each example is expected to carry a 'translations' feature holding parallel
  'language' and 'translation' lists, e.g.:
  {
    ...
    'translations': {
      'language': ['de', 'fr', 'en'],
      'translation': ['Das ist gut.', 'Ca c'est bon', 'That is good.']
    },
    ...
  }
  With source_language='de' and target_language='en', this produces:
    {'inputs': 'translate German to English: Das is gut.',
     'targets': 'That is good.'}
  Examples missing either language are dropped.

  Args:
    dataset: a tf.data.Dataset to process.
    source_language: source language code (e.g. 'en') to translate from.
    target_language: target language code (e.g. 'de') to translate to.

  Returns:
    A preprocessed tf.data.Dataset with the format listed above.
  """

  def _has_both_languages(ex):
    # Keep only examples where both requested languages are present.
    langs = ex['translations']['language']
    return tf.logical_and(
        tf.reduce_any(tf.equal(langs, source_language)),
        tf.reduce_any(tf.equal(langs, target_language)))

  def _extract_pair(ex):
    # Look up the position of each language and pull out its translation.
    langs = ex['translations']['language']
    texts = ex['translations']['translation']
    src_pos = tf.squeeze(tf.where(tf.equal(langs, source_language)))
    tgt_pos = tf.squeeze(tf.where(tf.equal(langs, target_language)))
    return {
        source_language: texts[src_pos],
        target_language: texts[tgt_pos],
    }

  dataset = dataset.filter(_has_both_languages)
  dataset = dataset.map(_extract_pair, num_parallel_calls=AUTOTUNE)
  # Delegate the final 'translate X to Y:' formatting to `translate`.
  return translate(dataset, source_language, target_language)
@seqio.map_over_dataset
def definite_pronoun_resolution_simple(x, label='wsc:'):
  """Converts DPR examples to a simple text to text format.

  Example input:
  {
    'sentence': 'Bob asked Tom if he can lend some money.',
    'pronoun': 'he',
    'candidates': ['Bob', 'Tom'],
    'label': 1,
  }
  Example output:
  {
    'inputs': 'wsc: Bob asked Tom if *he* can lend some money.'
    'targets': 'Tom',
  }

  Args:
    x: an example to process.
    label: a string, the label to prepend to the inputs.

  Returns:
    A preprocessed example.
  """
  # Only the first occurrence of the pronoun is highlighted
  # (replace_global=False); that is the occurrence to be resolved.
  pronoun_pattern = tf.strings.join([r' (', x['pronoun'], r')( |\.|,)'])
  highlighted = tf.strings.regex_replace(
      x['sentence'],
      pronoun_pattern,
      r' *\1*\2',
      replace_global=False,
  )
  return {
      'inputs': tf.strings.join([label, highlighted], separator=' '),
      'targets': x['candidates'][x['label']],
  }
def next_sentence_prediction(dataset,
                             text_key='text',
                             reuse_sentences=True,
                             label_sentences=False,
                             p_neighbors=0.5,
                             label='nsp: ',
                             buffer_size=50000):
  """Create a dataset containing a next sentence prediction objective.

  The input examples should have a key text_key associated with a tf.string
  value.
  The output examples have keys 'inputs' and 'targets'.

  EXAMPLE OUTPUTS:
  {
    input: "nsp: sentence1: The man went to the store. sentence2: Penguins are "
           "flightless birds.",
    target: "not_next"
  }

  The "sentence1:" and "sentence2:" labels will be omitted if label_sentences
  is False.

  Args:
    dataset: a tf.data.Dataset
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    reuse_sentences: a boolean, see docs for `neighboring_pairs` for more info.
    label_sentences: a boolean
    p_neighbors: a float between 0 and 1, the probability that a sentence pair
      will be neighbors.
    label: a string, the label to prepend to the inputs.
    buffer_size: an int, the size of the shuffle buffer used to get
      non-neighboring sentences.

  Returns:
    a tf.data.Dataset
  """
  sentence1_label, sentence2_label = '', ''
  if label_sentences:
    sentence1_label, sentence2_label = 'sentence1: ', 'sentence2: '
  # Sentinel value emitted for pairs containing an empty sentence; such
  # examples are filtered out at the end.
  empty = tf.constant('', dtype=tf.string, shape=[1])

  dataset = neighboring_pairs(
      dataset, text_key=text_key, reuse_sentences=reuse_sentences)
  # Batch two neighboring pairs together so that "not_next" examples can be
  # created by swapping the second sentences between the two pairs.
  dataset = dataset.shuffle(buffer_size).batch(2, drop_remainder=True)

  def some_are_empty(*tensors):
    """See if at least one tensor has shape [0]."""
    empty = [tf.equal(tf.size(t), 0) for t in tensors]
    return tf.reduce_any(empty)

  @seqio.map_over_dataset(num_seeds=1)
  def my_fn(x, seed):
    """Function to be applied to each example in dataset."""
    # With probability p_neighbors keep true neighbors ("next"); otherwise
    # swap the two second-sentences to make non-neighbors ("not_next").
    # Uses a stateless uniform draw so the decision is reproducible per seed.
    use_neighbors = (
        tf.random.stateless_uniform(shape=[], seed=seed) < p_neighbors
    )
    firsts, seconds = tf.cond(
        use_neighbors,
        lambda: (x['first'], x['second']),
        lambda: (x['first'], tf.stack([x['second'][1], x['second'][0]])),
    )
    relation_label = tf.cond(
        use_neighbors,
        lambda: 'next',
        lambda: 'not_next',
    )

    inputs = []
    for i in range(2):
      first_inputs = firsts[i]
      second_inputs = seconds[i]

      # Default arguments bind the current loop values (avoids late-binding
      # closure capture).
      def create_examples(first_i=first_inputs, second_i=second_inputs):
        return tf.strings.join([
            label,
            sentence1_label,
            first_i,
            ' ',
            sentence2_label,
            second_i,
        ])

      # Emit the empty sentinel if either sentence is empty; filtered below.
      inpt = tf.cond(
          some_are_empty(first_inputs, second_inputs),
          lambda: empty,
          create_examples,
      )
      inputs.append(tf.strings.strip(inpt))

    inputs = tf.reshape(inputs, [-1])
    # Both generated examples in the batch share the same relation label.
    targets = tf.reshape(2 * [relation_label], [-1])
    return {'inputs': inputs, 'targets': targets}

  dataset = my_fn(dataset).unbatch()

  def example_len(x):
    return tf.math.minimum(
        tf.strings.length(x['inputs']), tf.strings.length(x['targets']))

  # Remove examples with empty strings.
  return dataset.filter(lambda x: example_len(x) > 0)
@seqio.map_over_dataset
def lm(x):
  """Plain language modeling objective: text becomes targets, inputs empty.

  Given:
    {"text": "Here is some text."}
  produces:
    {"inputs": "", "targets": "Here is some text."}

  Args:
    x: an example dict with a 'text' feature.

  Returns:
    A dict with an empty 'inputs' string and the original text as 'targets'.
  """
  return {'targets': x['text'], 'inputs': ''}
def _wsc_inputs(x):
  """Given an example from SuperGLUE WSC, compute the 'inputs' value.

  The output will look like a fill in the blank with the pronoun blanked out.
  For example, the text
    'Mitchell asked Tom if he could lend some money.'
  would be transformed to
    'Mitchell asked Tom if X could lend some money.'

  Args:
    x: A dict that is an example from the WSC task of SuperGLUE.

  Returns:
    A scalar string tensor.
  """
  words = tf.strings.split([x['text']], sep=' ').values

  # We would need some special logic to handle the case where the pronoun is
  # the first or last word in the text. None of the examples in WSC seem to
  # have this, so we are ignoring these cases.
  with tf.control_dependencies([
      tf.assert_greater(x['span2_index'], 0),
      tf.assert_less(x['span2_index'], tf.size(words)),
  ]):
    pronoun_index = tf.identity(x['span2_index'])

  def create_input():
    # Runtime check that the word at pronoun_index really is the annotated
    # pronoun before splicing in the 'X' placeholder.
    with tf.control_dependencies(
        [tf.assert_equal(words[pronoun_index], x['span2_text'])]):
      return tf.strings.join(
          [
              tf.strings.reduce_join(words[:pronoun_index], separator=' '),
              'X',
              tf.strings.reduce_join(
                  words[pronoun_index + 1:], separator=' '),
          ],
          separator=' ',
      )

  # Handle some special cases.
  # NOTE(review): these `if tf.equal(...)` branches evaluate a Tensor in a
  # Python conditional, which relies on eager execution or autograph tracing
  # — confirm this preprocessor is never used in pure graph mode.
  if tf.equal(
      x['text'],
      'The boy continued to whip the pony , and eventually the pony threw him over. John laughed out quite loud. \"Good for him,\" he said. '
  ):
    return (
        'The boy continued to whip the pony , and eventually the pony threw '
        'him over. John laughed out quite loud. "Good for X ," he said.'
    )

  # Using the span2_index, we get 'use' instead of 'it'.
  if tf.equal(
      x['text'],
      'When they had eventually calmed down a bit , and had gotten home, Mr. Farley put the magic pebble in an iron safe . Some day they might want to use it , but really for now, what more could they wish for?'
  ):
    return (
        'When they had eventually calmed down a bit , and had gotten home, '
        'Mr. Farley put the magic pebble in an iron safe . Some day they might '
        'want to use X , but really for now, what more could they wish for?'
    )

  return create_input()
def wsc_simple(dataset,
               label='wsc:',
               correct_referent_only=False):
  """Converts SuperGLUE WSC examples to a simple text to text format.

  A typical example from SuperGLUE WSC might look like
  {
    'text': 'Mitchell asked Tom if he could lend some money.',
    'span1_text': 'Tom',
    'span2_text': 'he',
    'span2_index': 4,
  }
  This will be transformed to
  {
    'inputs': 'wsc: Bob asked Tom if *he* can lend some money.'
    'targets': 'Tom',
  }

  The targets will always be the text of the referent regardless of whether it
  is the correct referrent of the pronoun. Thus for training purposes, please
  set `correct_referent_only` to be True.

  Args:
    dataset: a tf.data.Dataset
    label: a string, the label to prepend to the inputs.
    correct_referent_only: a bool, whether to filter out examples for which the
      targets is not the correct referent of the pronoun.

  Returns:
    a tf.data.Dataset
  """

  def _to_text2text(ex):
    """Builds the text2text example for one WSC record."""
    # _wsc_inputs replaces the pronoun with ' X '; swap it back in with '*'
    # highlighting around the original pronoun text.
    highlighted = tf.strings.regex_replace(
        _wsc_inputs(ex), r' X ', ' *' + ex['span2_text'] + '* ')
    return {
        'inputs': tf.strings.join([label, highlighted], separator=' '),
        # The reshape is necessary as otherwise the tensor has unknown rank.
        'targets': tf.reshape(ex['span1_text'], shape=[]),
        'label': ex.get('label', 0),
        'idx': ex['idx'],
    }

  if correct_referent_only:
    # Keep only examples whose referent is labeled correct (for training).
    dataset = dataset.filter(
        lambda ex: tf.cast(ex.get('label', False), tf.bool))
  return dataset.map(_to_text2text, num_parallel_calls=AUTOTUNE)
@seqio.map_over_dataset
def wnli_simple(x, label='wsc:'):
  """Converts GLUE WNLI examples to a simple text to text format.

  A typical example from WNLI might look like:
  {
    'sentence1': 'The fish ate the worm. It was tasty.',
    'sentence2': 'The worm was tasty.',
    'label': 1,
  }

  This will be transformed to:
  {
    'inputs': 'wsc: The fish ate the worm. *It* was tasty.',
    'targets': 'The worm',
    'premise': 'The fish ate the worm. It was tasty.,
    'hypothesis': 'The worm was tasty.',
    'label': 1,
  }

  This preprocessor has been manually verified to produce reasonable WSC
  examples for the dev and test sets. Tasks using this preprocessor should only
  be used eval and not train.

  Args:
    x: an example to process.
    label: a string, the label to prepend to the inputs.

  Returns:
    A preprocessed example.
  """
  pronouns = ['he', 'she', 'they', 'it', 'her', 'his', 'their', 'them', 'him']
  PronounMatch = collections.namedtuple(  # pylint: disable=invalid-name
      'PronounMatch', ['score', 'index_in_premise', 'candidate'])

  def split_clean(s):
    """Returns array of words with punctuation and capitalization removed."""
    words = [
        re.sub(r'(\.|,|\?|\!)$', '', w) for w in s.strip().lower().split(' ')
    ]
    return [w for w in words if w]

  def get_all_pronoun_indices(s):
    # Indices of every pronoun occurrence in the (cleaned) premise words.
    return [i for i, w in enumerate(s) if w in pronouns]

  def get_post_match_size(hypothesis, words):
    """Returns len of largest prefix of words that is substr of hypothesis."""
    hypothesis = ' '.join(hypothesis)
    for i in range(len(words)):
      if ' '.join(words[:i + 1]) not in hypothesis:
        return i
    return len(words)

  def get_pre_match_size(hypothesis, words):
    """Returns len of largest suffix of words that is substr of hypothesis."""
    # Implemented by reversing both word lists and reusing the prefix version.
    return get_post_match_size(hypothesis[::-1], words[::-1])

  def get_pronoun_match(premise, hypothesis, index):
    """Return the PronounMatch for the pronoun at `index` in premise."""
    # Score = how much premise context around the pronoun also appears in the
    # hypothesis; the candidate is whatever the hypothesis has in place of
    # the pronoun between the matched context pieces.
    pre, post = premise[:index], premise[index + 1:]
    pre_match_size = get_pre_match_size(hypothesis, pre)
    post_match_size = get_post_match_size(hypothesis, post)
    score = pre_match_size + post_match_size
    candidate = ''
    if score:
      pre_match = pre[-pre_match_size or len(pre):]
      post_match = post[:post_match_size]
      m = re.search(' '.join(pre_match + [r'(.+)'] + post_match),
                    ' '.join(hypothesis))
      if not m:
        # Handle cases where the candidate is at the start of the hypthesis.
        m = re.search(' '.join([r'^(.+)'] + post_match), ' '.join(hypothesis))
      if not m:
        # Handle cases where the candidate is at the end of the hypthesis.
        m = re.search(' '.join(pre_match + [r'(.+)$']), ' '.join(hypothesis))
      if m:
        candidate = m.group(1)
    return PronounMatch(
        score=score, index_in_premise=index, candidate=candidate)

  def get_best_pronoun_match(premise, hypothesis):
    """Returns the match for the pronoun in the premise to disambiguate."""
    pronoun_indices = get_all_pronoun_indices(premise)
    scoredpronouns = [
        get_pronoun_match(premise, hypothesis, index)
        for index in pronoun_indices
    ]
    return max(scoredpronouns, key=lambda x: x.score)

  def highlight(sentence, index):
    # Surround the word at `index` with '*', keeping trailing punctuation
    # outside the highlighting.
    words = sentence.split(' ')
    word = words[index]
    if word[-1] in ['.', ',', '!', '?']:
      highlighted = '*{}* {}'.format(word[:-1], word[-1])
    else:
      highlighted = '*{}*'.format(word)
    return ' '.join(words[:index] + [highlighted] + words[index + 1:])

  def make_nonpossessive(word):
    # WSC simple targets will never be possessive, even when the pronoun is
    # possesive.
    if word.endswith("'"):
      return word[:-1]
    elif word.endswith("'s"):
      return word[:-2]
    else:
      return word

  def clean_up(candidate):
    words = candidate.split(' ')
    # Sometimes the candidate extraction messes up, and the candidate will
    # start with the start of the hypothesis and extend to the correct
    # candidate. We can try to clean up the candidate in some cases by
    # removing everything up to the last article in the sentence.
    article_index = max(
        [words.index(art) for art in {'a', 'an', 'the'} if art in words] or [0])
    return ' '.join(words[article_index:])

  def process_candidate(candidate, hypothesis):
    """Handles special cases and adds proper punctuation/capitalization."""
    candidate = clean_up(candidate)
    # Re-locate the cleaned candidate in the original hypothesis to recover
    # its original casing and punctuation.
    pattern = '({})'.format(' '.join([
        r'{}(?:\.|,|\?|\!)?'.format(re.escape(c)) for c in candidate.split(' ')
    ]))
    m = re.search(pattern, hypothesis, re.IGNORECASE)
    if not m:
      raise ValueError(
          'Unable to find candidate "{}" in hypothesis "{}".'.format(
              candidate, hypothesis))
    candidate = m.group(1)
    if candidate and candidate[-1] in ['.', ',', '!', '?']:
      candidate = candidate[:-1]
    return make_nonpossessive(candidate)

  def compute_inputs_and_targets(premise, hypothesis):
    """Compute inputs and targets for WNLI simple."""
    premise = tf.compat.as_text(premise.numpy())
    hypothesis = tf.compat.as_text(hypothesis.numpy())
    match = get_best_pronoun_match(
        split_clean(premise), split_clean(hypothesis))
    targets = process_candidate(match.candidate, hypothesis)
    inputs = '{} {}'.format(label, highlight(premise, match.index_in_premise))
    return inputs, targets

  # The heuristics above are pure Python, so they run via tf.py_function;
  # its outputs have unknown rank, hence the reshapes below.
  inputs, targets = tf.py_function(
      compute_inputs_and_targets,
      inp=[x['sentence1'], x['sentence2']],
      Tout=[tf.string, tf.string])
  return {
      # The reshape is necessary as otherwise the tensor has unknown rank.
      'inputs': tf.reshape(inputs, shape=[]),
      'targets': tf.reshape(targets, shape=[]),
      'premise': x['sentence1'],
      'hypothesis': x['sentence2'],
      'label': x.get('label', 0),
      'idx': x['idx'],
  }
def rank_classification(
    ds: tf.data.Dataset,
    inputs_fn: Callable[[FeatureType], tf.Tensor],
    targets_fn: Callable[[FeatureType], tf.Tensor],
    is_correct_fn: Callable[[FeatureType], tf.Tensor],
    weight_fn: Optional[Callable[[FeatureType], tf.Tensor]] = None,
    mode: str = 'eval',
    passthrough_feature_keys: Optional[Sequence[str]] = None,
) -> tf.data.Dataset:
  """Prepare dataset for rank classification scoring.

  Intended to be used with `rank_classification` postprocessor and metric.

  `inputs_fn` and `targets_fn` must return the 'inputs' and 'targets' features,
  respectively, for each possible class label given the raw example features.
  'is_correct_fn' must return the 'is_correct' feature, a boolean for whether
  each label is correct.

  In 'train' mode, only the inputs / targets marked correct will be produced.
  In 'eval' mode, all inputs / targets will be produced.
  In 'fewshot_eval', all inputs / targets will be produced as a single batch.

  Each output example will also be given a unique 'idx' feature. The first dim
  is a sequential index for the input example and the second is the index of
  the generated output for it. E.g., the second output example from the fourth
  input example would be `[3, 1]`.

  To be clear, consider the following arguments:

  inputs_fn=lambda ex: ex['prefix'],
  targets_fn=lambda ex: ex['suffix'],
  is_correct_fn=lambda ex: tf.one_hot(ex['label'], num_classes)
  weight_fn=lambda ex: ex['weight']

  Given the following example:

  {
    'prefix': ['The farmland needed ', 'The farmland wanted '],
    'suffix': ['water', 'cows'],
    'label': 0,
    'weight': 1.0,
  }

  the preprocessor would return:

  [{
      'idx': [0, 0],
      'inputs': 'The farmland needed ',
      'targets': 'water',
      'is_correct': True,
      'weight': 1.0
   },
   {
      'idx': [0, 1],
      'inputs': 'The farmland wanted ',
      'targets': 'cows',
      'is_correct': False,
      'weight': 1.0
  }]

  With mode set to 'train', it would return only the first example,
  since it uses the correct label. With mode set to 'fewshot_eval', it would
  return both examples in a single batch.

  Args:
    ds: a tf.data.Dataset to preprocess.
    inputs_fn: a callable that returns the 'inputs' features for each label
      given the input example.
    targets_fn: a callable that returns the 'targets' features for each label
      given the input example.
    is_correct_fn: a callable that returns the 'label' feature. May be an int32
      scalar or 1-D Tensor.
    weight_fn: a callable that returns the 'weight' feature (float32 scalar).
    mode: A string, one of 'train', 'eval', or 'fewshot_eval'. 'train' produces
      only the correct example(s) based on the label value(s). 'eval' produces
      an example for every possible class value, sequentially. 'fewshot_eval'
      produces an example for every possible class value, batched together for
      each input example.
    passthrough_feature_keys: a sequence of feature names that should be passed
      through to the output of this preprocessor. eg: ["starburst", "tokens"]

  Returns:
    A tf.data.Dataset containing 'idx', inputs', 'targets', and 'is_correct'.
  """
  if mode not in ('train', 'eval', 'fewshot_eval'):
    raise ValueError(
        "Mode must be one of 'train', 'eval', or 'fewshot_eval'. "
        f"Got '{mode}'.")

  def make_examples(idx, ex):
    # Produce one entry per candidate label; all tensors returned by the
    # three callables must have matching sizes.
    inputs = inputs_fn(ex)
    targets = targets_fn(ex)
    is_correct = tf.cast(is_correct_fn(ex), tf.bool)

    tf.debugging.assert_equal(
        tf.size(is_correct), [tf.size(inputs), tf.size(targets)],
        '`inputs_fn`, `targets_fn`, and `is_correct_fn` must return the same '
        'size tensors.')

    num_out = tf.size(is_correct)
    # idx pair: [input-example index, candidate index].
    in_idx = tf.fill([num_out], tf.cast(idx, tf.int32))
    out_idx = tf.range(num_out)

    output = {
        'idx': tf.stack([in_idx, out_idx], 1),
        'inputs': inputs,
        'targets': targets,
        'is_correct': is_correct,
    }

    if passthrough_feature_keys is not None:
      for feature_name in passthrough_feature_keys:
        # NOTE(review): list multiplication requires `targets` to have a
        # statically-known length — confirm for dynamic candidate counts.
        output[feature_name] = [ex[feature_name]] * len(targets)
    if weight_fn is not None:
      output['weight'] = tf.fill(tf.shape(is_correct), weight_fn(ex))
      output['weight'] = tf.cast(output['weight'], tf.float32)

    return output

  ds = ds.enumerate()
  ds = ds.map(make_examples, num_parallel_calls=AUTOTUNE)
  if mode != 'fewshot_eval':
    # One example per candidate; 'fewshot_eval' keeps candidates batched.
    ds = ds.unbatch()
  if mode == 'train':
    ds = ds.filter(lambda ex: ex['is_correct'])
  return ds
def rank_classification_formatter(
    ds: tf.data.Dataset,
    inputs_formats: Union[str, Sequence[str]],
    targets_formats: Union[str, Sequence[str]],
    mode: str = 'eval',
    label_key: str = 'label',
    weight_key: Optional[str] = None) -> tf.data.Dataset:
  """Create 'inputs' and 'targets' strings for ranking classification.

  Intended to be used with `rank_classification` postprocessor and metric.

  Inputs will be formatted by filling in the feature values in the
  `inputs_formats` and `targets_formats` strings.

  Nested features can be accessed by concatenating the features using forward
  slash. For eg: if sub-sub-key is nested under sub-key, which is nested under
  key, then sub-sub-key can be accessed using key/sub-key/sub-sub-key.

  In 'eval' mode, a separate example will be produced for each targets / inputs
  format string. These can then be scored to find the one with the highest
  likelihood. The `rank_classification` postprocessor and metric allow you to
  evaluate with this technique.

  In 'train' mode, only the targets / inputs format string indexed by the
  label(s) will be produced. In 'eval' mode, all inputs / targets will be
  produced.

  Each input example will also be given a unique, sequential index called
  'idx'.

  For example, with arguments:

  ```
  inputs_format='{premise} What is the {question}? X',
  targets_formats=[
      'I think {choice1}.',
      'I think {choice2}.'
  ],
  mode='eval'
  ```

  given the input:

  {
    'premise': 'The farmland needed irrigation.',
    'question': 'effect',
    'choice1' : 'a canal was constructed',
    'choice2': 'the crops grew tall',
    'label': 0,
  }

  the preprocessor would return:

  [{
     'idx': 0,
     'inputs': 'The farmland needed irrigation. What is the effect? X',
     'targets': 'I think a canal was constructed.',
     'is_correct': True
   },
   {
     'idx': 0,
     'inputs': 'The farmland needed irrigation. What is the effect? X',
     'targets': 'I think the crops grew tall.',
     'is_correct': False
  }]

  With `mode='train'`, it would return only the first example,
  since it uses the correct label.

  With `mode='fewshot_eval'`, it would return both examples in a single batch.

  Args:
    ds: a tf.data.Dataset to preprocess.
    inputs_formats: A string or a list of strings to format with feature
      values to produce 'inputs'. Feature keys should be surrounded by curly
      braces to be replaced.
    targets_formats: A string or a list of strings to format with feature
      values to produce 'targets', one for each possible class value. Feature
      keys should be surrounded by curly braces to be replaced.
    mode: A string, one of 'train', 'eval', or 'fewshot_eval'. 'train'
      produces only the correct example(s) based on the label value(s). 'eval'
      produces an example for every possible class value, sequentially.
      'fewshot_eval': produces an example for every possible class value,
      batched together for each input example.
    label_key: A string, the feature key for the integer label value(s).
    weight_key: A string, the feature key for the float example weight.

  Returns:
    A tf.data.Dataset containing 'idx', inputs', 'targets', and 'is_correct'.
  """
  # Broadcast whichever of inputs/targets formats is a single string so both
  # become equal-length sequences, one entry per class.
  if (isinstance(inputs_formats, (list, tuple)) and
      isinstance(targets_formats, (list, tuple))):
    if len(inputs_formats) != len(targets_formats):
      raise ValueError(
          f'The inputs_formats ({len(inputs_formats)}) and '
          f'targets_formats ({len(targets_formats)}) are both instances '
          'of list or tuple, but do not have matching lengths.')
  elif isinstance(inputs_formats, (list, tuple)):
    num_classes = len(inputs_formats)
    targets_formats = [targets_formats] * num_classes
  elif isinstance(targets_formats, (list, tuple)):
    num_classes = len(targets_formats)
    inputs_formats = [inputs_formats] * num_classes
  else:
    raise ValueError(
        'One of the inputs_formats and targets_formats has to '
        f'be a list or tuple, inputs_formats: {inputs_formats}, '
        f'target_formats: {targets_formats}.')

  def _format_str(features, fmt):
    # Replace every '{key}' (or '{key/sub-key}') placeholder in `fmt` with
    # the corresponding (string) feature value.
    keys = set(re.findall(r'{(\S+)}', fmt))
    s = fmt
    for k in keys:
      value = features
      for subkey in k.split('/'):
        value = value[subkey]
      if not isinstance(value, tf.Tensor):
        raise ValueError(
            f'Final value of key \'{k}\' must be a tf.string. '
            f'Got: {type(value).__name__}')
      tf.debugging.assert_type(
          value, tf.string,
          f'Final value of key \'{k}\' must be a tf.string. '
          f'Got: {value.dtype.name}')
      s = tf.strings.regex_replace(s, '{%s}' % k, value)
    return s

  def _apply_formats(features, fmts):
    return [_format_str(features, fmt) for fmt in fmts]

  def _is_correct_fn(ex):
    # One-hot over classes; supports multi-label examples by reducing over
    # the label dimension when present.
    labels = ex[label_key]
    is_correct = tf.one_hot(labels, num_classes, on_value=True, off_value=False)
    if labels.shape.rank:
      is_correct = tf.math.reduce_any(is_correct, axis=0)
    return is_correct

  def _weight_fn(ex):
    return ex[weight_key]

  return rank_classification(
      ds,
      inputs_fn=functools.partial(_apply_formats, fmts=inputs_formats),
      targets_fn=functools.partial(_apply_formats, fmts=targets_formats),
      is_correct_fn=_is_correct_fn,
      weight_fn=None if weight_key is None else _weight_fn,
      mode=mode)
@seqio.map_over_dataset
def parse_tsv(line, field_names=None, field_delim='\t'):
  """Splits TSV lines into dict examples mapping field name to string value.

  Args:
    line: an example containing a comma/tab-delimited string.
    field_names: a list of strings, the ordered names of the TSV fields.
      Defaults to "inputs" and "targets".
    field_delim: a string, the delimiter to split on e.g. ',' for csv.

  Returns:
    A feature dict mapping field name to string value.
  """
  names = field_names or ['inputs', 'targets']
  # decode_csv returns one string tensor per field; missing fields fall back
  # to the empty-string defaults.
  values = tf.io.decode_csv(
      line,
      record_defaults=[''] * len(names),
      field_delim=field_delim,
      use_quote_delim=False)
  return dict(zip(names, values))
@seqio.map_over_dataset
def preprocess_tsv(line,
                   field_delim='\t',
                   num_fields=2,
                   inputs_format='{0}',
                   targets_format='{1}',
                   field_names=None):
  r"""Parse tab-delimited strings into inputs and targets.

  This function takes a tf.data.Dataset of strings, each of which contains
  tab-delimited fields. The function returns a tf.data.Dataset of feature
  dictionaries of the form {"inputs": string, "targets": string}.

  inputs_format contains a template string and field numbers or names used to
  produce the "inputs" string.
  targets_format contains a template string and field numbers or names used to
  produce the "targets" string.

  Example (field numbers):
    The input dataset contains the lines:
    "6,7,42"
    "2,9,18"
    preprocess_tsv(dataset,
                   field_delim=',',
                   inputs_format='numerator: {2} denominator: {1}',
                   targets_format='quotient: {0}'
    would produce a dataset containing the dictionaries:
    {"inputs": "numerator: 42 denominator: 7", "targets": "quotient: 6"}
    {"inputs": "numerator: 18 denominator: 9", "targets": "quotient: 2"}

  Example (field names):
    The input dataset contains the lines:
    "6,7,42"
    "2,9,18"
    preprocess_tsv(dataset,
                   field_delim=',',
                   field_names=['quot', 'denom', 'numer'],
                   inputs_format='numerator: {numer} denominator: {denom}',
                   targets_format='quotient: {quot}'
    would produce a dataset containing the dictionaries:
    {"inputs": "numerator: 42 denominator: 7", "targets": "quotient: 6"}
    {"inputs": "numerator: 18 denominator: 9", "targets": "quotient: 2"}

  Args:
    line: an example containing comma/tab-delimited string.
    field_delim: a string, the delimiter to split on e.g. ',' for csv.
    num_fields: an integer
    inputs_format: a string, the desired output format with placeholders for
      field values. Field-number placeholders may be multi-digit, e.g. '{10}'.
    targets_format: a string, the desired output format with placeholders for
      field values.
    field_names: a list of strings, the ordered names of the TSV fields.
      defaults to None (i.e. use field number in *_format)

  Returns:
    A feature dict with 'inputs' and 'targets' features.
  """

  def _format_part_with_field_numbers(part, field_values):
    # Fix: match multi-digit field indices (e.g. '{10}'); the previous
    # single-digit pattern silently treated '{10}' as a literal string.
    found = re.findall(r'{(\d+)}', part)
    if found:
      return field_values[int(found[0])]
    else:
      return part

  def _format_part_with_field_names(part, field_names, field_values):
    # `part` is either a '{name}' placeholder or literal text; map the
    # placeholder to the value at the name's position.
    field_names_re = '|'.join(['{{({})}}'.format(x) for x in field_names])
    found = re.findall(field_names_re, part)
    if found:
      pos = field_names.index(''.join(found[0]))
      return field_values[int(pos)]
    else:
      return part

  def _format(format_string, field_names, field_values):
    """Split the template on placeholders, substitute, and rejoin."""
    if field_names is None:
      parts = [
          _format_part_with_field_numbers(p, field_values)
          # Keep the split pattern in sync with the findall pattern above.
          for p in re.split(r'({\d+})', format_string)
      ]
    else:
      field_names_re = '(' + '|'.join(['{{{}}}'.format(x) for x in field_names
                                      ]) + ')'
      parts = [
          _format_part_with_field_names(p, field_names, field_values)
          for p in re.split(field_names_re, format_string)
      ]
    return tf.strings.join(parts)

  field_values = tf.io.decode_csv(
      line,
      record_defaults=[''] *
      (num_fields if field_names is None else len(field_names)),
      field_delim=field_delim,
      use_quote_delim=False)
  return {
      'inputs': _format(inputs_format, field_names, field_values),
      'targets': _format(targets_format, field_names, field_values)
  }
# ======================Token Preprocessors=====================================


# TODO(adarob): Add a test.
def span_corruption(dataset,
                    sequence_length,
                    output_features,
                    mean_noise_span_length=3.0,
                    noise_density=0.15,
                    input_feature_key='inputs',
                    merge_examples_to_reduce_padding=True,
                    reserved_for_packing=None):
  """Final pretraining objective used in Raffel et al., 2019.

  Args:
    dataset: A tf.data.Dataset with dictionaries containing the key
      `input_feature_key`.
    sequence_length: dict mapping of feature key to int length for that
      feature.
    output_features: mapping of keys to features.
    mean_noise_span_length: the mean number of tokens per masked span per
      example.
    noise_density: what fraction of the tokens to mask.
    input_feature_key: which feature to use from the dataset as the input text
      tokens.
    merge_examples_to_reduce_padding: if True, combines multiple input
      examples to reduce padding.
    reserved_for_packing: if specified, reduces the desired inputs length by
      the specified amount to enable multiple examples to be packed together
      downstream.

  Returns:
    a dataset
  """
  inputs_length = sequence_length[input_feature_key]
  if reserved_for_packing:
    inputs_length -= reserved_for_packing

  # Compute the pre-corruption segment length and the resulting targets
  # length such that the corrupted inputs fill `inputs_length` tokens
  # (each noise span costs one extra sentinel token on each side).
  input_length, targets_length = random_spans_helper(
      extra_tokens_per_span_inputs=1,
      extra_tokens_per_span_targets=1,
      inputs_length=inputs_length,
      mean_noise_span_length=mean_noise_span_length,
      noise_density=noise_density)

  if sequence_length['targets'] < targets_length:
    raise ValueError(
        f'Expected targets length for span corruption ({targets_length}) is '
        f'greater than configured targets length '
        f"({sequence_length['targets']})")

  ds = dataset
  # Sample a random chunk to avoid biasing toward document starts.
  ds = select_random_chunk(
      ds,
      output_features=output_features,
      feature_key='targets',
      max_length=65536)
  if merge_examples_to_reduce_padding:
    # Concatenate token sequences across examples before re-splitting, so
    # segments are full-length and padding is minimized.
    ds = reduce_concat_tokens(ds, feature_key='targets', batch_size=128)
  ds = split_tokens(
      ds,
      feature_key='targets',
      min_tokens_per_segment=None,
      max_tokens_per_segment=input_length)
  # Replace sampled noise spans with sentinels in the inputs and emit the
  # dropped spans (with matching sentinels) as the targets.
  ds = denoise(
      ds,
      output_features,
      inputs_fn=noise_span_to_unique_sentinel,
      targets_fn=nonnoise_span_to_unique_sentinel,
      noise_density=noise_density,
      noise_mask_fn=functools.partial(
          random_spans_noise_mask,
          mean_noise_span_length=mean_noise_span_length),
      input_feature_key=input_feature_key)
  return ds
# TODO(adarob): Add a test.
def iid_denoising(dataset, sequence_length, output_features):
  """Baseline pretraining objective used in Raffel et al., 2019.

  Tokens are masked i.i.d. at a 15% rate; masked spans become sentinels in
  the inputs and are emitted (with matching sentinels) as the targets.
  """
  chunked = select_random_chunk(
      dataset,
      output_features=output_features,
      feature_key='targets',
      max_length=65536)
  # Concatenate across examples before splitting to minimize padding.
  merged = reduce_concat_tokens(chunked, feature_key='targets', batch_size=128)
  segments = split_tokens_to_inputs_length(
      merged,
      output_features=output_features,
      sequence_length=sequence_length)
  return denoise(
      segments,
      output_features,
      inputs_fn=noise_span_to_unique_sentinel,
      targets_fn=nonnoise_span_to_unique_sentinel,
      noise_density=0.15,
      noise_mask_fn=iid_noise_mask)
def prefix_lm(dataset, sequence_length, output_features):
  """Prefix language modeling objective used in Raffel et al. 2019.

  Each example is split at a random position: the leading tokens become the
  inputs and the remaining tokens become the targets (noise_density=0.5 with
  a random-prefix noise mask, where the "noise" prefix is kept as inputs).

  Args:
    dataset: a tf.data.Dataset of tokenized examples with a 'targets' key.
    sequence_length: dict mapping feature key to int length for that feature.
    output_features: mapping of feature keys to features.

  Returns:
    a preprocessed tf.data.Dataset.
  """
  chunked = select_random_chunk(
      dataset,
      output_features=output_features,
      feature_key='targets',
      max_length=65536)
  chunked = split_tokens_to_inputs_length(
      chunked,
      output_features=output_features,
      sequence_length=sequence_length)
  return denoise(
      chunked,
      output_features,
      inputs_fn=drop_nonnoise_tokens,
      targets_fn=drop_noise_tokens,
      noise_density=0.5,
      noise_mask_fn=random_prefix_noise_mask)
def full_lm(dataset, sequence_length, output_features):
  """Full language modeling objective with EOS only at document boundaries.

  Appends EOS to each document, concatenates documents together, and splits
  the result into fixed-size segments of the targets length.

  Args:
    dataset: a tf.data.Dataset of tokenized examples with a 'targets' key.
    sequence_length: dict mapping feature key to int length for that feature.
    output_features: mapping of feature keys to features.

  Returns:
    a preprocessed tf.data.Dataset.
  """
  chunked = select_random_chunk(
      dataset,
      output_features=output_features,
      feature_key='targets',
      max_length=65536)
  with_eos = seqio.preprocessors.append_eos(chunked, output_features)
  packed = reduce_concat_tokens(with_eos, feature_key='targets', batch_size=128)
  # Don't use `split_tokens_to_targets_length` since we've already added EOS.
  return split_tokens(packed, max_tokens_per_segment=sequence_length['targets'])
@gin.configurable
def select_random_chunk(dataset: tf.data.Dataset,
                        output_features: Mapping[str, seqio.Feature],
                        max_length: Optional[int] = None,
                        feature_key: str = 'targets',
                        additional_feature_keys: Optional[Sequence[str]] = None,
                        passthrough_feature_keys: Optional[
                            Sequence[str]] = None,
                        sequence_length: Optional[Mapping[str, int]] = None,
                        uniform_random_start: bool = False,
                        min_length: Optional[int] = None,
                        **unused_kwargs) -> tf.data.Dataset:
  """Token-preprocessor to extract one span of at most `max_length` tokens.

  If the token sequence is longer than `max_length`, then we return a random
  subsequence. Otherwise, we return the full sequence.

  This is generally followed by split_tokens.

  Args:
    dataset: A tf.data.Dataset with dictionaries containing the key feature_key.
    output_features: Mapping of keys to features.
    max_length: Typically specified in gin configs, takes priority over
      sequence_length.
    feature_key: Which feature to use from the dataset.
    additional_feature_keys: Additional features to use. The same chunk will be
      selected from these features as from the one specified in feature_key,
      so they should all have the same length.
    passthrough_feature_keys: Additional keys to pass through unchanged.
    sequence_length: Used if max_length is not specified. Typically passed in
      by the data pipeline. feature_key will be used to select the length.
    uniform_random_start: If True, will select a starting point in
      [-max_length + 1, n_tokens). If False, will select one of a set of chunks
      offset by max_length. Both of these starting points try to ensure each
      token has an equal probability of being included.
    min_length: If specified, lengths of chunks will be selected uniformly at
      random from [min_length, max_length]. Note that chunks can end up shorter
      than min_length if at the beginning or end of the sequence.

  Returns:
    a dataset
  """
  if passthrough_feature_keys:
    chunk_keys = set([feature_key] + (additional_feature_keys or []))
    overlap_keys = chunk_keys & set(passthrough_feature_keys)
    if overlap_keys:
      raise ValueError(
          f'chunk keys {overlap_keys} also included in passthrough keys')
  if max_length is None and sequence_length is not None:
    max_length = sequence_length[feature_key]
    if output_features[feature_key].add_eos:
      # Leave room to insert an EOS token.
      max_length -= 1
  if max_length is None:
    raise ValueError('Must specify max_length or sequence_length.')
  @seqio.map_over_dataset(num_seeds=2)
  def _my_fn(x, seeds):
    """Select a random chunk of tokens.

    Args:
      x: a 1d Tensor
      seeds: an int32 Tensor, shaped (2, 2), the random seeds.

    Returns:
      a 1d Tensor
    """
    tokens = x[feature_key]
    n_tokens = tf.shape(tokens)[0]
    if min_length is not None:
      # Draw the chunk length uniformly from [min_length, max_length).
      length = tf.random.stateless_uniform(
          [],
          minval=min_length,
          maxval=max_length,
          dtype=tf.int32,
          seed=seeds[0])
    else:
      length = max_length
    if uniform_random_start:
      # Starts may be negative; they are clamped to 0 below so that tokens
      # near the start of the sequence are as likely to be chosen as
      # interior tokens.
      start = tf.random.stateless_uniform(
          [],
          minval=-length + 1,  # pylint:disable=invalid-unary-operand-type
          maxval=n_tokens,
          dtype=tf.int32,
          seed=seeds[1])
      end = tf.minimum(start + length, n_tokens)
      start = tf.maximum(start, 0)
    else:
      # Partition the sequence into ceil(n_tokens / length) aligned chunks
      # and pick one of them uniformly at random.
      num_segments = tf.cast(
          tf.math.ceil(
              tf.cast(n_tokens, tf.float32) / tf.cast(length, tf.float32)
          ),
          tf.int32)
      start = length * tf.random.stateless_uniform(
          [],
          maxval=num_segments,
          dtype=tf.int32,
          seed=seeds[1])
      end = tf.minimum(start + length, n_tokens)
    chunk = {feature_key: tokens[start:end]}
    if additional_feature_keys is not None:
      for k in additional_feature_keys:
        # The runtime assert guards against silently mis-aligned chunks when
        # an additional feature has a different length than feature_key.
        with tf.control_dependencies([
            tf.assert_equal(
                tf.shape(tokens)[0],
                tf.shape(x[k])[0],
                message=(f'Additional feature {k} is not the same size as '
                         f'{feature_key} along axis 0 in select_random_chunk().'
                        )
            )
        ]):
          chunk[k] = x[k][start:end]
    if passthrough_feature_keys is not None:
      for k in passthrough_feature_keys:
        chunk[k] = x[k]
    return chunk
  # Filter empty examples.
  dataset = dataset.filter(lambda x: tf.not_equal(tf.size(x[feature_key]), 0))
  return _my_fn(dataset)
@gin.configurable
def reduce_concat_tokens(dataset,
                         feature_key='targets',
                         batch_size=128,
                         **unused_kwargs):
  """Token-preprocessor to concatenate multiple unrelated documents.

  If we want to generate examples of exactly the right length
  (to avoid wasting space on padding), then we use this function, followed
  by split_tokens.

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the key
      feature_key.
    feature_key: a string
    batch_size: an integer - how many documents to concatenate into one

  Returns:
    a dataset
  """
  def _concat_batch(x):
    # Flatten the padded batch into one long sequence, then drop the
    # zero padding tokens introduced by padded_batch.
    flat = tf.reshape(x[feature_key], [-1])
    return {feature_key: tf.boolean_mask(flat, tf.cast(flat, tf.bool))}

  only_feature = dataset.map(
      lambda x: {feature_key: x[feature_key]}, num_parallel_calls=AUTOTUNE)
  batched = only_feature.padded_batch(
      batch_size, padded_shapes={feature_key: [-1]})
  return batched.map(_concat_batch, num_parallel_calls=AUTOTUNE)
@seqio.map_over_dataset
def trim_tokens_at_front(x,
                         sequence_length,
                         keys_to_trim=None,
                         **unused_kwargs):
  """Token-preprocessor to trim each sequence at the beginning.

  Keeps the trailing tokens of every trimmed feature, leaving one position
  free for the EOS token that gets added later in the pipeline.

  Args:
    x: an example with dictionaries containing keys_to_trim.
    sequence_length: a dict of ints.
    keys_to_trim: a list of feature keys.

  Returns:
    A preprocessed example.
  """
  keys = keys_to_trim if keys_to_trim else sequence_length.keys()
  for key in keys:
    if key not in x:
      continue
    # Keep the last (length - 1) tokens; the freed slot is for EOS.
    keep = sequence_length[key] - 1
    x[key] = x[key][-keep:]
  return x
def trivia_qa_truncate_inputs(dataset, output_features, sequence_length):
  """Token preprocessor for the trivia QA dataset to truncate inputs.

  This function takes a dataset containing "targets" and "inputs". It searches
  for the "targets" in the "inputs" and truncates the "inputs" to
  `sequence_length` while ensuring that the "targets" are present in the
  "inputs". The function will randomly select a subset of "inputs".
  If "targets" are not found in the "inputs", then the example is
  is dropped from the dataset.

  E.g.
  Input dataset
  {
    "inputs": [0, 3, 5, 7, 9, 11, 13, 15, 17, 18]
    "targets": [5, 7, 9]
  }

  Output dataset (assuming sequence_length['inputs'] = 4)
  {
    "inputs": [3, 5, 7, 9]
    "targets": [5, 7, 9]
  }

  or

  {
    "inputs": [5, 7, 9, 11]
    "targets": [5, 7, 9]
  }

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the "inputs" and
      "targets".
    output_features: unused by this function.
    sequence_length: a dict, with keys as "inputs" and "targets" indicating the
      maximum number of tokens in each of the sequences.

  Returns:
    a dataset
  """
  del output_features
  @seqio.map_over_dataset(num_seeds=1)
  def my_fn(features, seed):
    """Function to map original dataset to the new dataset."""
    inputs = features['inputs']
    targets = features['targets']
    ans_len = tf.shape(targets)[0]
    max_input_tokens = sequence_length['inputs']
    def truncate_inputs():
      """Helper function to truncate the inputs."""
      def answer_in_context(context, answer):
        """Helper function that checks if the answer is present in the context.

        Args:
          context: Tensor, tokenized representation of the context
          answer: Tensor, tokenized representation of the answer

        Returns:
          result: boolean, indicates if the answer was present in the context.
          pos_mask: boolean mask, a mask for every possible start position of
            the answer in the context. Indicates whether the answer starts at
            the particular position.
        """
        conv_inp = tf.reshape(tf.cast(context, tf.float32), [1, -1, 1])
        ans_len = tf.shape(answer)[0]
        filters = tf.eye(ans_len, dtype=tf.float32)
        # Assume context len is N and answer len is M.
        # Use a convolution to create a matrix of (N-M) x M elements where
        # each row of the matrix is a sequence of len M. This matrix contains
        # all possible contiguous sequences of length M from the context.
        # Every row of this matrix is compared with the answer to check if the
        # answer exists in the context.
        strided = tf.nn.conv1d(conv_inp,
                               tf.reshape(filters, [ans_len, 1, ans_len]), 1,
                               'VALID')
        strided = tf.cast(strided[0], answer.dtype)
        pos_mask = tf.reduce_all(
            tf.equal(strided, tf.reshape(answer, [1, -1])), 1)
        result = tf.reduce_any(pos_mask)
        return result, pos_mask
      def slice_inputs(inputs, answer_len, pos_mask, seed=None):
        """Helper function to slice inputs while keeping the answer."""
        # First position where the answer starts in the inputs.
        ans_start_pos = tf.cast(tf.where(pos_mask)[0][0], tf.int32)
        inputs_len = tf.shape(inputs)[0]
        # Valid window starts are those that keep the whole answer inside a
        # window of max_input_tokens tokens.
        start_range_min = tf.maximum(
            0, ans_start_pos - (max_input_tokens - answer_len))
        start_range_max = tf.minimum(ans_start_pos,
                                     inputs_len - max_input_tokens) + 1
        start_pos = tf.random.stateless_uniform(
            [],
            minval=start_range_min,
            maxval=start_range_max,
            dtype=tf.int32,
            seed=seed)
        return inputs[start_pos:start_pos + max_input_tokens]
      result, pos_mask = answer_in_context(inputs, targets)
      if result:
        return slice_inputs(inputs, ans_len, pos_mask, seed=seed)
      else:
        # Answer not found: emit an empty inputs so the example gets dropped
        # by the filter below.
        return tf.constant([], dtype=inputs.dtype)
    if tf.greater(tf.shape(inputs)[0], max_input_tokens):
      inputs = truncate_inputs()
    return {'inputs': inputs, 'targets': features['targets']}
  dataset = my_fn(dataset)
  return dataset.filter(lambda x: tf.size(x['inputs']) > 0)
@gin.configurable()
def unsupervised(dataset,
                 preprocessors=None,
                 output_features=None,
                 sequence_length=None):
  """Configure this to point at unsupervised preprocessors.

  This function creates an extra level of indirection in case we want
  different unsupervised pretraining functions in the future which do not
  fit into the denoise() framework.

  This function should be used as a post-cache preprocessing function.

  Args:
    dataset: A tf.data.Dataset to process.
    preprocessors: a list of token-preprocessor functions. These functions
      should take unused kwargs if output_features or sequence_length is not
      used.
    output_features: dict(str, Feature), output features of the Task to be
      passed to the model.
    sequence_length: dict mapping feature key to int length for that feature.

  Returns:
    A preprocessed tf.data.Dataset.
  """
  if preprocessors is None:
    logging.warning(
        'unsupervised preprocessor got preprocessors=None; no preprocessing '
        'will be applied.'
    )
    return dataset

  # Only forward the kwargs that were actually provided; the preprocessors
  # accept unused kwargs otherwise.
  common_kwargs = {}
  if output_features:
    common_kwargs['output_features'] = output_features
  if sequence_length:
    common_kwargs['sequence_length'] = sequence_length

  for preprocessor in preprocessors:
    dataset = preprocessor(dataset, **common_kwargs)
  return dataset
# ======================== split_tokens and helpers ============================
@gin.configurable
def split_tokens(dataset: tf.data.Dataset,
                 min_tokens_per_segment: Optional[int] = None,
                 max_tokens_per_segment: int = gin.REQUIRED,
                 feature_key: str = 'targets',
                 additional_feature_keys: Optional[Sequence[str]] = None,
                 passthrough_feature_keys: Optional[Sequence[str]] = None,
                 num_parallel_calls: int = AUTOTUNE,
                 **unused_kwargs) -> tf.data.Dataset:
  """Split examples into multiple examples each.

  The intended use case is to break up long examples for use in unsupervised
  transfer-learning.

  This function is generally preceded by select_random_chunk.

  If min_tokens_per_segment is provided, the segment length is chosen randomly
  per document from a log-uniform distribution. If min_tokens_per_segment is
  None, then the segment length is max_tokens_per_segment (except for a possibly
  shorter last segment in each document).

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the key feature_key.
    min_tokens_per_segment: an optional integer
    max_tokens_per_segment: an integer, the maximum number of tokens in each
      segment. Only the final segment may be shorter.
    feature_key: a string, the feature to split
    additional_feature_keys: Additional features to split. The same chunk size
      will be used, so they should be the same size as feature_key.
    passthrough_feature_keys: Features to pass through without any splitting.
    num_parallel_calls: num_parallel_calls value to pass to map_over_dataset

  Returns:
    a dataset
  """
  if passthrough_feature_keys:
    split_keys = set([feature_key] + (additional_feature_keys or []))
    overlap_keys = split_keys & set(passthrough_feature_keys)
    if overlap_keys:
      raise ValueError(
          f'split keys {overlap_keys} also included in passthrough keys')

  @seqio.map_over_dataset(num_seeds=1, num_parallel_calls=num_parallel_calls)
  def _split_tokens(x, seed):
    """Split one token sequence into multiple sequences."""
    tokens = x[feature_key]
    n_tokens = tf.shape(tokens)[0]
    if min_tokens_per_segment is None:
      length = max_tokens_per_segment
    else:
      # pick a length - log-uniformly distributed
      length = tf.cast(
          tf.exp(
              tf.random.stateless_uniform(
                  [],
                  minval=math.log(min_tokens_per_segment),
                  maxval=math.log(max_tokens_per_segment),
                  seed=seed
              )
          ),
          tf.int32)

    # Pad to a multiple of length, then use tf.reshape to split up the tokens
    # into num_segments segments each of the given length.
    num_segments = tf.cast(
        tf.math.ceil(
            tf.cast(n_tokens, tf.float32) / tf.cast(length, tf.float32))
        ,
        tf.int32)
    padding = num_segments * length - tf.shape(tokens)[0]
    feature_keys_to_split = [feature_key]
    orig_lengths = {}
    outputs = {}
    if additional_feature_keys is not None:
      feature_keys_to_split.extend(additional_feature_keys)
    for k in feature_keys_to_split:
      # The runtime assert guards against mis-aligned splits when an
      # additional feature does not match feature_key's length.
      with tf.control_dependencies([
          tf.assert_equal(
              tf.shape(tokens)[0],
              tf.shape(x[k])[0],
              message=(f'Additional feature {k} is not the same size as '
                       f'{feature_key} along axis 0 in split_tokens().')
          )
      ]):
        shape = tf.shape(x[k])[1:]
        shape_list = x[k].shape[1:]
        # Pad only along axis 0; trailing axes (if any) are left untouched.
        padded = tf.pad(
            x[k],
            tf.concat([[[0, padding]],
                       tf.zeros([len(shape_list), 2], dtype=tf.int32)],
                      axis=0))
        # Record each segment's true (un-padded) length so the padding can be
        # stripped after unbatching; only the last segment may be short.
        orig_lengths[k] = tf.concat(
            [tf.repeat(length, num_segments - 1), [length - padding]], axis=0)
        outputs[k] = tf.reshape(
            padded, tf.concat([[-1, length], shape], axis=0))
    if passthrough_feature_keys:
      for k in passthrough_feature_keys:
        # Replicate passthrough features once per produced segment.
        outputs[k] = tf.tile(
            tf.expand_dims(x[k], axis=0),
            tf.concat([[num_segments], tf.tile([1], [tf.rank(x[k])])], axis=0))
    return outputs, orig_lengths

  def _strip_padding(inputs, orig_lengths):
    # Trim each split feature back to its original length; passthrough
    # features were never padded.
    output = {}
    for k, v in inputs.items():
      if passthrough_feature_keys and k in passthrough_feature_keys:
        output[k] = v
      else:
        output[k] = v[:orig_lengths[k]]
    return output

  # Filter empty examples.
  dataset = dataset.filter(lambda x: tf.not_equal(tf.size(x[feature_key]), 0))
  dataset = _split_tokens(dataset)
  dataset = dataset.unbatch()
  dataset = dataset.map(_strip_padding, num_parallel_calls=AUTOTUNE)
  return dataset
@gin.configurable
def split_tokens_to_inputs_length(dataset, sequence_length,
                                  output_features, **kwargs):
  """Split examples into segments no longer than the inputs length."""
  segment_length = sequence_length['inputs']
  if output_features['inputs'].add_eos:
    # Reserve one position for the EOS token appended later.
    segment_length -= 1
  return split_tokens(
      dataset, max_tokens_per_segment=segment_length, **kwargs)
@gin.configurable
def split_tokens_to_targets_length(dataset, sequence_length,
                                   output_features, **kwargs):
  """Split examples into segments no longer than the targets length."""
  segment_length = sequence_length['targets']
  if output_features['targets'].add_eos:
    # Reserve one position for the EOS token appended later.
    segment_length -= 1
  return split_tokens(
      dataset, max_tokens_per_segment=segment_length, **kwargs)
@gin.configurable
def split_tokens_to_random_length(dataset, sequence_length,
                                  output_features, **kwargs):
  """Split examples into log-uniform random-length segments (min 8 tokens)."""
  segment_length = sequence_length['inputs']
  if output_features['inputs'].add_eos:
    # Reserve one position for the EOS token appended later.
    segment_length -= 1
  return split_tokens(
      dataset,
      min_tokens_per_segment=8,
      max_tokens_per_segment=segment_length,
      **kwargs)
@gin.configurable
def concatenate_and_split_to_fixed_length(dataset,
                                          sequence_length,
                                          output_features,
                                          feature_key='targets',
                                          **unused_kwargs):
  """Concatenate tokens across examples, then split to fixed-size chunks.

  Chunk length is determined by sequence_length[feature_key].

  Args:
    dataset: a tf.data.Dataset
    sequence_length: a dict of ints.
    output_features: a dict mapping feature name to t5.data.Feature.
    feature_key: a string

  Returns:
    a tf.data.Dataset
  """
  chunk_size = sequence_length[feature_key]
  if output_features[feature_key].add_eos:
    # Reserve one position for the EOS token appended later.
    chunk_size -= 1
  only_feature = dataset.map(lambda x: {feature_key: x[feature_key]})
  # unbatch() flattens all tokens into a stream; batch() re-chunks them.
  return only_feature.unbatch().batch(chunk_size)
@gin.configurable
def filter_by_string_length(dataset,
                            feature_key='targets',
                            min_length=1,
                            max_length=1000000,
                            **unused_kwargs):
  """Filter examples by string length.

  Keeps only examples whose feature_key string length lies in
  [min_length, max_length] (inclusive on both ends).

  Args:
    dataset: a tf.data.Dataset (not tokenized)
    feature_key: a string
    min_length: an integer
    max_length: an integer

  Returns:
    a tf.data.Dataset
  """
  def _in_range(x):
    n = tf.strings.length(x[feature_key])
    long_enough = tf.greater_equal(n, min_length)
    short_enough = tf.less_equal(n, max_length)
    return tf.logical_and(long_enough, short_enough)
  return dataset.filter(_in_range)
@gin.configurable
def random_spans_helper(inputs_length=gin.REQUIRED,
                        noise_density=gin.REQUIRED,
                        mean_noise_span_length=gin.REQUIRED,
                        extra_tokens_per_span_inputs=gin.REQUIRED,
                        extra_tokens_per_span_targets=gin.REQUIRED,
                        verbose=False):
  """Training parameters to avoid padding with random_spans_noise_mask.

  When training a model with random_spans_noise_mask, we would like to set the
  other training hyperparameters in a way that avoids padding. This function
  helps us compute these hyperparameters.

  We assume that each noise span in the input is replaced by
  extra_tokens_per_span_inputs sentinel tokens, and each non-noise span in the
  targets is replaced by extra_tokens_per_span_targets sentinel tokens.

  This function tells us the required number of tokens in the raw example (for
  split_tokens()) as well as the length of the encoded targets.

  Note that this function assumes the inputs and targets will have EOS appended
  and includes that in the reported length.

  Args:
    inputs_length: an integer - desired length of the tokenized inputs sequence
    noise_density: a float
    mean_noise_span_length: a float
    extra_tokens_per_span_inputs: an integer
    extra_tokens_per_span_targets: an integer
    verbose: a bool indicating whether to log sequence lengths

  Returns:
    tokens_length: length of original text in tokens
    targets_length: an integer - length in tokens of encoded targets sequence
  """
  def _tokens_length_to_inputs_length_targets_length(tokens_length):
    # Mirrors the deterministic token/span accounting done by
    # random_spans_noise_mask for a given raw length.
    num_noise_tokens = int(round(tokens_length * noise_density))
    num_nonnoise_tokens = tokens_length - num_noise_tokens
    num_noise_spans = int(round(num_noise_tokens / mean_noise_span_length))
    # inputs contain all nonnoise tokens, sentinels for all noise spans
    # and one EOS token.
    return (
        num_nonnoise_tokens +
        num_noise_spans * extra_tokens_per_span_inputs + 1,
        num_noise_tokens +
        num_noise_spans * extra_tokens_per_span_targets + 1)

  # Grow tokens_length as far as possible while the encoded inputs still fit
  # within the desired inputs_length.
  tokens_length = inputs_length - 1
  while (_tokens_length_to_inputs_length_targets_length(tokens_length + 1)[0]
         <= inputs_length):
    tokens_length += 1
  inputs_length, targets_length = (
      _tokens_length_to_inputs_length_targets_length(tokens_length))
  # minor hack to get the targets length to be equal to inputs length
  # which is more likely to have been set to a nice round number.
  if noise_density == 0.5 and targets_length > inputs_length:
    tokens_length -= 1
    targets_length -= 1
  if verbose:
    logging.info(
        'tokens_length=%s inputs_length=%s targets_length=%s '
        'noise_density=%s mean_noise_span_length=%s ',
        tokens_length, inputs_length, targets_length,
        noise_density, mean_noise_span_length)
  return tokens_length, targets_length
@gin.configurable
def random_spans_tokens_length():
  """Helper for gin-configuring split_tokens with random_spans_noise_mask."""
  tokens_length, _ = random_spans_helper()
  return tokens_length
@gin.configurable
def random_spans_targets_length():
  """Helper for gin-configuring the targets sequence length."""
  _, targets_length = random_spans_helper()
  return targets_length
# ========================== denoise and helpers ===============================
@gin.configurable()
def denoise(dataset,
            output_features,
            noise_density=gin.REQUIRED,
            noise_mask_fn=gin.REQUIRED,
            inputs_fn=gin.REQUIRED,
            targets_fn=None,
            passthrough_feature_keys: Optional[Sequence[str]] = None,
            input_feature_key='inputs',
            **unused_kwargs):
  """Gin-configurable token preprocessor for self-supervised denoising tasks.

  This function takes a dataset containing "targets" sequences,
  and turns each sequence into a dictionary containing:
  {
     "inputs": noisy version of the original sequence
     "targets": the full original sequence or missing parts of original sequence
  }

  In particular, for each sequence, we choose a boolean noise_mask identifying
  which tokens in the sequence to corrupt, as defined by the given
  noise_mask_fn.

  Given the sequence and the noise mask, we generate the inputs and targets
  using the given inputs_fn and targets_fn respectively.

  The self-supervised tasks vary along these axes:
    - noise_density: What fraction of the tokens to select as noise
    - noise_mask_fn: What pattern should the noise mask follow
      (iid, regular segments, etc.)
    - inputs_fn: How to apply the noise
      (drop noise tokens, replace with sentinels, etc.)
    - targets_fn: How to represent the output
      (full sequence, only non-noise tokens, etc.)

  Note: Some functionality has been deleted, which we may or may not want to
  restore at a later date. The code for this functionality can be found in
  the deleted code for this CL. In particular:
    - mixture of masking and random replacement
    - task labels prepended to the inputs

  Args:
    dataset: A tf.data.Dataset to process.
    output_features: a dict mapping feature name to t5.data.Feature.
    noise_density: a float
    noise_mask_fn: a function from (length, noise_density) -> boolean mask
    inputs_fn: a function from (tokens, noise_mask, vocabulary) -> tokens
    targets_fn: a function from (tokens, noise_mask, vocabulary) -> tokens
    passthrough_feature_keys: names of additional features to include in output
    input_feature_key: name of feature to use as inputs

  Returns:
    A preprocessed tf.data.Dataset.
  """
  if passthrough_feature_keys and (input_feature_key in passthrough_feature_keys
                                   or 'targets' in passthrough_feature_keys):
    raise ValueError(
        f"passthrough keys cannot contain '{input_feature_key}' or 'targets'")

  @seqio.map_over_dataset(num_seeds=6)
  def my_fn(features, seeds):
    """Map function."""
    tokens = features['targets']
    vocabulary = output_features['targets'].vocabulary
    if (input_feature_key in output_features and
        vocabulary != output_features[input_feature_key].vocabulary):
      raise ValueError(
          'denoise creates inputs based on tokenized targets but was applied '
          'to a task that uses different vocabularies for inputs and targets.')
    # Six seeds are split into disjoint pairs so that the mask, the inputs
    # transform, and the targets transform each get independent randomness.
    noise_mask = noise_mask_fn(tf.size(tokens), noise_density, seeds=seeds[:2])
    inputs = inputs_fn(tokens, noise_mask, vocabulary, seeds=seeds[2:4])
    if targets_fn:
      targets = targets_fn(tokens, noise_mask, vocabulary, seeds=seeds[4:6])
    else:
      # Default: predict the full original sequence.
      targets = tokens
    return {
        input_feature_key: inputs,
        'targets': targets,
        **{
            k: features[k]
            for k in features
            if passthrough_feature_keys and k in passthrough_feature_keys
        }
    }
  return my_fn(dataset)
@gin.configurable()
def iid_noise_mask(length, noise_density, seeds):
  """Independent and identically distributed token noise.

  Each position is masked independently with probability `noise_density`.

  Args:
    length: an int32 scalar.
    noise_density: a float - approximate density of output mask.
    seeds: an int32 Tensor, shaped (1, 2), the random seed.

  Returns:
    a boolean tensor with shape [length].
  """
  uniforms = tf.random.stateless_uniform([length], seed=seeds[0])
  return uniforms < noise_density
@gin.configurable()
def regular_noise_mask(length,
                       noise_density,
                       seeds,
                       min_span_length=1,
                       max_span_length=5):
  """Noise mask consisting of equally spaced spans of equal length.

  The span length and the offset are chosen randomly per-example.
  The beginning and end of the sequence may be part of shorter spans of noise.
  For example, if noise_density=0.25 and a span length of 2 is chosen,
  then the output might be:

  [T F F F F F F T T F F F F F F T T F F F F F F T T F F]

  Args:
    length: an int32 scalar.
    noise_density: a float - approximate density of output mask.
    seeds: an int32 Tensor, shaped (2, 2), the random seeds.
    min_span_length: an integer.
    max_span_length: an integer.

  Returns:
    a boolean tensor with shape [length].
  """
  chosen_span_length = tf.random.stateless_uniform(
      [],
      minval=min_span_length,
      maxval=max_span_length + 1,
      dtype=tf.int32,
      seed=seeds[0])
  # Space span starts so that span_length / period approximates noise_density.
  period = tf.cast(
      tf.round(tf.cast(chosen_span_length, tf.float32) / noise_density),
      tf.int32)
  # Random phase so spans don't always start at position 0.
  offset = tf.random.stateless_uniform(
      [],
      maxval=period,
      dtype=tf.int32,
      seed=seeds[1])
  positions = tf.range(length, dtype=tf.int32)
  return (positions + offset) % period < chosen_span_length
@gin.configurable()
def random_spans_noise_mask(length,
                            noise_density,
                            seeds,
                            mean_noise_span_length=3.0):
  """Noise mask consisting of random spans of noise tokens.

  The number of noise tokens and the number of noise spans and non-noise spans
  are determined deterministically as follows:
    num_noise_tokens = round(length * noise_density)
    num_nonnoise_spans = num_noise_spans = round(
       num_noise_tokens / mean_noise_span_length)

  Spans alternate between non-noise and noise, beginning with non-noise.
  Subject to the above restrictions, all masks are equally likely.

  Args:
    length: an int32 scalar (length of the incoming token sequence)
    noise_density: a float - approximate density of output mask
    seeds: an int32 Tensor, shaped (2, 2)
    mean_noise_span_length: a number

  Returns:
    a boolean tensor with shape [length]
  """
  orig_length = length
  # increase length to avoid degeneracy
  length = tf.maximum(length, 2)
  def to_int(x):
    return tf.cast(x, tf.int32)
  def to_float(x):
    return tf.cast(x, tf.float32)
  num_noise_tokens = to_int(tf.round(to_float(length) * noise_density))
  # avoid degeneracy by ensuring positive numbers of noise and nonnoise tokens.
  num_noise_tokens = tf.minimum(tf.maximum(num_noise_tokens, 1), length - 1)
  num_noise_spans = to_int(
      tf.round(to_float(num_noise_tokens) / mean_noise_span_length))
  # avoid degeneracy by ensuring positive number of noise spans
  num_noise_spans = tf.maximum(num_noise_spans, 1)
  num_nonnoise_tokens = length - num_noise_tokens
  # pick the lengths of the noise spans and the non-noise spans
  def _random_segmentation(num_items, num_segments, seed):
    """Partition a sequence of items randomly into non-empty segments.

    Args:
      num_items: an integer scalar > 0
      num_segments: an integer scalar in [1, num_items]
      seed: an integer seed

    Returns:
      a Tensor with shape [num_segments] containing positive integers that add
      up to num_items
    """
    # Shuffle (num_segments - 1) ones among (num_items - 1) slots; a leading
    # zero is padded on so the first item always starts segment 0.
    first_in_segment = tf.pad(
        seqio.stateless_shuffle(
            to_int(tf.range(num_items - 1) < num_segments - 1),
            seed),
        [[1, 0]])
    segment_id = tf.cumsum(first_in_segment)
    segment_length = tf.math.segment_sum(tf.ones_like(segment_id), segment_id)
    return segment_length
  noise_span_lengths = _random_segmentation(
      num_noise_tokens, num_noise_spans, seeds[0])
  nonnoise_span_lengths = _random_segmentation(
      num_nonnoise_tokens, num_noise_spans, seeds[1])
  # Interleave as [nonnoise_0, noise_0, nonnoise_1, noise_1, ...] so the mask
  # always begins with a non-noise span.
  interleaved_span_lengths = tf.reshape(
      tf.stack([nonnoise_span_lengths, noise_span_lengths], axis=1),
      [num_noise_spans * 2])
  span_starts = tf.cumsum(interleaved_span_lengths)[:-1]
  span_start_indicator = tf.math.unsorted_segment_sum(
      tf.ones_like(span_starts), span_starts, length)
  span_num = tf.cumsum(span_start_indicator)
  # Odd-numbered spans are noise because spans alternate starting at nonnoise.
  is_noise = tf.equal(span_num % 2, 1)
  return is_noise[:orig_length]
@gin.configurable()
def random_prefix_noise_mask(length, noise_density, seeds):
  """First part of the sequence is noise (for prefix_lm).

  The length of the prefix is chosen uniformly between [1, length)
  noise_density must be 0.5.
  TODO(noam): figure out some distribution to use if noise_density != 0.5.

  Args:
    length: an int32 scalar.
    noise_density: a float - must equal 0.5.
    seeds: an int32 Tensor, shaped (1, 2), the random seed.

  Returns:
    a boolean tensor with shape [length].
  """
  if noise_density != 0.5:
    raise NotImplementedError(
        'noise density must equal 0.5 for random_prefix_noise_mask')
  upper = length - 1
  # Guard against length <= 1: the lower bound never exceeds the upper bound.
  lower = tf.minimum(upper, 1)
  prefix_length = tf.random.stateless_uniform(
      [],
      minval=lower,
      maxval=upper + 1,
      dtype=tf.int32,
      seed=seeds[0])
  return tf.range(length, dtype=tf.int32) < prefix_length
@gin.configurable()
def sentinel_id(vocabulary, return_value=None):
  """Token ID to use as a sentinel.

  By default, we use the last token in the vocabulary.

  Args:
    vocabulary: a t5.data.vocabularies.Vocabulary
    return_value: an optional integer

  Returns:
    an integer
  """
  if return_value is None:
    return vocabulary.vocab_size - 1
  return return_value
@gin.configurable()
def noise_token_to_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each noise token with the given sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del seeds
  sentinel = tf.cast(sentinel_id(vocabulary), tokens.dtype)
  return tf.where(noise_mask, sentinel, tokens)
@gin.configurable()
def noise_span_to_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive noise tokens with a single sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del seeds
  sentinel = tf.cast(sentinel_id(vocabulary), tokens.dtype)
  replaced = tf.where(noise_mask, sentinel, tokens)
  # A noise token whose predecessor is also noise continues a run; dropping
  # those collapses each run of sentinels down to a single sentinel.
  previous_is_noise = tf.pad(noise_mask[:-1], [[1, 0]])
  run_continuation = tf.logical_and(noise_mask, previous_is_noise)
  return tf.boolean_mask(replaced, tf.logical_not(run_continuation))
@gin.configurable()
def nonnoise_span_to_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive non-noise tokens with a single sentinel."""
  inverted_mask = tf.logical_not(noise_mask)
  return noise_span_to_sentinel(tokens, inverted_mask, vocabulary, seeds)
@gin.configurable()
def noise_span_to_unique_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive noise tokens with a different sentinel.

  The idea here is to be able to align the dropped spans in the inputs
  with the markers in the targets.

  We want to generate training examples like
  "We hold X to be Y that" -> "X these truths Y self evident Z"

  Sentinels assigned in decreasing order within the sequence starting at
  vocabulary.size - 1.  That is, we appropriate the last tokens in the
  vocabulary for additional use as sentinels.

  TODO(noam): we may want to try enlarging the vocabulary and leaving room
  for the sentinels instead.  However, this requires enlarging the embedding
  tables in the model, so that is a bigger change.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del seeds
  prev_token_is_noise = tf.pad(noise_mask[:-1], [[1, 0]])
  # A noise token that starts a span gets a sentinel; later tokens in the
  # same span are dropped entirely.
  first_noise_tokens = tf.logical_and(
      noise_mask, tf.logical_not(prev_token_is_noise))
  subsequent_noise_tokens = tf.logical_and(noise_mask, prev_token_is_noise)
  # The cumulative count of span starts numbers sentinels in decreasing order
  # from sentinel_id(vocabulary) downwards.
  sentinel = sentinel_id(vocabulary) + 1 - tf.cumsum(
      tf.cast(first_noise_tokens, tokens.dtype))
  tokens = tf.where(first_noise_tokens, sentinel, tokens)
  return tf.boolean_mask(tokens, tf.logical_not(subsequent_noise_tokens))
@gin.configurable()
def nonnoise_span_to_unique_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of non-noise tokens with a different sentinel.

  Mirror image of noise_span_to_unique_sentinel, obtained by inverting the
  noise mask before delegating.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  inverted_mask = tf.logical_not(noise_mask)
  return noise_span_to_unique_sentinel(tokens, inverted_mask, vocabulary, seeds)
@gin.configurable()
def drop_noise_tokens(tokens, noise_mask, vocabulary, seeds):
  """Remove every noise token from the sequence; no sentinel is inserted.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an unused int32 Tensor
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  # Both arguments are unused; kept for the uniform noise-function signature.
  del vocabulary, seeds
  keep_mask = tf.logical_not(noise_mask)
  return tf.boolean_mask(tokens, keep_mask)
@gin.configurable()
def drop_nonnoise_tokens(tokens, noise_mask, vocabulary, seeds):
  """Keep only the noise tokens; non-noise tokens are removed outright.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an unused int32 Tensor
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  # Both arguments are unused; kept for the uniform noise-function signature.
  del vocabulary, seeds
  return tf.boolean_mask(tokens, mask=noise_mask)
@gin.configurable()
def permute_noise_tokens(tokens, noise_mask, vocabulary, seeds):
  """Permute the noise tokens, keeping the non-noise tokens where they are.
  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an int32 Tensor, sized (1, 2)
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del vocabulary
  # Collect only the noise tokens and shuffle them deterministically by seed.
  masked_only = tf.boolean_mask(tokens, noise_mask)
  permuted = seqio.stateless_shuffle(masked_only, seeds[0])
  # pad to avoid errors when it has size 0
  permuted = tf.pad(permuted, [[0, 1]])
  # Each noise position's rank among the noise positions (exclusive cumsum)
  # indexes into the shuffled values; non-noise positions are untouched.
  indices = tf.cumsum(tf.cast(noise_mask, tf.int32), exclusive=True)
  return tf.where(noise_mask,
                  tf.gather(permuted, indices),
                  tokens)
@gin.configurable()
def noise_token_to_gathered_token(tokens, noise_mask, vocabulary, seeds):
  """Replace each noise token with a token sampled from the sequence itself.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an int32 Tensor, sized (1, 2)
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del vocabulary
  # Draw one random position per token, deterministically from the seed.
  gather_indices = tf.random.stateless_uniform(
      shape=tf.shape(tokens),
      maxval=tf.size(tokens),
      dtype=tf.int32,
      seed=seeds[0])
  gathered = tf.gather(tokens, gather_indices)
  # Only noise positions take the gathered token; others keep their own.
  return tf.where(noise_mask, gathered, tokens)
@gin.configurable()
def noise_token_to_random_token(
    tokens,
    noise_mask,
    vocabulary,
    seeds,
    num_reserved_tokens=3):
  """Replace each noise token with a random token from the vocabulary.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an int32 Tensor, shaped (1, 2)
    num_reserved_tokens: an integer; ids below this are never sampled
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  # Sample a full sequence of random vocabulary ids, then keep the random
  # value only at noise positions.
  random_tokens = tf.random.stateless_uniform(
      tf.shape(tokens),
      minval=num_reserved_tokens,
      maxval=vocabulary.vocab_size,
      dtype=tokens.dtype,
      seed=seeds[0])
  return tf.where(noise_mask, random_tokens, tokens)
@gin.configurable()
def noise_token_to_random_token_or_sentinel(
    tokens,
    noise_mask,
    vocabulary,
    seeds,
    random_prob=0.1):
  """Replace each noise token with a random token or a sentinel.
  For each masked token, with probability random_prob, we replace it by a
  random token from the vocabulary. Otherwise, we replace it with a sentinel.
  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an int32 Tensor, shaped (2, 2).
    random_prob: a float
  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  # Per-position coin flip (driven by the first seed row) deciding between
  # the random-token branch and the sentinel branch.
  use_random = (
      tf.random.stateless_uniform(tf.shape(tokens), seed=seeds[0]) <
      random_prob)
  # The remaining seed rows drive the random-token branch; the sentinel
  # branch is deterministic, so it receives an empty seed tuple.
  return tf.where(
      use_random,
      noise_token_to_random_token(
          tokens, noise_mask, vocabulary, seeds=seeds[1:]),
      noise_token_to_sentinel(
          tokens, noise_mask, vocabulary, seeds=()))
# =============== EXPERIMENTAL preprocessors (not used for the T5 paper) =======
def trim_and_pad_dataset(dataset, sequence_length):
  """Preprocessor wrapper around `seqio.utils.trim_and_pad_dataset`."""
  return seqio.utils.trim_and_pad_dataset(dataset, feature_lengths=sequence_length)
def targets_for_prefix_lm_objective(dataset, sequence_length, output_features):
  """Prepares targets to be used for prefix LM objective."""
  # Take a random chunk (up to 65536 tokens) of each example's 'targets'.
  dataset = select_random_chunk(
      dataset, output_features, max_length=65536, feature_key='targets')
  # Append EOS so example boundaries survive the concatenation below.
  dataset = seqio.preprocessors.append_eos(dataset, output_features)
  # Concatenate batches of 128 examples into long token runs...
  dataset = reduce_concat_tokens(dataset, batch_size=128)
  # ...then re-split the runs into segments of the requested target length.
  dataset = split_tokens(
      dataset, max_tokens_per_segment=sequence_length['targets'])
  # Finally trim/pad every feature to the configured sequence lengths.
  dataset = trim_and_pad_dataset(dataset, sequence_length)
  return dataset
def pack_prefix_lm_encoder_decoder(ds, sequence_length, pad_id=0):
  """Pack two examples into one with the prefix LM objective."""
  packed_length = next(iter(sequence_length.values()))
  # The prefix/continuation split below assumes a single, even length shared
  # by every feature.
  assert packed_length % 2 == 0
  assert all(l == packed_length for l in sequence_length.values())
  @seqio.utils.map_over_dataset(num_seeds=1)
  def pack_examples(example_pair, seed):
    # Random cut point: tokens before it form the (encoder) prefix, tokens
    # after it form the (decoder) continuation.
    split_point = tf.random.stateless_uniform((),
                                              minval=1,
                                              maxval=packed_length,
                                              seed=seed,
                                              dtype=tf.int32)
    # Encoder input = prefix of example 0 followed by prefix of example 1;
    # the two prefixes together fill exactly packed_length tokens.
    inputs = tf.concat([
        example_pair['targets'][0][:split_point],
        example_pair['targets'][1][:packed_length - split_point]
    ],
                       axis=0)
    inputs = tf.reshape(inputs, (packed_length,))
    # Decoder targets = the corresponding continuations of both examples.
    targets = tf.concat([
        example_pair['targets'][0][split_point:],
        example_pair['targets'][1][packed_length - split_point:]
    ],
                        axis=0)
    targets = tf.reshape(targets, (packed_length,))
    # Segment ids distinguish the two packed examples (values 1 and 2).
    encoder_segment_ids = tf.cast(
        tf.range(packed_length) >= split_point, tf.int32) + 1
    decoder_segment_ids = tf.cast(
        tf.range(packed_length) >= (packed_length - split_point), tf.int32) + 1
    decoder_input_tokens = seqio.utils.make_autoregressive_inputs(
        targets, sequence_id=decoder_segment_ids)
    # Positions restart at 0 where the second packed example begins.
    encoder_positions = tf.concat(
        [tf.range(split_point),
         tf.range(packed_length - split_point)], axis=0)
    encoder_positions = tf.reshape(encoder_positions, (packed_length,))
    decoder_positions = tf.concat(
        [tf.range(packed_length - split_point),
         tf.range(split_point)], axis=0)
    decoder_positions = tf.reshape(decoder_positions, (packed_length,))
    # Loss applies to every non-padding target token.
    decoder_loss_weights = tf.cast(
        tf.not_equal(targets, pad_id), dtype=tf.int32)
    return {
        'encoder_input_tokens': inputs,
        'decoder_target_tokens': targets,
        'decoder_input_tokens': decoder_input_tokens,
        'encoder_segment_ids': encoder_segment_ids,
        'encoder_positions': encoder_positions,
        'decoder_segment_ids': decoder_segment_ids,
        'decoder_positions': decoder_positions,
        'decoder_loss_weights': decoder_loss_weights,
    }
  # Note that the batch requires the lengths to be the same.
  return pack_examples(ds.batch(2))
def pack_prefix_lm_decoder_only(ds,
                                sequence_length,
                                loss_on_targets_only=True,
                                pad_id=0):
  """Randomly split the tokens for the prefix LM objective."""
  packed_length = next(iter(sequence_length.values()))
  # All features must share one even length for the random split to be valid.
  assert packed_length % 2 == 0
  assert all(l == packed_length for l in sequence_length.values())
  @seqio.utils.map_over_dataset(num_seeds=1)
  def pack_examples(example, seed):
    # Random boundary between the prefix region and the causal continuation.
    split_point = tf.random.stateless_uniform((),
                                              minval=1,
                                              maxval=packed_length,
                                              seed=seed,
                                              dtype=tf.int32)
    decoder_target_tokens = example['targets']
    decoder_input_tokens = seqio.utils.make_autoregressive_inputs(
        decoder_target_tokens)
    if loss_on_targets_only:
      # Only tokens at or after the split contribute to the loss.
      decoder_loss_weights = tf.cast(
          tf.range(packed_length) >= split_point, tf.int32)
    else:
      decoder_loss_weights = tf.ones((packed_length,), dtype=tf.int32)
    # Regardless of the split, padding tokens never contribute to the loss.
    padding_mask = tf.cast(
        tf.not_equal(decoder_target_tokens, pad_id), dtype=tf.int32)
    decoder_loss_weights *= padding_mask
    # Marks the prefix region (positions up to and including the split).
    decoder_causal_attention = tf.cast(
        tf.range(packed_length) <= split_point, tf.int32)
    return {
        'decoder_target_tokens': decoder_target_tokens,
        'decoder_input_tokens': decoder_input_tokens,
        'decoder_loss_weights': decoder_loss_weights,
        'decoder_causal_attention': decoder_causal_attention,
    }
  return pack_examples(ds)
| google-research/text-to-text-transfer-transformer | t5/data/preprocessors.py | Python | apache-2.0 | 114,576 |
/*
* Copyright 2015 Cognitive Medical Systems, Inc (http://www.cognitivemedicine.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.socraticgrid.hl7.ucs.nifi.test.workbench.command;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import java.util.List;
import org.socraticgrid.hl7.services.uc.interfaces.ManagementIntf;
import org.socraticgrid.hl7.services.uc.model.ServiceInfo;
import org.socraticgrid.hl7.ucs.nifi.test.workbench.converter.ToJSONConverter;
/**
 * Workbench command that queries the current UCS session for the available
 * channels and returns them as a JSON object.
 *
 * @author esteban
 */
public class DiscoverChannelsCommand implements Command {

    @Override
    public void init(JsonObject config) {
        // This command requires no configuration.
    }

    @Override
    public JsonObject execute() {
        try {
            if (CreateUCSSessionCommand.getLastSession() == null) {
                throw new IllegalArgumentException("The Session is not yet started.");
            }

            ManagementIntf management =
                    CreateUCSSessionCommand.getLastSession().getNewManagement();
            List<ServiceInfo> discoverChannels = management.discoverChannels();

            // Convert each discovered service into its JSON representation.
            JsonArray results = new JsonArray();
            for (ServiceInfo serviceInfo : discoverChannels) {
                results.add(ToJSONConverter.toJsonObject(serviceInfo));
            }

            JsonObject result = new JsonObject();
            result.add("channels", results);
            return result;
        } catch (Exception ex) {
            // Note: the "session not started" exception above is intentionally
            // re-wrapped here as well, matching the command's error contract.
            throw new IllegalArgumentException("Error executing Discover Channel Command: " + ex.getMessage(), ex);
        }
    }
}
| SocraticGrid/UCS-Implementation | ucs-nifi-samples/ucs-nifi-test-workbench/src/main/java/org/socraticgrid/hl7/ucs/nifi/test/workbench/command/DiscoverChannelsCommand.java | Java | apache-2.0 | 2,137 |
/**
* Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.geotools.renderer.lite;
import java.awt.Composite;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import org.apache.commons.lang3.tuple.Pair;
import org.geotools.process.function.ProcessFunction;
import org.locationtech.geowave.adapter.vector.plugin.DistributedRenderProcess;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult.CompositeGroupResult;
import org.locationtech.geowave.adapter.vector.render.PersistableComposite;
import org.locationtech.geowave.adapter.vector.render.PersistableRenderedImage;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
/**
 * A {@link StreamingRenderer} used for GeoWave distributed rendering: rather
 * than flattening rendered styles into one image, it intercepts the merge
 * requests and records the per-style images and composites so the partial
 * results can be serialized and merged elsewhere.
 */
public class DistributedRenderer extends StreamingRenderer {
  // Rendering options (max filters, line-width optimization) forwarded as
  // renderer hints.
  private final DistributedRenderOptions options;
  // Queue instance created by getRequestsQueue(); holds the captured results.
  protected DistributedRenderingBlockingQueue renderQueue;
  public DistributedRenderer(final DistributedRenderOptions options) {
    this.options = options;
  }
  @Override
  List<List<LiteFeatureTypeStyle>> classifyByFeatureProduction(
      final List<LiteFeatureTypeStyle> lfts) {
    // strip off a distributed rendering render transform because that is
    // what is currently being processed
    final List<List<LiteFeatureTypeStyle>> retVal = super.classifyByFeatureProduction(lfts);
    for (final List<LiteFeatureTypeStyle> featureTypeStyles : retVal) {
      final LiteFeatureTypeStyle transformLfts = featureTypeStyles.get(0);
      // there doesn't seem to be an easy way to check if its a
      // distributed render transform so for now let's just not allow
      // other rendering transformations when distributed rendering is
      // employed and strip all transformations
      if (transformLfts.transformation instanceof ProcessFunction) {
        if ((((ProcessFunction) transformLfts.transformation).getName() != null)
            && ((ProcessFunction) transformLfts.transformation).getName().equals(
                DistributedRenderProcess.PROCESS_NAME)) {
          transformLfts.transformation = null;
        }
      }
    }
    return retVal;
  }
  /** Injects the distributed-render options into the renderer hints. */
  @Override
  public void setRendererHints(final Map hints) {
    hints.put("maxFiltersToSendToDatastore", options.getMaxFilters());
    hints.put(StreamingRenderer.LINE_WIDTH_OPTIMIZATION_KEY, options.isOptimizeLineWidth());
    super.setRendererHints(hints);
  }
  /** Replaces the standard queue with the capturing queue defined below. */
  @Override
  protected BlockingQueue<RenderingRequest> getRequestsQueue() {
    renderQueue = new DistributedRenderingBlockingQueue(10000);
    return renderQueue;
  }
  /** Assembles the captured per-style results around the given parent image. */
  public DistributedRenderResult getResult(final BufferedImage parentImage) {
    return renderQueue.getResult(parentImage);
  }
  /**
   * Rendering queue that intercepts merge requests instead of executing them,
   * recording the style images/composites keyed by their composite-group
   * graphics for later serialization.
   */
  public class DistributedRenderingBlockingQueue extends RenderingBlockingQueue {
    private static final long serialVersionUID = -1014302908773318665L;
    // Insertion-ordered so composite groups are emitted in rendering order.
    private final Map<Graphics2D, List<Pair<BufferedImage, Composite>>> compositeGroupGraphicsToStyleGraphicsMapping =
        new LinkedHashMap<>();
    private final Map<Graphics2D, Composite> compositeGroupGraphicsToCompositeMapping =
        new HashMap<>();
    public DistributedRenderingBlockingQueue(final int capacity) {
      super(capacity);
    }
    @Override
    public void put(final RenderingRequest e) throws InterruptedException {
      // for merge requests just collect the graphics objects and
      // associated composites
      if (e instanceof MergeLayersRequest) {
        final List<LiteFeatureTypeStyle> lftsList = ((MergeLayersRequest) e).lfts;
        final List<Pair<BufferedImage, Composite>> styleGraphics = new ArrayList<>();
        final Graphics2D parentGraphics = ((MergeLayersRequest) e).graphics;
        for (final LiteFeatureTypeStyle lfts : lftsList) {
          if ((lfts.graphics instanceof DelayedBackbufferGraphic)
              && (lfts.graphics != parentGraphics)) {
            final DelayedBackbufferGraphic styleGraphic = (DelayedBackbufferGraphic) lfts.graphics;
            if (styleGraphic.image != null) {
              styleGraphics.add(Pair.of(styleGraphic.image, lfts.composite));
              continue;
            }
          }
          // if no style graphic was added, add a null value as a
          // placeholder in the list
          styleGraphics.add(null);
        }
        compositeGroupGraphicsToStyleGraphicsMapping.put(parentGraphics, styleGraphics);
      } else if (e instanceof MargeCompositingGroupRequest) {
        // Remember which composite belongs to this compositing group.
        compositeGroupGraphicsToCompositeMapping.put(
            ((MargeCompositingGroupRequest) e).compositingGroup.graphics,
            ((MargeCompositingGroupRequest) e).compositingGroup.composite);
      } else {
        // All other requests (e.g. painting) are processed normally.
        super.put(e);
      }
    }
    /**
     * Converts the captured images/composites into their persistable
     * counterparts, grouped per composite group, wrapped around parentImage.
     */
    public DistributedRenderResult getResult(final BufferedImage parentImage) {
      final List<CompositeGroupResult> compositeGroups = new ArrayList<>();
      for (final Entry<Graphics2D, List<Pair<BufferedImage, Composite>>> e : compositeGroupGraphicsToStyleGraphicsMapping.entrySet()) {
        final Graphics2D compositeGroupGraphic = e.getKey();
        // Lazily wrap each (image, composite) pair; null placeholders stay null.
        final List<Pair<PersistableRenderedImage, PersistableComposite>> orderedStyles =
            Lists.transform(
                e.getValue(),
                new Function<Pair<BufferedImage, Composite>, Pair<PersistableRenderedImage, PersistableComposite>>() {
                  @Override
                  public Pair<PersistableRenderedImage, PersistableComposite> apply(
                      final Pair<BufferedImage, Composite> input) {
                    if (input == null) {
                      return null;
                    }
                    return Pair.of(
                        new PersistableRenderedImage(input.getKey()),
                        input.getValue() == null ? null
                            : new PersistableComposite(input.getValue()));
                  }
                });
        if (compositeGroupGraphic instanceof DelayedBackbufferGraphic) {
          final Composite compositeGroupComposite =
              compositeGroupGraphicsToCompositeMapping.get(compositeGroupGraphic);
          // because mergelayers wasn't writing to the composite
          // image, there won't be an image to persist
          final PersistableComposite persistableCGC =
              compositeGroupComposite == null ? null
                  : new PersistableComposite(compositeGroupComposite);
          compositeGroups.add(new CompositeGroupResult(persistableCGC, orderedStyles));
        } else {
          // it must be the parent image
          compositeGroups.add(new CompositeGroupResult(null, orderedStyles));
        }
      }
      return new DistributedRenderResult(
          new PersistableRenderedImage(parentImage),
          compositeGroups);
    }
  }
}
| spohnan/geowave | extensions/adapters/vector/src/main/java/org/geotools/renderer/lite/DistributedRenderer.java | Java | apache-2.0 | 7,461 |
<?php
// Database connection settings come from the OpenShift environment.
define('DB_HOST', getenv('OPENSHIFT_DB_HOST'));
define('DB_USER', getenv('OPENSHIFT_DB_USERNAME'));
define('DB_PASS', getenv('OPENSHIFT_DB_PASSWORD'));
define('DB_NAME', getenv('OPENSHIFT_DB_DBNAME'));

// Create connection.
// Bug fix: the original passed the undefined constant DB_NAM here, so the
// connection never selected the intended database.
$conn = mysqli_connect(DB_HOST, DB_USER, DB_PASS, DB_NAME);

// Check connection.
if (!$conn) {
    die("Connection failed: " . mysqli_connect_error());
}
echo "Connected successfully";

// Create the table holding per-OS statistics.
$sql = "CREATE TABLE os (
id INT(6) UNSIGNED AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(30) NOT NULL,
nbuser VARCHAR(30) NOT NULL,
nbversion VARCHAR(50) NOT NULL,
nbsmart VARCHAR(30) NOT NULL
)";

if (mysqli_query($conn, $sql)) {
    echo "Table MyGuests created successfully";
} else {
    echo "Error creating table: " . mysqli_error($conn);
}

// Seed rows: OS name, number of users, version, number of smartphones.
// Values are hard-coded constants, so building the SQL literally is safe.
$rows = array(
    array('Android', '100', '23', '100'),
    array('iOS', '200', '20', '200'),
    array('BlackBerry', '70', '21', '70'),
    array('WindowsPhone', '60', '20', '60'),
);

foreach ($rows as $row) {
    $sql = "INSERT INTO os (name, nbuser, nbversion,nbsmart)
VALUES ('" . $row[0] . "', '" . $row[1] . "', '" . $row[2] . "','" . $row[3] . "')";
    if (mysqli_query($conn, $sql)) {
        echo "New record created successfully";
    } else {
        echo "Error: " . $sql . "<br>" . mysqli_error($conn);
    }
}

mysqli_close($conn);
?>
| Amira111/gggggggggg | afficher.php | PHP | apache-2.0 | 1,717 |
package com.hazelcast.cache;
import com.hazelcast.cache.impl.CacheProxy;
import com.hazelcast.cache.impl.HazelcastServerCachingProvider;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.test.HazelcastParametersRunnerFactory;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.spi.CachingProvider;
import java.util.Arrays;
import java.util.Iterator;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
// Parameterized: every test runs once with prefetchValues=true and once with
// prefetchValues=false.
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(HazelcastParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelTest.class})
public class CachePartitionIteratorTest extends HazelcastTestSupport {
    // Whether the partition iterator fetches entry values eagerly along with
    // the keys.
    @Parameterized.Parameter
    public boolean prefetchValues;
    @Parameterized.Parameters(name = "prefetchValues:{0}")
    public static Iterable<Object[]> parameters() {
        return Arrays.asList(new Object[]{Boolean.TRUE}, new Object[]{Boolean.FALSE});
    }
    private CachingProvider cachingProvider;
    private HazelcastInstance server;
    @Before
    public void init() {
        server = createHazelcastInstance();
        cachingProvider = createCachingProvider();
    }
    // Overridable so subclasses can supply a different provider (e.g. client).
    protected CachingProvider createCachingProvider() {
        return HazelcastServerCachingProvider.createCachingProvider(server);
    }
    // Creates a randomly named cache with a large entry-count eviction limit so
    // eviction never interferes with iteration.
    private <K, V> CacheProxy<K, V> getCacheProxy() {
        String cacheName = randomString();
        CacheManager cacheManager = cachingProvider.getCacheManager();
        CacheConfig<K, V> config = new CacheConfig<K, V>();
        config.getEvictionConfig().setMaximumSizePolicy(EvictionConfig.MaxSizePolicy.ENTRY_COUNT).setSize(10000000);
        return (CacheProxy<K, V>) cacheManager.createCache(cacheName, config);
    }
    // Iterating an empty partition reports no elements.
    @Test
    public void test_HasNext_Returns_False_On_EmptyPartition() throws Exception {
        CacheProxy<Integer, Integer> cache = getCacheProxy();
        Iterator<Cache.Entry<Integer, Integer>> iterator = cache.iterator(10, 1, prefetchValues);
        assertFalse(iterator.hasNext());
    }
    // A single entry placed on partition 1 is visible to that partition's iterator.
    @Test
    public void test_HasNext_Returns_True_On_NonEmptyPartition() throws Exception {
        CacheProxy<String, String> cache = getCacheProxy();
        String key = generateKeyForPartition(server, 1);
        String value = randomString();
        cache.put(key, value);
        Iterator<Cache.Entry<String, String>> iterator = cache.iterator(10, 1, prefetchValues);
        assertTrue(iterator.hasNext());
    }
    // next() yields the stored value for the partition's single entry.
    @Test
    public void test_Next_Returns_Value_On_NonEmptyPartition() throws Exception {
        CacheProxy<String, String> cache = getCacheProxy();
        String key = generateKeyForPartition(server, 1);
        String value = randomString();
        cache.put(key, value);
        Iterator<Cache.Entry<String, String>> iterator = cache.iterator(10, 1, prefetchValues);
        Cache.Entry entry = iterator.next();
        assertEquals(value, entry.getValue());
    }
    // After consuming the only entry, the iterator is exhausted.
    @Test
    public void test_Next_Returns_Value_On_NonEmptyPartition_and_HasNext_Returns_False_when_Item_Consumed() throws Exception {
        CacheProxy<String, String> cache = getCacheProxy();
        String key = generateKeyForPartition(server, 1);
        String value = randomString();
        cache.put(key, value);
        Iterator<Cache.Entry<String, String>> iterator = cache.iterator(10, 1, prefetchValues);
        Cache.Entry entry = iterator.next();
        assertEquals(value, entry.getValue());
        boolean hasNext = iterator.hasNext();
        assertFalse(hasNext);
    }
    // 1000 entries on one partition exceed the fetch size (10), forcing the
    // iterator to page through multiple fetches.
    @Test
    public void test_Next_Returns_Values_When_FetchSizeExceeds_On_NonEmptyPartition() throws Exception {
        CacheProxy<String, String> cache = getCacheProxy();
        String value = randomString();
        int count = 1000;
        for (int i = 0; i < count; i++) {
            String key = generateKeyForPartition(server, 42);
            cache.put(key, value);
        }
        Iterator<Cache.Entry<String, String>> iterator = cache.iterator(10, 42, prefetchValues);
        for (int i = 0; i < count; i++) {
            Cache.Entry entry = iterator.next();
            assertEquals(value, entry.getValue());
        }
    }
}
| lmjacksoniii/hazelcast | hazelcast/src/test/java/com/hazelcast/cache/CachePartitionIteratorTest.java | Java | apache-2.0 | 4,689 |
/*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2017 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Re-exports the sap.ui.layout ResponsiveFlowLayout renderer unchanged; this
// module only makes it available under this library's namespace.
sap.ui.define(['jquery.sap.global', 'sap/ui/core/Renderer', 'sap/ui/layout/ResponsiveFlowLayoutRenderer'],
	function(jQuery, Renderer, ResponsiveFlowLayoutRenderer1) {
	"use strict";
	// Extend with no overrides: behavior is identical to the base renderer.
	var ResponsiveFlowLayoutRenderer = Renderer.extend(ResponsiveFlowLayoutRenderer1);
	return ResponsiveFlowLayoutRenderer;
}, /* bExport= */ true);
| thbonk/electron-openui5-boilerplate | libs/openui5-runtime/resources/sap/ui/commons/layout/ResponsiveFlowLayoutRenderer-dbg.js | JavaScript | apache-2.0 | 520 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package cryptoio contains functions for reading and writing private/public keys.
package cryptoio
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"math/rand"
"os"
"google.golang.org/protobuf/proto"
"lukechampine.com/uint128"
"github.com/pborman/uuid"
"github.com/google/privacy-sandbox-aggregation-service/encryption/standardencrypt"
"github.com/google/privacy-sandbox-aggregation-service/pipeline/reporttypes"
"github.com/google/privacy-sandbox-aggregation-service/utils/utils"
"github.com/google/tink/go/aead"
"github.com/google/tink/go/core/registry"
"github.com/google/tink/go/integration/gcpkms"
"github.com/google/tink/go/keyset"
"github.com/google/tink/go/tink"
pb "github.com/google/privacy-sandbox-aggregation-service/encryption/crypto_go_proto"
)
// The default file names for stored encryption keys and secret.
const (
	// Default object names under which the standard key pair is stored.
	DefaultStandardPublicKey = "STANDARD_PUBLIC_KEY"
	DefaultStandardPrivateKey = "STANDARD_PRIVATE_KEY"
	// Environment variable holding the base64-encoded public-key JSON
	// (see SavePublicKeyVersions/ReadPublicKeyVersions).
	PublicKeysEnv = "AGGPUBLICKEYS"
)
// PublicKeyInfo contains the details of a standard public key.
type PublicKeyInfo struct {
	// ID uniquely identifies the key pair (a UUID; see GenerateHybridKeyPairs).
	ID string `json:"id"`
	// Key holds the base64-encoded public key bytes.
	Key string `json:"key"`
	// NotBefore/NotAfter delimit the key's validity window.
	NotBefore string `json:"not_before"`
	NotAfter string `json:"not_after"`
}
// SavePublicKeyVersions saves the standard public keys and corresponding information.
//
// Keys are saved as an environment variable when filePath is empty; otherwise as a local or GCS file.
// (The original comment had this condition inverted.)
func SavePublicKeyVersions(ctx context.Context, keys map[string][]PublicKeyInfo, filePath string) error {
	bKeys, err := json.Marshal(keys)
	if err != nil {
		return err
	}
	if filePath == "" {
		// Environment storage: base64 keeps the JSON safe as an env value.
		os.Setenv(PublicKeysEnv, base64.StdEncoding.EncodeToString(bKeys))
		return nil
	}
	return utils.WriteBytes(ctx, bKeys, filePath)
}
// ReadPublicKeyVersions reads the standard public keys and corresponding information.
//
// When filePath is empty, keys are read from an environment variable; otherwise from a local or GCS file.
func ReadPublicKeyVersions(ctx context.Context, filePath string) (map[string][]PublicKeyInfo, error) {
	var (
		bKeys []byte
		err error
	)
	if filePath == "" {
		// Environment storage: the value is base64-encoded JSON
		// (the inverse of SavePublicKeyVersions).
		strKeys := os.Getenv(PublicKeysEnv)
		if strKeys == "" {
			return nil, fmt.Errorf("empty environment variable %q for public keys", PublicKeysEnv)
		}
		bKeys, err = base64.StdEncoding.DecodeString(strKeys)
		if err != nil {
			return nil, err
		}
	} else {
		bKeys, err = utils.ReadBytes(ctx, filePath)
		if err != nil {
			return nil, err
		}
	}
	keys := make(map[string][]PublicKeyInfo)
	err = json.Unmarshal(bKeys, &keys)
	return keys, err
}
// getAEADForKMS builds a Tink AEAD primitive backed by the GCP KMS key at
// keyURI, using envelope encryption with an AES128-CTR-HMAC-SHA256 data key.
// When credentialPath is empty, default application credentials are used.
func getAEADForKMS(keyURI, credentialPath string) (tink.AEAD, error) {
	var (
		gcpclient registry.KMSClient
		err error
	)
	if credentialPath != "" {
		gcpclient, err = gcpkms.NewClientWithCredentials(keyURI, credentialPath)
	} else {
		gcpclient, err = gcpkms.NewClient(keyURI)
	}
	if err != nil {
		return nil, err
	}
	registry.RegisterKMSClient(gcpclient)
	// Envelope AEAD: the KMS key wraps a locally generated data-encryption key.
	dek := aead.AES128CTRHMACSHA256KeyTemplate()
	kh, err := keyset.NewHandle(aead.KMSEnvelopeAEADKeyTemplate(keyURI, dek))
	if err != nil {
		return nil, err
	}
	return aead.New(kh)
}
// KMSEncryptData encrypts the input data with GCP KMS.
//
// The key URI should be in the following format, and the key version is not needed.
// "gcp-kms://projects/<GCP ID>/locations/<key location>/keyRings/<key ring name>/cryptoKeys/<key name>"
func KMSEncryptData(ctx context.Context, keyURI, credentialPath string, data []byte) ([]byte, error) {
	primitive, err := getAEADForKMS(keyURI, credentialPath)
	if err != nil {
		return nil, err
	}
	// No associated data is bound to the ciphertext.
	return primitive.Encrypt(data, nil)
}
// KMSDecryptData decrypts the input data with GCP KMS.
func KMSDecryptData(ctx context.Context, keyURI, credentialPath string, encryptedData []byte) ([]byte, error) {
	primitive, err := getAEADForKMS(keyURI, credentialPath)
	if err != nil {
		return nil, err
	}
	// Must match the Encrypt call: no associated data.
	return primitive.Decrypt(encryptedData, nil)
}
// ReadStandardPrivateKeyParams contains necessary parameters for function ReadStandardPrivateKey.
type ReadStandardPrivateKeyParams struct {
	// KMSKeyURI and KMSCredentialPath are required by Google Key Management service.
	// If KMSKeyURI is empty, the private key is not encrypted with KMS.
	KMSKeyURI, KMSCredentialPath string
	// SecretName is required by Google SecretManager service.
	// If SecretName is empty, the key is read without SecretManager.
	SecretName string
	// File path of the (encrypted) private key if it's not stored with SecretManager.
	FilePath string
}
// ReadStandardPrivateKey is called by the helper servers, which reads the standard private key.
//
// The key bytes are read either from SecretManager (when SecretName is set)
// or from a local/GCS file, and are KMS-decrypted when KMSKeyURI is set.
func ReadStandardPrivateKey(ctx context.Context, params *ReadStandardPrivateKeyParams) (*pb.StandardPrivateKey, error) {
	var (
		data []byte
		err  error
	)
	if params.SecretName != "" {
		data, err = utils.ReadSecret(ctx, params.SecretName)
	} else {
		data, err = utils.ReadBytes(ctx, params.FilePath)
	}
	if err != nil {
		return nil, err
	}
	if params.KMSKeyURI != "" {
		data, err = KMSDecryptData(ctx, params.KMSKeyURI, params.KMSCredentialPath, data)
		if err != nil {
			// Fix: previously a non-nil key wrapping undecrypted bytes was
			// returned together with the error; return nil instead so callers
			// can never use an invalid key by mistake.
			return nil, err
		}
	}
	return &pb.StandardPrivateKey{Key: data}, nil
}
// SaveStandardPrivateKeyParams contains necessary parameters for function SaveStandardPrivateKey.
type SaveStandardPrivateKeyParams struct {
	// KMSKeyURI and KMSCredentialPath are required by Google Key Management service.
	// If KMSKeyURI is empty, the private key is not encrypted with KMS.
	KMSKeyURI, KMSCredentialPath string
	// SecretProjectID and SecretID are required by Google SecretManager service.
	// If SecretProjectID is empty, the key is stored without SecretManager.
	SecretProjectID, SecretID string
	// File path of the (encrypted) private key if it's not stored with SecretManager.
	FilePath string
}
// SaveStandardPrivateKey saves the standard encryption private key into a file.
//
// When the private key is stored with Google SecretManager, a secret name should be returned.
// The private keys are allowed to be stored without KMS encryption for testing only, otherwise
// they should always be encrypted before storage.
func SaveStandardPrivateKey(ctx context.Context, params *SaveStandardPrivateKeyParams, privateKey *pb.StandardPrivateKey) (string, error) {
	data := privateKey.Key
	var err error
	if params.KMSKeyURI != "" {
		// Encrypt the raw key bytes with KMS before they leave the process.
		data, err = KMSEncryptData(ctx, params.KMSKeyURI, params.KMSCredentialPath, data)
		if err != nil {
			return "", err
		}
	}
	if params.SecretProjectID != "" {
		// SecretManager storage: the returned secret name is what
		// ReadStandardPrivateKeyParams.SecretName expects.
		return utils.SaveSecret(ctx, data, params.SecretProjectID, params.SecretID)
	}
	// Plain file (or GCS object) storage; no secret name in this case.
	return "", utils.WriteBytes(ctx, data, params.FilePath)
}
// SavePrefixes saves prefixes to a file.
//
// The file can be stored locally or in a GCS bucket (prefixed with 'gs://').
func SavePrefixes(ctx context.Context, filename string, prefixes [][]uint128.Uint128) error {
	serialized, err := json.Marshal(prefixes)
	if err != nil {
		return fmt.Errorf("prefixes marshal(%s) failed: %+v", prefixes, err)
	}
	return utils.WriteBytes(ctx, serialized, filename)
}
// SaveDPFParameters saves the DPF parameters into a file.
//
// The file can be stored locally or in a GCS bucket (prefixed with 'gs://').
func SaveDPFParameters(ctx context.Context, filename string, params *pb.IncrementalDpfParameters) error {
	serialized, err := proto.Marshal(params)
	if err != nil {
		return fmt.Errorf("params marshal(%s) failed: %v", params.String(), err)
	}
	return utils.WriteBytes(ctx, serialized, filename)
}
// ReadPrefixes reads the prefixes from a file.
//
// The file can be stored locally or in a GCS bucket (prefixed with 'gs://').
func ReadPrefixes(ctx context.Context, filename string) ([][]uint128.Uint128, error) {
	serialized, err := utils.ReadBytes(ctx, filename)
	if err != nil {
		return nil, err
	}
	// Start from an empty (non-nil) slice so a JSON "null" still yields a
	// usable value.
	prefixes := [][]uint128.Uint128{}
	if err := json.Unmarshal(serialized, &prefixes); err != nil {
		return nil, err
	}
	return prefixes, nil
}
// ReadDPFParameters reads the DPF parameters from a file.
//
// The file can be stored locally or in a GCS bucket (prefixed with 'gs://').
func ReadDPFParameters(ctx context.Context, filename string) (*pb.IncrementalDpfParameters, error) {
	serialized, err := utils.ReadBytes(ctx, filename)
	if err != nil {
		return nil, err
	}
	params := &pb.IncrementalDpfParameters{}
	if err := proto.Unmarshal(serialized, params); err != nil {
		return nil, err
	}
	return params, nil
}
// SavePrivateKeyParamsCollection saves the information how the private keys are saved.
func SavePrivateKeyParamsCollection(ctx context.Context, idKeys map[string]*ReadStandardPrivateKeyParams, uri string) error {
	serialized, err := json.Marshal(idKeys)
	if err != nil {
		return err
	}
	return utils.WriteBytes(ctx, serialized, uri)
}
// ReadPrivateKeyParamsCollection reads the information how the private keys can be read.
func ReadPrivateKeyParamsCollection(ctx context.Context, filePath string) (map[string]*ReadStandardPrivateKeyParams, error) {
	serialized, err := utils.ReadBytes(ctx, filePath)
	if err != nil {
		return nil, err
	}
	keyParams := make(map[string]*ReadStandardPrivateKeyParams)
	if err = json.Unmarshal(serialized, &keyParams); err != nil {
		return nil, err
	}
	return keyParams, nil
}
// ReadPrivateKeyCollection reads the private storage information from a file, and then uses it to read the private keys.
func ReadPrivateKeyCollection(ctx context.Context, filePath string) (map[string]*pb.StandardPrivateKey, error) {
	keyParams, err := ReadPrivateKeyParamsCollection(ctx, filePath)
	if err != nil {
		return nil, err
	}
	result := make(map[string]*pb.StandardPrivateKey, len(keyParams))
	for id, params := range keyParams {
		privKey, err := ReadStandardPrivateKey(ctx, params)
		if err != nil {
			return nil, err
		}
		result[id] = privKey
	}
	return result, nil
}
// GenerateHybridKeyPairs generates encryption key pairs with specified valid time window.
//
// It returns the private keys keyed by a fresh UUID, plus the matching public
// key records (base64-encoded) annotated with the validity window.
func GenerateHybridKeyPairs(ctx context.Context, keyCount int, notBefore, notAfter string) (map[string]*pb.StandardPrivateKey, []PublicKeyInfo, error) {
	privateKeys := make(map[string]*pb.StandardPrivateKey)
	var publicInfo []PublicKeyInfo
	for i := 0; i < keyCount; i++ {
		id := uuid.New()
		priv, pub, err := standardencrypt.GenerateStandardKeyPair()
		if err != nil {
			return nil, nil, err
		}
		privateKeys[id] = priv
		info := PublicKeyInfo{
			ID:        id,
			Key:       base64.StdEncoding.EncodeToString(pub.Key),
			NotBefore: notBefore,
			NotAfter:  notAfter,
		}
		publicInfo = append(publicInfo, info)
	}
	return privateKeys, publicInfo, nil
}
// GetRandomPublicKey picks a random public key from a list for the browser simulator.
//
// It returns the chosen key's ID and its decoded public key. An explicit error
// is returned for an empty key list; previously this would panic inside
// rand.Intn, which panics when its argument is <= 0.
func GetRandomPublicKey(keys []PublicKeyInfo) (string, *pb.StandardPublicKey, error) {
	if len(keys) == 0 {
		return "", nil, fmt.Errorf("no public keys available")
	}
	keyInfo := keys[rand.Intn(len(keys))]
	bKey, err := base64.StdEncoding.DecodeString(keyInfo.Key)
	if err != nil {
		return "", nil, err
	}
	return keyInfo.ID, &pb.StandardPublicKey{Key: bKey}, nil
}
// SerializeEncryptedReport serializes the EncryptedReport into a string.
//
// The proto is marshalled to its binary wire format and then base64-encoded.
func SerializeEncryptedReport(encrypted *pb.EncryptedReport) (string, error) {
	serialized, err := proto.Marshal(encrypted)
	if err != nil {
		return "", err
	}
	encoded := base64.StdEncoding.EncodeToString(serialized)
	return encoded, nil
}
// DeserializeEncryptedReport deserializes the EncryptedReport from a string.
//
// Inverse of SerializeEncryptedReport: base64-decode, then proto-unmarshal.
func DeserializeEncryptedReport(line string) (*pb.EncryptedReport, error) {
	decoded, err := base64.StdEncoding.DecodeString(line)
	if err != nil {
		return nil, err
	}
	report := &pb.EncryptedReport{}
	if err = proto.Unmarshal(decoded, report); err != nil {
		return nil, err
	}
	return report, nil
}
// DecryptOrUnmarshal tries to decrypt a report first and then unmarshal the payload.
//
// If the report is not encrypted, it unmarshals the payload directly.
// The returned bool reports whether decryption succeeded (i.e. the input was
// treated as encrypted); the payload is valid in either case.
func DecryptOrUnmarshal(encrypted *pb.EncryptedReport, privateKey *pb.StandardPrivateKey) (*reporttypes.Payload, bool, error) {
	payload, isEncrypted := &reporttypes.Payload{}, true
	// Attempt decryption first; a failure here is treated as "payload was not
	// encrypted" rather than as a hard error.
	b, err := standardencrypt.Decrypt(encrypted.EncryptedReport, encrypted.ContextInfo, privateKey)
	if err != nil {
		isEncrypted = false
		// Fall back to interpreting the raw bytes as a CBOR-encoded payload.
		// NOTE(review): this assumes encrypted.EncryptedReport is non-nil —
		// confirm callers never pass a report without that field set.
		if err := utils.UnmarshalCBOR(encrypted.EncryptedReport.Data, payload); err != nil {
			return nil, isEncrypted, fmt.Errorf("failed to decrypt and/or deserialize report: %s", encrypted.String())
		}
	} else if err := utils.UnmarshalCBOR(b, payload); err != nil {
		return nil, isEncrypted, err
	}
	return payload, isEncrypted, nil
}
| google/privacy-sandbox-aggregation-service | encryption/cryptoio.go | GO | apache-2.0 | 12,822 |
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.packaging.impl.artifacts;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.packaging.artifacts.ArtifactType;
import com.intellij.packaging.elements.CompositePackagingElement;
import com.intellij.packaging.elements.PackagingElementFactory;
import com.intellij.packaging.elements.PackagingElementOutputKind;
import consulo.ui.image.Image;
import jakarta.inject.Singleton;
import javax.annotation.Nonnull;
/**
 * Fallback {@link ArtifactType} with id {@code "invalid"}. Presumably used when an
 * artifact's real type cannot be resolved — confirm against callers.
 *
 * @author nik
 */
@Singleton
public class InvalidArtifactType extends ArtifactType {
  /** Returns the application-level singleton registered via {@link ServiceManager}. */
  public static InvalidArtifactType getInstance() {
    return ServiceManager.getService(InvalidArtifactType.class);
  }

  public InvalidArtifactType() {
    super("invalid", "Invalid");
  }

  @Nonnull
  @Override
  public Image getIcon() {
    // Rendered with the "unknown file type" icon to signal the unresolved state.
    return AllIcons.FileTypes.Unknown;
  }

  @Override
  public String getDefaultPathFor(@Nonnull PackagingElementOutputKind kind) {
    // No meaningful default output path for an invalid artifact.
    return "";
  }

  @Nonnull
  @Override
  public CompositePackagingElement<?> createRootElement(@Nonnull PackagingElementFactory packagingElementFactory, @Nonnull String artifactName) {
    return packagingElementFactory.createArtifactRootElement();
  }
}
| consulo/consulo | modules/base/compiler-impl/src/main/java/com/intellij/packaging/impl/artifacts/InvalidArtifactType.java | Java | apache-2.0 | 1,817 |
# frozen_string_literal: true
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "googleauth"
module Google
  module Ads
    module GoogleAds
      module V10
        module Services
          module AdGroupAdLabelService
            # Credentials for the AdGroupAdLabelService API.
            #
            # Subclasses Google::Auth::Credentials, restricting the OAuth 2.0
            # scope to the Google Ads (AdWords) API.
            class Credentials < ::Google::Auth::Credentials
              # OAuth 2.0 scope required to call the Google Ads API.
              self.scope = [
                "https://www.googleapis.com/auth/adwords"
              ]
            end
          end
        end
      end
    end
  end
end
package demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.config.server.EnableConfigServer;
import org.springframework.context.annotation.Configuration;
import org.springframework.cloud.netflix.zuul.EnableZuulProxy;
import org.springframework.cloud.netflix.eureka.server.EnableEurekaServer;
// All-in-one Spring Cloud node: config server, Eureka service registry, and
// Zuul proxy enabled in a single application.
// NOTE(review): @EnableDiscoveryClient (a registry client) combined with
// @EnableEurekaServer (the registry itself) is unusual — presumably the app
// registers with itself; confirm this is intentional.
@Configuration
@EnableAutoConfiguration
@EnableDiscoveryClient
@EnableConfigServer
@EnableEurekaServer
@EnableZuulProxy
public class ConfigServerApplication {

    // Standard Spring Boot entry point.
    public static void main(String[] args) {
        SpringApplication.run(ConfigServerApplication.class, args);
    }
}
| nicolinux72/anarchia | cell/omnidev/src/main/java/demo/ConfigServerApplication.java | Java | apache-2.0 | 747 |
// [WriteFile Name=DistanceMeasurementAnalysis, Category=Analysis]
// [Legal]
// Copyright 2018 Esri.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// [Legal]
#ifdef PCH_BUILD
#include "pch.hpp"
#endif // PCH_BUILD
#include "DistanceMeasurementAnalysis.h"
#include "ArcGISTiledElevationSource.h"
#include "Scene.h"
#include "SceneQuickView.h"
#include "AnalysisOverlay.h"
#include "LocationDistanceMeasurement.h"
#include "Viewpoint.h"
#include "Camera.h"
#include "ArcGISSceneLayer.h"
#include "Point.h"
using namespace Esri::ArcGISRuntime;
// Constructor: only forwards the parent; all scene/analysis setup happens
// later in componentComplete().
DistanceMeasurementAnalysis::DistanceMeasurementAnalysis(QQuickItem* parent /* = nullptr */):
  QQuickItem(parent)
{
}
// Registers the C++ types with the QML type system so they can be
// instantiated from QML.
void DistanceMeasurementAnalysis::init()
{
  // Register classes for QML
  qmlRegisterType<SceneQuickView>("Esri.Samples", 1, 0, "SceneView");
  qmlRegisterType<DistanceMeasurementAnalysis>("Esri.Samples", 1, 0, "DistanceMeasurementAnalysisSample");
}
// Called once the QML component tree is ready: builds the scene (basemap,
// building scene layer, elevation surface), creates the
// LocationDistanceMeasurement analysis with fixed WGS84 start/end points, sets
// the initial camera, hands the scene to the view, and wires up the signals.
void DistanceMeasurementAnalysis::componentComplete()
{
  QQuickItem::componentComplete();

  // Get the Scene View
  m_sceneView = findChild<SceneQuickView*>("sceneView");

  // Create a Scene with the topographic basemap
  Scene* scene = new Scene(Basemap::topographic(this), this);

  // Add a Scene Layer
  ArcGISSceneLayer* sceneLayer = new ArcGISSceneLayer(QUrl("https://tiles.arcgis.com/tiles/P3ePLMYs2RVChkJx/arcgis/rest/services/Buildings_Brest/SceneServer/layers/0"), this);
  sceneLayer->setAltitudeOffset(1); // The elevation source is a very fine resolution so we raise the scene layer slightly so it does not clip the surface
  scene->operationalLayers()->append(sceneLayer);

  // Create and set the surface on the scene
  Surface* surface = new Surface(this);
  surface->elevationSources()->append(
        new ArcGISTiledElevationSource(QUrl("https://scene.arcgis.com/arcgis/rest/services/BREST_DTM_1M/ImageServer"),this));
  scene->setBaseSurface(surface);

  // Add Analysis Overlay (hosts the distance measurement analysis)
  AnalysisOverlay* analysisOverlay = new AnalysisOverlay(this);
  m_sceneView->analysisOverlays()->append(analysisOverlay);

  // Create and add the LocationDistanceMeasurement; coordinates are WGS84
  // longitude/latitude with a z-value (elevation).
  const Point startLocation(-4.494677, 48.384472, 24.772694, SpatialReference::wgs84());
  const Point endLocation(-4.495646, 48.384377, 58.501115, SpatialReference::wgs84());
  m_distanceAnalysis = new LocationDistanceMeasurement(startLocation, endLocation, this);
  m_distanceAnalysis->setUnitSystem(UnitSystem::Metric);
  analysisOverlay->analyses()->append(m_distanceAnalysis);

  // Set initial viewpoint: camera 400m from the start point, pitched 45 degrees.
  constexpr double distance = 400.0;
  constexpr double pitch = 45.0;
  constexpr double heading = 0.0;
  constexpr double roll = 0.0;
  const Camera initCamera(startLocation, distance, heading, pitch, roll);
  const Viewpoint initViewpoint(startLocation, distance, initCamera);
  scene->setInitialViewpoint(initViewpoint);

  // Set the scene on the scene view
  m_sceneView->setArcGISScene(scene);

  connectSignals();
}
// Wires up the analysis and the mouse interaction model:
//  - a plain click moves the measurement START point,
//  - press-and-hold then drag moves the END point (m_isPressAndHold),
//  - pan gestures are ignored via the m_isNavigating flag, which is set by
//    viewpointChanged and cleared on mousePressed/mouseReleased.
void DistanceMeasurementAnalysis::connectSignals()
{
  // connect to signal to obtain updated distances; formats the three distance
  // components as "<value> m" or "<value> ft" strings for the QML UI.
  connect(m_distanceAnalysis, &LocationDistanceMeasurement::measurementChanged, this, [this](const Distance& directDistance,
          const Distance& horizontalDistance,
          const Distance& verticalDistance)
  {
    const QString unitLabel = m_distanceAnalysis->unitSystem() == UnitSystem::Metric ? "m" : "ft";
    m_directDistance = QString::number(directDistance.value(), 'f', 2) + QString(" %1").arg(unitLabel);
    m_horizontalDistance = QString::number(horizontalDistance.value(), 'f', 2) + QString(" %1").arg(unitLabel);
    m_verticalDistance = QString::number(verticalDistance.value(), 'f', 2) + QString(" %1").arg(unitLabel);
    emit directDistanceChanged();
    emit horizontalDistanceChanged();
    emit verticalDistanceChanged();
  });

  // connect to mouse signals to update the analysis

  // When the mouse is pressed and held, start updating the distance analysis end point
  connect(m_sceneView, &SceneQuickView::mousePressedAndHeld, this, [this](QMouseEvent& mouseEvent)
  {
    m_isPressAndHold = true;
    // Asynchronous: result arrives in screenToLocationCompleted below.
    m_sceneView->screenToLocation(mouseEvent.x(), mouseEvent.y());
  });

  // When the mouse is released...
  connect(m_sceneView, &SceneQuickView::mouseReleased, this, [this](QMouseEvent& mouseEvent)
  {
    // Check if the mouse was released from a pan gesture
    if (m_isNavigating)
    {
      m_isNavigating = false;
      return;
    }

    // Ignore if Right click
    if (mouseEvent.button() == Qt::RightButton)
      return;

    // If pressing and holding, do nothing
    if (m_isPressAndHold)
      m_isPressAndHold = false;
    // Else get the location from the screen coordinates
    else
      m_sceneView->screenToLocation(mouseEvent.x(), mouseEvent.y());
  });

  // Update the distance analysis when the mouse moves if it is a press and hold movement
  connect(m_sceneView, &SceneQuickView::mouseMoved, this, [this](QMouseEvent& mouseEvent)
  {
    if (m_isPressAndHold)
      m_sceneView->screenToLocation(mouseEvent.x(), mouseEvent.y());
  });

  // Set a flag when mousePressed signal emits
  connect(m_sceneView, &SceneQuickView::mousePressed, this, [this]
  {
    m_isNavigating = false;
  });

  // When screenToLocation completes...
  connect(m_sceneView, &SceneQuickView::screenToLocationCompleted, this, [this](QUuid, Point pt)
  {
    // If it was from a press and hold, update the end location
    if (m_isPressAndHold)
      m_distanceAnalysis->setEndLocation(pt);
    // Else if it was a normal mouse click (press and release), update the start location
    else
      m_distanceAnalysis->setStartLocation(pt);
  });

  // Set a flag when viewpointChanged signal emits
  connect(m_sceneView, &SceneQuickView::viewpointChanged, this, [this]
  {
    m_isNavigating = true;
  });
}
// Switches the analysis between metric and imperial units.
// Any name other than "Metric" selects the imperial system.
void DistanceMeasurementAnalysis::setUnits(const QString& unitName)
{
  if (!m_distanceAnalysis)
    return;

  m_distanceAnalysis->setUnitSystem(unitName == "Metric" ? UnitSystem::Metric : UnitSystem::Imperial);
}
// Formatted direct-distance text ("<value> <unit>"), updated in connectSignals().
QString DistanceMeasurementAnalysis::directDistance() const
{
  return m_directDistance;
}
// Formatted horizontal-distance text ("<value> <unit>"), updated in connectSignals().
QString DistanceMeasurementAnalysis::horizontalDistance() const
{
  return m_horizontalDistance;
}
// Formatted vertical-distance text ("<value> <unit>"), updated in connectSignals().
QString DistanceMeasurementAnalysis::verticalDistance() const
{
  return m_verticalDistance;
}
| Esri/arcgis-runtime-samples-qt | ArcGISRuntimeSDKQt_CppSamples/Analysis/DistanceMeasurementAnalysis/DistanceMeasurementAnalysis.cpp | C++ | apache-2.0 | 7,003 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpointemail.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.pinpointemail.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * ListDomainDeliverabilityCampaignsRequestMarshaller
 * <p>
 * Generated marshaller that binds each request member to its HTTP wire
 * location (resource path or query string). Generated code; do not edit by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListDomainDeliverabilityCampaignsRequestMarshaller {

    // StartDate and EndDate are serialized as ISO-8601 timestamps in the query string.
    private static final MarshallingInfo<java.util.Date> STARTDATE_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("StartDate").timestampFormat("iso8601").build();
    private static final MarshallingInfo<java.util.Date> ENDDATE_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("EndDate").timestampFormat("iso8601").build();
    // SubscribedDomain is bound into the resource path, not the query string.
    private static final MarshallingInfo<String> SUBSCRIBEDDOMAIN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PATH).marshallLocationName("SubscribedDomain").build();
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("NextToken").build();
    private static final MarshallingInfo<Integer> PAGESIZE_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("PageSize").build();

    // Stateless, so a single shared instance suffices.
    private static final ListDomainDeliverabilityCampaignsRequestMarshaller instance = new ListDomainDeliverabilityCampaignsRequestMarshaller();

    public static ListDomainDeliverabilityCampaignsRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(ListDomainDeliverabilityCampaignsRequest listDomainDeliverabilityCampaignsRequest, ProtocolMarshaller protocolMarshaller) {

        if (listDomainDeliverabilityCampaignsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(listDomainDeliverabilityCampaignsRequest.getStartDate(), STARTDATE_BINDING);
            protocolMarshaller.marshall(listDomainDeliverabilityCampaignsRequest.getEndDate(), ENDDATE_BINDING);
            protocolMarshaller.marshall(listDomainDeliverabilityCampaignsRequest.getSubscribedDomain(), SUBSCRIBEDDOMAIN_BINDING);
            protocolMarshaller.marshall(listDomainDeliverabilityCampaignsRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listDomainDeliverabilityCampaignsRequest.getPageSize(), PAGESIZE_BINDING);
        } catch (Exception e) {
            // Per SDK convention, any marshalling failure surfaces as SdkClientException.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
| jentfoo/aws-sdk-java | aws-java-sdk-pinpointemail/src/main/java/com/amazonaws/services/pinpointemail/model/transform/ListDomainDeliverabilityCampaignsRequestMarshaller.java | Java | apache-2.0 | 3,622 |
import {observable, action, computed} from 'mobx'
import Todo from '../models/Todo'
export class TodoStore {
@observable todos = [];
@observable showingOnlyIncompleteTodos;
@action createTodo({title, done}) {
this.todos.push(new Todo({title, done}));
}
@action toggleOnlyIncompleteTodos() {
this.showingOnlyIncompleteTodos = !this.showingOnlyIncompleteTodos;
}
@computed get incompleteTodos() {
return this.todos.filter((todo) => !todo.done);
}
constructor() {
this.showingOnlyIncompleteTodos = false;
this.createTodo({title: 'Sample Todo 1', done: true});
this.createTodo({title: 'Sample Todo 2', done: false});
}
}
export default new TodoStore();
| smakazmidd/react-starter | client/src/script/stores/TodoStore.js | JavaScript | apache-2.0 | 700 |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.12.03 at 08:07:19 AM CET
//
package iso.std.iso._20022.tech.xsd.pain_008_003_02;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for PartyIdentificationSEPA2 complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="PartyIdentificationSEPA2">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Nm" type="{urn:iso:std:iso:20022:tech:xsd:pain.008.003.02}Max70Text"/>
* <element name="PstlAdr" type="{urn:iso:std:iso:20022:tech:xsd:pain.008.003.02}PostalAddressSEPA" minOccurs="0"/>
* <element name="Id" type="{urn:iso:std:iso:20022:tech:xsd:pain.008.003.02}PartySEPAChoice" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "PartyIdentificationSEPA2", propOrder = {
    "nm",
    "pstlAdr",
    "id"
})
public class PartyIdentificationSEPA2 {

    // Party name (SEPA Max70Text); the only mandatory element per the schema.
    @XmlElement(name = "Nm", required = true)
    protected String nm;
    // Optional postal address of the party.
    @XmlElement(name = "PstlAdr")
    protected PostalAddressSEPA pstlAdr;
    // Optional party identification (PartySEPAChoice).
    @XmlElement(name = "Id")
    protected PartySEPAChoice id;

    /**
     * Gets the value of the nm property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getNm() {
        return nm;
    }

    /**
     * Sets the value of the nm property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setNm(String value) {
        this.nm = value;
    }

    /**
     * Gets the value of the pstlAdr property.
     *
     * @return
     *     possible object is
     *     {@link PostalAddressSEPA }
     *
     */
    public PostalAddressSEPA getPstlAdr() {
        return pstlAdr;
    }

    /**
     * Sets the value of the pstlAdr property.
     *
     * @param value
     *     allowed object is
     *     {@link PostalAddressSEPA }
     *
     */
    public void setPstlAdr(PostalAddressSEPA value) {
        this.pstlAdr = value;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link PartySEPAChoice }
     *
     */
    public PartySEPAChoice getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link PartySEPAChoice }
     *
     */
    public void setId(PartySEPAChoice value) {
        this.id = value;
    }

}
| germamix/sepa-pain-lib | sepa-pain-lib/src/main/java/iso/std/iso/_20022/tech/xsd/pain_008_003_02/PartyIdentificationSEPA2.java | Java | apache-2.0 | 3,298 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Test.Utilities;
using Microsoft.CodeAnalysis.ExpressionEvaluator;
using Microsoft.CodeAnalysis.ExpressionEvaluator.UnitTests;
using Microsoft.CodeAnalysis.Test.Utilities;
using Microsoft.DiaSymReader;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.CSharp.ExpressionEvaluator.UnitTests
{
public class LocalsTests : ExpressionCompilerTestBase
{
[Fact]
public void NoLocals()
{
var source =
@"class C
{
static void M()
{
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.NotNull(assembly);
Assert.Equal(assembly.Count, 0);
Assert.Equal(locals.Count, 0);
locals.Free();
});
}
[Fact]
public void Locals()
{
var source =
@"class C
{
void M(int[] a)
{
string b;
a[1]++;
lock (new C())
{
#line 999
int c = 3;
b = a[c].ToString();
}
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M", atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.NotNull(assembly);
Assert.NotEqual(assembly.Count, 0);
Assert.Equal(locals.Count, 4);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (string V_0, //b
C V_1,
bool V_2,
int V_3) //c
IL_0000: ldarg.0
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "a", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (string V_0, //b
C V_1,
bool V_2,
int V_3) //c
IL_0000: ldarg.1
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "b", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (string V_0, //b
C V_1,
bool V_2,
int V_3) //c
IL_0000: ldloc.0
IL_0001: ret
}
");
VerifyLocal(testData, typeName, locals[3], "<>m3", "c", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (string V_0, //b
C V_1,
bool V_2,
int V_3) //c
IL_0000: ldloc.3
IL_0001: ret
}");
locals.Free();
});
}
        /// <summary>
        /// Verifies the locals reported inside a switch statement: the pattern
        /// variable 's' is declared in two different case sections, so the same
        /// display name must bind to a different slot (V_5 in the 'string s' case
        /// at line 1000, V_6 in the 'int s' case at line 2000).
        /// </summary>
        [Fact]
        public void LocalsInSwitch()
        {
            var source =
@"class C
{
    void M(object o)
    {
        switch (o)
        {
            case string s:
                var a = s;
#line 1000
                return;
            case int s:
#line 2000
                return;
            default:
                return;
        }
    }
}";
            var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation0, runtime =>
            {
                var context = CreateMethodContext(runtime, "C.M", atLineNumber: 1000);
                var testData = new CompilationTestData();
                var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                string typeName;
                var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.NotNull(assembly);
                Assert.NotEqual(assembly.Count, 0);
                Assert.Equal(locals.Count, 4);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5, //s
                int V_6,
                object V_7,
                int? V_8)
  IL_0000: ldarg.0
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5, //s
                int V_6,
                object V_7,
                int? V_8)
  IL_0000: ldarg.1
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[2], "<>m2", "a", expectedILOpt:
@"{
  // Code size 3 (0x3)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5, //s
                int V_6,
                object V_7,
                int? V_8)
  IL_0000: ldloc.s V_4
  IL_0002: ret
}
");
                VerifyLocal(testData, typeName, locals[3], "<>m3", "s", expectedILOpt:
@"{
  // Code size 3 (0x3)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5, //s
                int V_6,
                object V_7,
                int? V_8)
  IL_0000: ldloc.s V_5
  IL_0002: ret
}");
                locals.Free();

                // Re-create the context in the 'case int s:' section (line 2000);
                // 's' now refers to slot V_6 and the 'string s' slot V_5 is unnamed.
                context = CreateMethodContext(runtime, "C.M", atLineNumber: 2000);
                testData = new CompilationTestData();
                locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.NotNull(assembly);
                Assert.NotEqual(assembly.Count, 0);
                Assert.Equal(locals.Count, 4);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5,
                int V_6, //s
                object V_7,
                int? V_8)
  IL_0000: ldarg.0
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5,
                int V_6, //s
                object V_7,
                int? V_8)
  IL_0000: ldarg.1
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[2], "<>m2", "a", expectedILOpt:
@"{
  // Code size 3 (0x3)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5,
                int V_6, //s
                object V_7,
                int? V_8)
  IL_0000: ldloc.s V_4
  IL_0002: ret
}
");
                VerifyLocal(testData, typeName, locals[3], "<>m3", "s", expectedILOpt:
@"{
  // Code size 3 (0x3)
  .maxstack 1
  .locals init (object V_0,
                string V_1,
                int V_2,
                object V_3,
                string V_4, //a
                string V_5,
                int V_6, //s
                object V_7,
                int? V_8)
  IL_0000: ldloc.s V_6
  IL_0002: ret
}");
                locals.Free();
            });
        }
        /// <summary>
        /// Verifies locals inside a switch whose case bodies return lambdas: the
        /// captured variable 'a' is hoisted into a display class and must remain
        /// visible both in C.M (lines 1000/3000) and inside the lambda bodies
        /// (lines 2000/4000). The commented-out assertions for 's' track the known
        /// limitation in https://github.com/dotnet/roslyn/issues/16594.
        /// </summary>
        [Fact]
        [WorkItem(16594, "https://github.com/dotnet/roslyn/issues/16594")]
        public void LocalsInSwitchWithLambda()
        {
            var source =
@"class C
{
    System.Action M(object o)
    {
        switch (o)
        {
            case string s:
                var a = s;
#line 1000
                return () =>
                {
#line 2000
                    System.Console.WriteLine(s + a);
                };
            case int s:
#line 3000
                return () =>
                {
#line 4000
                    System.Console.WriteLine(s);
                };
            default:
                return null;
        }
    }
}";
            var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation0, runtime =>
            {
                // Context in the 'case string s:' section of C.M (line 1000).
                var context = CreateMethodContext(runtime, "C.M", atLineNumber: 1000);
                var testData = new CompilationTestData();
                var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                string typeName;
                var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.NotNull(assembly);
                Assert.NotEqual(assembly.Count, 0);
                Assert.Equal(locals.Count, 3);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
                object V_1,
                string V_2,
                int V_3,
                object V_4,
                object V_5,
                int? V_6,
                System.Action V_7)
  IL_0000: ldarg.0
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
                object V_1,
                string V_2,
                int V_3,
                object V_4,
                object V_5,
                int? V_6,
                System.Action V_7)
  IL_0000: ldarg.1
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[2], "<>m2", "a", expectedILOpt:
@"{
  // Code size 7 (0x7)
  .maxstack 1
  .locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
                object V_1,
                string V_2,
                int V_3,
                object V_4,
                object V_5,
                int? V_6,
                System.Action V_7)
  IL_0000: ldloc.0
  IL_0001: ldfld ""string C.<>c__DisplayClass0_0.a""
  IL_0006: ret
}
");
                // We should be able to evaluate "s" within this context, https://github.com/dotnet/roslyn/issues/16594.
                // VerifyLocal(testData, typeName, locals[3], "<>m3", "s", expectedILOpt:
                //@"{
                //  // Code size 8 (0x8)
                //  .maxstack 1
                //  .locals init (C.<>c__DisplayClass0_1 V_0, //CS$<>8__locals0
                //                object V_1,
                //                string V_2,
                //                int V_3,
                //                object V_4,
                //                object V_5,
                //                int? V_6,
                //                C.<>c__DisplayClass0_0 V_7, //CS$<>8__locals1
                //                System.Action V_8,
                //                C.<>c__DisplayClass0_2 V_9)
                //  IL_0000: ldloc.s V_7
                //  IL_0002: ldfld ""string C.<>c__DisplayClass0_0.s""
                //  IL_0007: ret
                //}");
                locals.Free();

                // Context in the 'case int s:' section of C.M (line 3000).
                context = CreateMethodContext(runtime, "C.M", atLineNumber: 3000);
                testData = new CompilationTestData();
                locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.NotNull(assembly);
                Assert.NotEqual(assembly.Count, 0);
                Assert.Equal(locals.Count, 3);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
                object V_1,
                string V_2,
                int V_3,
                object V_4,
                object V_5,
                int? V_6,
                System.Action V_7)
  IL_0000: ldarg.0
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
  // Code size 2 (0x2)
  .maxstack 1
  .locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
                object V_1,
                string V_2,
                int V_3,
                object V_4,
                object V_5,
                int? V_6,
                System.Action V_7)
  IL_0000: ldarg.1
  IL_0001: ret
}");
                VerifyLocal(testData, typeName, locals[2], "<>m2", "a", expectedILOpt:
@"{
  // Code size 7 (0x7)
  .maxstack 1
  .locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
                object V_1,
                string V_2,
                int V_3,
                object V_4,
                object V_5,
                int? V_6,
                System.Action V_7)
  IL_0000: ldloc.0
  IL_0001: ldfld ""string C.<>c__DisplayClass0_0.a""
  IL_0006: ret
}
");
                // We should be able to evaluate "s" within this context, https://github.com/dotnet/roslyn/issues/16594.
                // VerifyLocal(testData, typeName, locals[3], "<>m3", "s", expectedILOpt:
                //@"{
                //  // Code size 8 (0x8)
                //  .maxstack 1
                //  .locals init (C.<>c__DisplayClass0_1 V_0, //CS$<>8__locals0
                //                object V_1,
                //                string V_2,
                //                int V_3,
                //                object V_4,
                //                object V_5,
                //                int? V_6,
                //                C.<>c__DisplayClass0_0 V_7,
                //                System.Action V_8,
                //                C.<>c__DisplayClass0_2 V_9) //CS$<>8__locals2
                //  IL_0000: ldloc.s V_9
                //  IL_0002: ldfld ""int C.<>c__DisplayClass0_2.s""
                //  IL_0007: ret
                //}");
                locals.Free();

                // Now evaluate inside the first lambda body (line 2000).
                context = CreateMethodContext(runtime, "C.<>c__DisplayClass0_0.<M>b__0", atLineNumber: 2000);
                testData = new CompilationTestData();
                locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.NotNull(assembly);
                Assert.NotEqual(assembly.Count, 0);
                Assert.Equal(locals.Count, 1);
                // We should be able to evaluate "s" within this context, https://github.com/dotnet/roslyn/issues/16594.
                // VerifyLocal(testData, typeName, locals[0], "<>m0", "s", expectedILOpt:
                //@"{
                //  // Code size 7 (0x7)
                //  .maxstack 1
                //  IL_0000: ldarg.0
                //  IL_0001: ldfld ""string C.<>c__DisplayClass0_0.s""
                //  IL_0006: ret
                //}");
                VerifyLocal(testData, typeName, locals[0], "<>m0", "a", expectedILOpt:
@"{
  // Code size 7 (0x7)
  .maxstack 1
  IL_0000: ldarg.0
  IL_0001: ldfld ""string C.<>c__DisplayClass0_0.a""
  IL_0006: ret
}");
                locals.Free();

                // And inside the second lambda body (line 4000).
                context = CreateMethodContext(runtime, "C.<>c__DisplayClass0_0.<M>b__1", atLineNumber: 4000);
                testData = new CompilationTestData();
                locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.NotNull(assembly);
                Assert.NotEqual(assembly.Count, 0);
                Assert.Equal(locals.Count, 1);
                // We should be able to evaluate "s" within this context, https://github.com/dotnet/roslyn/issues/16594.
                // VerifyLocal(testData, typeName, locals[0], "<>m0", "s", expectedILOpt:
                //@"{
                //  // Code size 7 (0x7)
                //  .maxstack 1
                //  IL_0000: ldarg.0
                //  IL_0001: ldfld ""int C.<>c__DisplayClass0_2.s""
                //  IL_0006: ret
                //}");
                VerifyLocal(testData, typeName, locals[0], "<>m0", "a", expectedILOpt:
@"{
  // Code size 7 (0x7)
  .maxstack 1
  IL_0000: ldarg.0
  IL_0001: ldfld ""string C.<>c__DisplayClass0_0.a""
  IL_0006: ret
}");
                locals.Free();
            });
        }
[Fact]
public void LocalsInSwitchWithAwait()
{
var source =
@"
using System.Threading.Tasks;
class C
{
async Task<object> F()
{
return new object();
}
async Task<object> M(object o)
{
switch (o)
{
case string s:
var a = s;
#line 1000
await F();
System.Console.WriteLine(s + a);
return o;
case int s:
#line 2000
await F();
System.Console.WriteLine(s);
return o;
default:
return o;
}
}
}";
var compilation0 = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll,
references: new[] { SystemRef_v4_0_30319_17929, SystemCoreRef_v4_0_30319_17929, CSharpRef });
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext", atLineNumber: 1000);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.NotNull(assembly);
Assert.NotEqual(assembly.Count, 0);
Assert.Equal(locals.Count, 4);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""C C.<M>d__1.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.o""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "a", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""string C.<M>d__1.<a>5__1""
IL_0006: ret
}
");
VerifyLocal(testData, typeName, locals[3], "<>m3", "s", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""string C.<M>d__1.<s>5__2""
IL_0006: ret
}");
locals.Free();
context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext", atLineNumber: 2000);
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.NotNull(assembly);
Assert.NotEqual(assembly.Count, 0);
Assert.Equal(locals.Count, 4);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""C C.<M>d__1.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.o""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "a", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""string C.<M>d__1.<a>5__1""
IL_0006: ret
}
");
VerifyLocal(testData, typeName, locals[3], "<>m3", "s", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
object V_2,
string V_3,
int V_4,
object V_5,
object V_6,
int? V_7,
System.Runtime.CompilerServices.TaskAwaiter<object> V_8,
C.<M>d__1 V_9,
System.Runtime.CompilerServices.TaskAwaiter<object> V_10,
System.Exception V_11)
IL_0000: ldarg.0
IL_0001: ldfld ""int C.<M>d__1.<s>5__3""
IL_0006: ret
}");
locals.Free();
});
}
/// <summary>
/// No local signature (debugging a .dmp with no heap). Local
/// names are known but types are not so the locals are dropped.
/// Expressions that do not involve locals can be evaluated however.
/// </summary>
[Fact]
public void NoLocalSignature()
{
var source =
@"class C
{
void M(int[] a)
{
string b;
a[1]++;
lock (new C())
{
#line 999
int c = 3;
b = a[c].ToString();
}
}
}";
var comp = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(comp, references: null, includeLocalSignatures: false, includeIntrinsicAssembly: true, validator: runtime =>
{
var context = CreateMethodContext(
runtime,
methodName: "C.M",
atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "a", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ret
}");
locals.Free();
string error;
testData = new CompilationTestData();
context.CompileExpression("b", out error, testData);
Assert.Equal(error, "error CS0103: The name 'b' does not exist in the current context");
testData = new CompilationTestData();
context.CompileExpression("a[1]", out error, testData);
string actualIL = testData.GetMethodData("<>x.<>m0").GetMethodIL();
AssertEx.AssertEqualToleratingWhitespaceDifferences(actualIL,
@"{
// Code size 4 (0x4)
.maxstack 2
IL_0000: ldarg.1
IL_0001: ldc.i4.1
IL_0002: ldelem.i4
IL_0003: ret
}");
});
}
[Fact]
public void LocalsAndPseudoVariables()
{
var source =
@"class C
{
void M(object o)
{
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var aliases = ImmutableArray.Create(
ExceptionAlias(typeof(System.IO.IOException)),
ReturnValueAlias(2, typeof(string)),
ReturnValueAlias(),
ObjectIdAlias(2, typeof(bool)),
VariableAlias("o", "C"));
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var diagnostics = DiagnosticBag.GetInstance();
var testData = new CompilationTestData();
context.CompileGetLocals(
locals,
argumentsOnly: true,
aliases: aliases,
diagnostics: diagnostics,
typeName: out typeName,
testData: testData);
Assert.Equal(locals.Count, 1);
VerifyLocal(testData, typeName, locals[0], "<>m0", "o");
locals.Clear();
testData = new CompilationTestData();
context.CompileGetLocals(
locals,
argumentsOnly: false,
aliases: aliases,
diagnostics: diagnostics,
typeName: out typeName,
testData: testData);
diagnostics.Free();
Assert.Equal(locals.Count, 6);
VerifyLocal(testData, typeName, locals[0], "<>m0", "$exception", "Error", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 11 (0xb)
.maxstack 1
IL_0000: call ""System.Exception Microsoft.VisualStudio.Debugger.Clr.IntrinsicMethods.GetException()""
IL_0005: castclass ""System.IO.IOException""
IL_000a: ret
}");
// $ReturnValue is suppressed since it always matches the last $ReturnValueN
VerifyLocal(testData, typeName, locals[1], "<>m1", "$ReturnValue2", "Method M2 returned", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 12 (0xc)
.maxstack 1
IL_0000: ldc.i4.2
IL_0001: call ""object Microsoft.VisualStudio.Debugger.Clr.IntrinsicMethods.GetReturnValue(int)""
IL_0006: castclass ""string""
IL_000b: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "$2", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 16 (0x10)
.maxstack 1
IL_0000: ldstr ""$2""
IL_0005: call ""object Microsoft.VisualStudio.Debugger.Clr.IntrinsicMethods.GetObjectByAlias(string)""
IL_000a: unbox.any ""bool""
IL_000f: ret
}");
VerifyLocal(testData, typeName, locals[3], "<>m3", "o", expectedILOpt:
@"{
// Code size 16 (0x10)
.maxstack 1
IL_0000: ldstr ""o""
IL_0005: call ""object Microsoft.VisualStudio.Debugger.Clr.IntrinsicMethods.GetObjectByAlias(string)""
IL_000a: castclass ""C""
IL_000f: ret
}");
VerifyLocal(testData, typeName, locals[4], "<>m4", "this", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[5], "<>m5", "o", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ret
}");
locals.Free();
// Confirm that the Watch window is unaffected by the filtering in the Locals window.
string error;
context.CompileExpression("$ReturnValue", DkmEvaluationFlags.TreatAsExpression, aliases, out error);
Assert.Null(error);
});
}
[Fact]
public void This()
{
var source =
@"class C
{
void M(object @this)
{
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "@this", expectedILOpt: // Native EE uses "this" rather than "@this".
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ret
}");
locals.Free();
});
}
[Fact]
public void ArgumentsOnly()
{
var source =
@"class C
{
void M<T>(T x)
{
object y = x;
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: true, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 1);
VerifyLocal(testData, typeName, locals[0], "<>m0<T>", "x", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (object V_0) //y
IL_0000: ldarg.1
IL_0001: ret
}",
expectedGeneric: true);
locals.Free();
});
}
/// <summary>
/// Compiler-generated locals should be ignored.
/// </summary>
[Fact]
public void CompilerGeneratedLocals()
{
var source =
@"class C
{
static bool F(object[] args)
{
if (args == null)
{
return true;
}
foreach (var o in args)
{
#line 999
}
((System.Func<object>)(() => args[0]))();
return false;
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.F", atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
VerifyLocal(testData, typeName, locals[0], "<>m0", "args", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
bool V_1,
bool V_2,
object[] V_3,
int V_4,
object V_5) //o
IL_0000: ldloc.0
IL_0001: ldfld ""object[] C.<>c__DisplayClass0_0.args""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "o", expectedILOpt:
@"{
// Code size 3 (0x3)
.maxstack 1
.locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
bool V_1,
bool V_2,
object[] V_3,
int V_4,
object V_5) //o
IL_0000: ldloc.s V_5
IL_0002: ret
}");
locals.Free();
});
}
[WorkItem(928113, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/928113")]
[Fact]
public void Constants()
{
var source =
@"class C
{
const int x = 2;
static int F(int w)
{
#line 888
w.ToString(); // Force a non-hidden sequence point.
const int y = 3;
const object v = null;
if ((v == null) || (w < 2))
{
const string z = ""str"";
#line 999
string u = z;
w += z.Length;
}
return w + x + y;
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.F", atLineNumber: 888);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(3, locals.Count);
VerifyLocal(testData, typeName, locals[0], "<>m0", "w");
VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (bool V_0,
string V_1,
int V_2)
IL_0000: ldc.i4.3
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "v", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (bool V_0,
string V_1,
int V_2)
IL_0000: ldnull
IL_0001: ret
}");
locals.Free();
context = CreateMethodContext(
runtime,
methodName: "C.F",
atLineNumber: 999);
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 5);
VerifyLocal(testData, typeName, locals[0], "<>m0", "w");
VerifyLocal(testData, typeName, locals[1], "<>m1", "u");
VerifyLocal(testData, typeName, locals[2], "<>m2", "y", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
VerifyLocal(testData, typeName, locals[3], "<>m3", "v", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
VerifyLocal(testData, typeName, locals[4], "<>m4", "z", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 6 (0x6)
.maxstack 1
.locals init (bool V_0,
string V_1, //u
int V_2)
IL_0000: ldstr ""str""
IL_0005: ret
}");
locals.Free();
});
}
[Fact]
public void ConstantEnum()
{
var source =
@"enum E { A, B }
class C
{
static void M(E x)
{
const E y = E.B;
}
static void Main()
{
M(E.A);
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugExe);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
var method = (MethodSymbol)testData.GetMethodData("<>x.<>m0").Method;
Assert.Equal(method.Parameters[0].Type, method.ReturnType);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "x", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ret
}");
method = (MethodSymbol)testData.GetMethodData("<>x.<>m1").Method;
Assert.Equal(method.Parameters[0].Type, method.ReturnType);
VerifyLocal(testData, "<>x", locals[1], "<>m1", "y", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldc.i4.1
IL_0001: ret
}");
locals.Free();
});
}
[Fact]
public void ConstantEnumAndTypeParameter()
{
var source =
@"class C<T>
{
enum E { A }
internal static void M<U>() where U : T
{
const C<T>.E t = E.A;
const C<U>.E u = 0;
}
}
class P
{
static void Main()
{
C<object>.M<string>();
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugExe);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 3);
VerifyLocal(testData, "<>x<T>", locals[0], "<>m0<U>", "t", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldc.i4.0
IL_0001: ret
}",
expectedGeneric: true);
VerifyLocal(testData, "<>x<T>", locals[1], "<>m1<U>", "u", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldc.i4.0
IL_0001: ret
}",
expectedGeneric: true);
VerifyLocal(testData, "<>x<T>", locals[2], "<>m2<U>", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 6 (0x6)
.maxstack 1
IL_0000: newobj ""<>c__TypeVariables<T, U>..ctor()""
IL_0005: ret
}",
expectedGeneric: true);
testData.GetMethodData("<>c__TypeVariables<T, U>..ctor").VerifyIL(
@"{
// Code size 7 (0x7)
.maxstack 1
IL_0000: ldarg.0
IL_0001: call ""object..ctor()""
IL_0006: ret
}");
locals.Free();
});
}
[Fact]
public void CapturedLocalsOutsideLambda()
{
var source =
@"class C
{
static void F(System.Func<object> f)
{
}
void M(C x)
{
var y = new C();
F(() => x ?? y ?? this);
if (x != null)
{
#line 999
var z = 6;
var w = 7;
F(() => y ?? (object)w);
}
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(
runtime,
methodName: "C.M",
atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
bool V_1,
C.<>c__DisplayClass1_1 V_2, //CS$<>8__locals1
int V_3) //z
IL_0000: ldloc.0
IL_0001: ldfld ""C C.<>c__DisplayClass1_0.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "x", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
bool V_1,
C.<>c__DisplayClass1_1 V_2, //CS$<>8__locals1
int V_3) //z
IL_0000: ldloc.0
IL_0001: ldfld ""C C.<>c__DisplayClass1_0.x""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "z", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
bool V_1,
C.<>c__DisplayClass1_1 V_2, //CS$<>8__locals1
int V_3) //z
IL_0000: ldloc.3
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[3], "<>m3", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
bool V_1,
C.<>c__DisplayClass1_1 V_2, //CS$<>8__locals1
int V_3) //z
IL_0000: ldloc.0
IL_0001: ldfld ""C C.<>c__DisplayClass1_0.y""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[4], "<>m4", "w", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
bool V_1,
C.<>c__DisplayClass1_1 V_2, //CS$<>8__locals1
int V_3) //z
IL_0000: ldloc.2
IL_0001: ldfld ""int C.<>c__DisplayClass1_1.w""
IL_0006: ret
}");
Assert.Equal(locals.Count, 5);
locals.Free();
});
}
[Fact]
public void CapturedLocalsInsideLambda()
{
var source =
@"class C
{
static void F(System.Func<object, object> f)
{
f(null);
}
void M()
{
var x = new object();
F(_1 =>
{
var y = new object();
F(_2 => y);
return x ?? this;
});
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<>c__DisplayClass1_1.<M>b__0");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
object V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""C C.<>c__DisplayClass1_1.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "_1", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
object V_1)
IL_0000: ldarg.1
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
object V_1)
IL_0000: ldloc.0
IL_0001: ldfld ""object C.<>c__DisplayClass1_0.y""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[3], "<>m3", "x", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass1_0 V_0, //CS$<>8__locals0
object V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<>c__DisplayClass1_1.x""
IL_0006: ret
}");
Assert.Equal(locals.Count, 4);
locals.Free();
});
}
[Fact]
public void NestedLambdas()
{
var source =
@"using System;
class C
{
static void Main()
{
Func<object, object, object, object, Func<object, object, object, Func<object, object, Func<object, object>>>> f = (x1, x2, x3, x4) =>
{
if (x1 == null) return null;
return (y1, y2, y3) =>
{
if ((y1 ?? x2) == null) return null;
return (z1, z2) =>
{
if ((z1 ?? y2 ?? x3) == null) return null;
return w1 =>
{
if ((z2 ?? y3 ?? x4) == null) return null;
return w1;
};
};
};
};
f(1, 2, 3, 4)(5, 6, 7)(8, 9)(10);
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<>c.<Main>b__0_0");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "x2", expectedILOpt:
@"
{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass0_0 V_0, //CS$<>8__locals0
bool V_1,
System.Func<object, object, object, System.Func<object, object, System.Func<object, object>>> V_2)
IL_0000: ldloc.0
IL_0001: ldfld ""object C.<>c__DisplayClass0_0.x2""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "x3");
VerifyLocal(testData, typeName, locals[2], "<>m2", "x4");
VerifyLocal(testData, typeName, locals[3], "<>m3", "x1");
Assert.Equal(locals.Count, 4);
locals.Free();
context = CreateMethodContext(
runtime,
methodName: "C.<>c__DisplayClass0_0.<Main>b__1");
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "y2", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass0_1 V_0, //CS$<>8__locals0
bool V_1,
System.Func<object, object, System.Func<object, object>> V_2)
IL_0000: ldloc.0
IL_0001: ldfld ""object C.<>c__DisplayClass0_1.y2""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "y3");
VerifyLocal(testData, typeName, locals[2], "<>m2", "y1");
VerifyLocal(testData, typeName, locals[3], "<>m3", "x2");
VerifyLocal(testData, typeName, locals[4], "<>m4", "x3", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass0_1 V_0, //CS$<>8__locals0
bool V_1,
System.Func<object, object, System.Func<object, object>> V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<>c__DisplayClass0_0.x3""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[5], "<>m5", "x4");
Assert.Equal(locals.Count, 6);
locals.Free();
context = CreateMethodContext(
runtime,
methodName: "C.<>c__DisplayClass0_1.<Main>b__2");
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "z2", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (C.<>c__DisplayClass0_2 V_0, //CS$<>8__locals0
bool V_1,
System.Func<object, object> V_2)
IL_0000: ldloc.0
IL_0001: ldfld ""object C.<>c__DisplayClass0_2.z2""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "z1");
VerifyLocal(testData, typeName, locals[2], "<>m2", "y2");
VerifyLocal(testData, typeName, locals[3], "<>m3", "y3");
VerifyLocal(testData, typeName, locals[4], "<>m4", "x2");
VerifyLocal(testData, typeName, locals[5], "<>m5", "x3", expectedILOpt:
@"{
// Code size 12 (0xc)
.maxstack 1
.locals init (C.<>c__DisplayClass0_2 V_0, //CS$<>8__locals0
bool V_1,
System.Func<object, object> V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""C.<>c__DisplayClass0_0 C.<>c__DisplayClass0_1.CS$<>8__locals1""
IL_0006: ldfld ""object C.<>c__DisplayClass0_0.x3""
IL_000b: ret
}");
VerifyLocal(testData, typeName, locals[6], "<>m6", "x4");
Assert.Equal(locals.Count, 7);
locals.Free();
context = CreateMethodContext(
runtime,
methodName: "C.<>c__DisplayClass0_2.<Main>b__3");
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "w1");
VerifyLocal(testData, typeName, locals[1], "<>m1", "z2", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (bool V_0,
object V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<>c__DisplayClass0_2.z2""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "y2");
VerifyLocal(testData, typeName, locals[3], "<>m3", "y3");
VerifyLocal(testData, typeName, locals[4], "<>m4", "x2");
VerifyLocal(testData, typeName, locals[5], "<>m5", "x3");
VerifyLocal(testData, typeName, locals[6], "<>m6", "x4", expectedILOpt:
@"{
// Code size 17 (0x11)
.maxstack 1
.locals init (bool V_0,
object V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""C.<>c__DisplayClass0_1 C.<>c__DisplayClass0_2.CS$<>8__locals2""
IL_0006: ldfld ""C.<>c__DisplayClass0_0 C.<>c__DisplayClass0_1.CS$<>8__locals1""
IL_000b: ldfld ""object C.<>c__DisplayClass0_0.x4""
IL_0010: ret
}");
Assert.Equal(locals.Count, 7);
locals.Free();
});
}
/// <summary>
/// Should not include "this" inside display class
/// instance method if "this" is not captured.
/// </summary>
[Fact]
public void NoThisInsideDisplayClassInstanceMethod()
{
var source =
@"using System;
class C
{
void M<T>(T x) where T : class
{
Func<object, Func<T, object>> f = y =>
{
return z =>
{
return x ?? (object)y ?? z;
};
};
f(2)(x);
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<>c__DisplayClass0_0.<M>b__0");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(3, locals.Count);
VerifyLocal(testData, "<>x<T>", locals[0], "<>m0", "y");
VerifyLocal(testData, "<>x<T>", locals[1], "<>m1", "x");
VerifyLocal(testData, "<>x<T>", locals[2], "<>m2", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
locals.Free();
context = CreateMethodContext(
runtime,
methodName: "C.<>c__DisplayClass0_1.<M>b__1");
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 4);
VerifyLocal(testData, "<>x<T>", locals[0], "<>m0", "z");
VerifyLocal(testData, "<>x<T>", locals[1], "<>m1", "y");
VerifyLocal(testData, "<>x<T>", locals[2], "<>m2", "x");
VerifyLocal(testData, "<>x<T>", locals[3], "<>m3", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
locals.Free();
});
}
[Fact]
public void GenericMethod()
{
var source =
@"class A<T>
{
struct B<U, V>
{
void M<W>(A<U>.B<V, object>[] o)
{
var t = default(T);
var u = default(U);
var w = default(W);
}
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "A.B.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 6);
VerifyLocal(testData, "<>x<T, U, V>", locals[0], "<>m0<W>", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (T V_0, //t
U V_1, //u
W V_2) //w
IL_0000: ldarg.0
IL_0001: ldobj ""A<T>.B<U, V>""
IL_0006: ret
}",
expectedGeneric: true);
var method = (MethodSymbol)testData.GetMethodData("<>x<T, U, V>.<>m0<W>").Method;
var containingType = method.ContainingType;
var returnType = (NamedTypeSymbol)method.ReturnType;
Assert.Equal(containingType.TypeParameters[1], returnType.TypeArguments[0]);
Assert.Equal(containingType.TypeParameters[2], returnType.TypeArguments[1]);
returnType = returnType.ContainingType;
Assert.Equal(containingType.TypeParameters[0], returnType.TypeArguments[0]);
VerifyLocal(testData, "<>x<T, U, V>", locals[1], "<>m1<W>", "o", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (T V_0, //t
U V_1, //u
W V_2) //w
IL_0000: ldarg.1
IL_0001: ret
}",
expectedGeneric: true);
method = (MethodSymbol)testData.GetMethodData("<>x<T, U, V>.<>m1<W>").Method;
// method.ReturnType: A<U>.B<V, object>[]
returnType = (NamedTypeSymbol)((ArrayTypeSymbol)method.ReturnType).ElementType;
Assert.Equal(containingType.TypeParameters[2], returnType.TypeArguments[0]);
returnType = returnType.ContainingType;
Assert.Equal(containingType.TypeParameters[1], returnType.TypeArguments[0]);
VerifyLocal(testData, "<>x<T, U, V>", locals[2], "<>m2<W>", "t", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (T V_0, //t
U V_1, //u
W V_2) //w
IL_0000: ldloc.0
IL_0001: ret
}",
expectedGeneric: true);
method = (MethodSymbol)testData.GetMethodData("<>x<T, U, V>.<>m2<W>").Method;
containingType = method.ContainingType;
Assert.Equal(containingType.TypeParameters[0], method.ReturnType);
VerifyLocal(testData, "<>x<T, U, V>", locals[3], "<>m3<W>", "u", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (T V_0, //t
U V_1, //u
W V_2) //w
IL_0000: ldloc.1
IL_0001: ret
}",
expectedGeneric: true);
method = (MethodSymbol)testData.GetMethodData("<>x<T, U, V>.<>m3<W>").Method;
containingType = method.ContainingType;
Assert.Equal(containingType.TypeParameters[1], method.ReturnType);
VerifyLocal(testData, "<>x<T, U, V>", locals[4], "<>m4<W>", "w", expectedILOpt:
@"{
// Code size 2 (0x2)
.maxstack 1
.locals init (T V_0, //t
U V_1, //u
W V_2) //w
IL_0000: ldloc.2
IL_0001: ret
}",
expectedGeneric: true);
method = (MethodSymbol)testData.GetMethodData("<>x<T, U, V>.<>m4<W>").Method;
Assert.Equal(method.TypeParameters[0], method.ReturnType);
VerifyLocal(testData, "<>x<T, U, V>", locals[5], "<>m5<W>", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 6 (0x6)
.maxstack 1
.locals init (T V_0, //t
U V_1, //u
W V_2) //w
IL_0000: newobj ""<>c__TypeVariables<T, U, V, W>..ctor()""
IL_0005: ret
}",
expectedGeneric: true);
method = (MethodSymbol)testData.GetMethodData("<>x<T, U, V>.<>m5<W>").Method;
returnType = (NamedTypeSymbol)method.ReturnType;
Assert.Equal(containingType.TypeParameters[0], returnType.TypeArguments[0]);
Assert.Equal(containingType.TypeParameters[1], returnType.TypeArguments[1]);
Assert.Equal(containingType.TypeParameters[2], returnType.TypeArguments[2]);
Assert.Equal(method.TypeParameters[0], returnType.TypeArguments[3]);
// Verify <>c__TypeVariables type was emitted (#976772).
using (var metadata = ModuleMetadata.CreateFromImage(ImmutableArray.CreateRange(assembly)))
{
var reader = metadata.MetadataReader;
var typeDef = reader.GetTypeDef("<>c__TypeVariables");
reader.CheckTypeParameters(typeDef.GetGenericParameters(), "T", "U", "V", "W");
}
locals.Free();
});
}
[Fact]
public void GenericLambda()
{
var source =
@"class C<T> where T : class
{
static void M<U>(T t)
{
var u = default(U);
System.Func<object> f = () => { return t ?? (object)u; };
f();
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<>c__DisplayClass0_0.<M>b__0");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 3);
VerifyLocal(testData, "<>x<T, U>", locals[0], "<>m0", "t");
VerifyLocal(testData, "<>x<T, U>", locals[1], "<>m1", "u", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (object V_0)
IL_0000: ldarg.0
IL_0001: ldfld ""U C<T>.<>c__DisplayClass0_0<U>.u""
IL_0006: ret
}",
expectedGeneric: false);
VerifyLocal(testData, "<>x<T, U>", locals[2], "<>m2", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
var method = (MethodSymbol)testData.GetMethodData("<>x<T, U>.<>m1").Method;
var containingType = method.ContainingType;
Assert.Equal(containingType.TypeParameters[1], method.ReturnType);
locals.Free();
});
}
[Fact]
public void Iterator_InstanceMethod()
{
var source =
@"using System.Collections;
class C
{
private readonly object[] c;
internal C(object[] c)
{
this.c = c;
}
internal IEnumerable F()
{
foreach (var o in c)
{
#line 999
yield return o;
}
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<F>d__2.MoveNext", atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0)
IL_0000: ldarg.0
IL_0001: ldfld ""C C.<F>d__2.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "o", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<F>d__2.<o>5__3""
IL_0006: ret
}");
locals.Free();
});
}
[Fact]
public void Iterator_StaticMethod_Generic()
{
var source =
@"using System.Collections.Generic;
class C
{
static IEnumerable<T> F<T>(T[] o)
{
for (int i = 0; i < o.Length; i++)
{
#line 999
T t = default(T);
yield return t;
yield return o[i];
}
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(
runtime,
methodName: "C.<F>d__0.MoveNext",
atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, "<>x<T>", locals[0], "<>m0", "o", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
int V_1,
bool V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""T[] C.<F>d__0<T>.o""
IL_0006: ret
}");
VerifyLocal(testData, "<>x<T>", locals[1], "<>m1", "i", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
int V_1,
bool V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""int C.<F>d__0<T>.<i>5__1""
IL_0006: ret
}");
VerifyLocal(testData, "<>x<T>", locals[2], "<>m2", "t", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
int V_1,
bool V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""T C.<F>d__0<T>.<t>5__2""
IL_0006: ret
}");
VerifyLocal(testData, "<>x<T>", locals[3], "<>m3", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
Assert.Equal(locals.Count, 4);
locals.Free();
});
}
[Fact]
public void Async_InstanceMethod_Generic()
{
var source =
@"using System.Threading.Tasks;
struct S<T> where T : class
{
T x;
internal async Task<object> F<U>(U y) where U : class
{
var z = default(T);
return this.x ?? (object)y ?? z;
}
}";
var compilation0 = CreateCompilationWithMscorlib45(
source,
options: TestOptions.DebugDll,
references: new[] { SystemRef_v4_0_30319_17929, SystemCoreRef_v4_0_30319_17929, CSharpRef });
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "S.<F>d__1.MoveNext");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, "<>x<T, U>", locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
System.Exception V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""S<T> S<T>.<F>d__1<U>.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, "<>x<T, U>", locals[1], "<>m1", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
System.Exception V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""U S<T>.<F>d__1<U>.y""
IL_0006: ret
}");
VerifyLocal(testData, "<>x<T, U>", locals[2], "<>m2", "z", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
object V_1,
System.Exception V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""T S<T>.<F>d__1<U>.<z>5__1""
IL_0006: ret
}");
VerifyLocal(testData, "<>x<T, U>", locals[3], "<>m3", "<>TypeVariables", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult);
Assert.Equal(locals.Count, 4);
locals.Free();
});
}
[Fact]
public void Async_StaticMethod_01()
{
var source =
@"using System.Threading.Tasks;
class C
{
static async Task<object> F(object o)
{
return o;
}
static async Task M(object x)
{
var y = await F(x);
await F(y);
}
}";
var compilation0 = CreateCompilationWithMscorlib45(
source,
options: TestOptions.DebugDll,
references: new[] { SystemRef_v4_0_30319_17929, SystemCoreRef_v4_0_30319_17929, CSharpRef });
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "x", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter<object> V_1,
object V_2,
C.<M>d__1 V_3,
System.Runtime.CompilerServices.TaskAwaiter<object> V_4,
System.Exception V_5)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.x""
IL_0006: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter<object> V_1,
object V_2,
C.<M>d__1 V_3,
System.Runtime.CompilerServices.TaskAwaiter<object> V_4,
System.Exception V_5)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.<y>5__1""
IL_0006: ret
}");
Assert.Equal(locals.Count, 2);
locals.Free();
});
}
[Fact]
public void Async_StaticMethod_02()
{
var source =
@"using System.Threading.Tasks;
class C
{
static async Task<object> F(object o)
{
return o;
}
static async Task M(object x)
{
{
#line 1000
int y = (int)await F(x);
await F(y);
}
{
#line 2000
long y = (long)await F(x);
await F(y);
}
}
}";
var compilation0 = CreateCompilationWithMscorlib45(
source,
options: TestOptions.DebugDll,
references: new[] { SystemRef_v4_0_30319_17929, SystemCoreRef_v4_0_30319_17929, CSharpRef });
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext", atLineNumber: 1000);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "x", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter<object> V_1,
object V_2,
C.<M>d__1 V_3,
System.Runtime.CompilerServices.TaskAwaiter<object> V_4,
System.Runtime.CompilerServices.TaskAwaiter<object> V_5,
System.Runtime.CompilerServices.TaskAwaiter<object> V_6,
System.Exception V_7)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.x""
IL_0006: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter<object> V_1,
object V_2,
C.<M>d__1 V_3,
System.Runtime.CompilerServices.TaskAwaiter<object> V_4,
System.Runtime.CompilerServices.TaskAwaiter<object> V_5,
System.Runtime.CompilerServices.TaskAwaiter<object> V_6,
System.Exception V_7)
IL_0000: ldarg.0
IL_0001: ldfld ""int C.<M>d__1.<y>5__1""
IL_0006: ret
}");
Assert.Equal(locals.Count, 2);
locals.Free();
context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext", atLineNumber: 2000);
testData = new CompilationTestData();
locals = ArrayBuilder<LocalAndMethod>.GetInstance();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "x", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter<object> V_1,
object V_2,
C.<M>d__1 V_3,
System.Runtime.CompilerServices.TaskAwaiter<object> V_4,
System.Runtime.CompilerServices.TaskAwaiter<object> V_5,
System.Runtime.CompilerServices.TaskAwaiter<object> V_6,
System.Exception V_7)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.x""
IL_0006: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter<object> V_1,
object V_2,
C.<M>d__1 V_3,
System.Runtime.CompilerServices.TaskAwaiter<object> V_4,
System.Runtime.CompilerServices.TaskAwaiter<object> V_5,
System.Runtime.CompilerServices.TaskAwaiter<object> V_6,
System.Exception V_7)
IL_0000: ldarg.0
IL_0001: ldfld ""long C.<M>d__1.<y>5__3""
IL_0006: ret
}");
Assert.Equal(locals.Count, 2);
locals.Free();
});
}
[WorkItem(995976, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/995976")]
[WorkItem(10649, "https://github.com/dotnet/roslyn/issues/10649")]
[Fact]
public void AsyncAndLambda()
{
var source =
@"using System;
using System.Threading.Tasks;
class C
{
static async Task F()
{
}
static void G(Action a)
{
a();
}
async static Task<int> M(int x)
{
int y = x + 1;
await F();
G(() => { x += 2; y += 2; });
x += y;
return x;
}
}";
var compilation0 = CreateCompilationWithMscorlib45(
source,
options: TestOptions.DebugDll,
references: new[] { SystemRef_v4_0_30319_17929, SystemCoreRef_v4_0_30319_17929, CSharpRef });
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<M>d__2.MoveNext");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "x", expectedILOpt:
@"{
// Code size 12 (0xc)
.maxstack 1
.locals init (int V_0,
int V_1,
System.Runtime.CompilerServices.TaskAwaiter V_2,
C.<M>d__2 V_3,
System.Exception V_4)
IL_0000: ldarg.0
IL_0001: ldfld ""C.<>c__DisplayClass2_0 C.<M>d__2.<>8__1""
IL_0006: ldfld ""int C.<>c__DisplayClass2_0.x""
IL_000b: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "y", expectedILOpt:
@"{
// Code size 12 (0xc)
.maxstack 1
.locals init (int V_0,
int V_1,
System.Runtime.CompilerServices.TaskAwaiter V_2,
C.<M>d__2 V_3,
System.Exception V_4)
IL_0000: ldarg.0
IL_0001: ldfld ""C.<>c__DisplayClass2_0 C.<M>d__2.<>8__1""
IL_0006: ldfld ""int C.<>c__DisplayClass2_0.y""
IL_000b: ret
}");
locals.Free();
});
}
[WorkItem(2240, "https://github.com/dotnet/roslyn/issues/2240")]
[Fact]
public void AsyncLambda()
{
var source =
@"using System;
using System.Threading.Tasks;
class C
{
static void M()
{
Func<int, Task> f = async (x) =>
{
var y = 42;
};
}
}";
var compilation0 = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, methodName: "C.<>c.<<M>b__0_0>d.MoveNext");
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var testData = new CompilationTestData();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(locals.Count, 2);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "x", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Exception V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""int C.<>c.<<M>b__0_0>d.x""
IL_0006: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "y", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Exception V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""int C.<>c.<<M>b__0_0>d.<y>5__1""
IL_0006: ret
}");
locals.Free();
});
}
[WorkItem(996571, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/996571")]
[Fact]
public void MissingReference()
{
var source0 =
@"public class A
{
}
public struct B
{
}";
var source1 =
@"class C
{
static void M(A a, B b, C c)
{
}
}";
var compilation0 = CreateStandardCompilation(
source0,
options: TestOptions.DebugDll,
assemblyName: "Comp1");
var compilation1 = CreateStandardCompilation(
source1,
options: TestOptions.DebugDll,
references: new[] { compilation0.EmitToImageReference() });
// no reference to compilation0
WithRuntimeInstance(compilation1, new[] { MscorlibRef }, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData, expectedDiagnostics: new[]
{
// error CS0012: The type 'A' is defined in an assembly that is not referenced. You must add a reference to assembly 'Comp1, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null'.
Diagnostic(ErrorCode.ERR_NoTypeDef).WithArguments("A", "Comp1, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null").WithLocation(1, 1)
});
Assert.Equal(locals.Count, 0);
locals.Free();
});
}
[WorkItem(996571, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/996571")]
[Fact]
public void MissingReference_2()
{
var source0 =
@"public interface I
{
}";
var source1 =
@"class C
{
static void M<T>(object o) where T : I
{
}
}";
var compilation0 = CreateStandardCompilation(
source0,
options: TestOptions.DebugDll,
assemblyName: "Comp1");
var compilation1 = CreateStandardCompilation(
source1,
options: TestOptions.DebugDll,
references: new[] { compilation0.EmitToImageReference() });
// no reference to compilation0
WithRuntimeInstance(compilation1, new[] { MscorlibRef }, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData, expectedDiagnostics: new[]
{
// error CS0012: The type 'I' is defined in an assembly that is not referenced. You must add a reference to assembly 'Comp1, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null'.
Diagnostic(ErrorCode.ERR_NoTypeDef).WithArguments("I", "Comp1, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null").WithLocation(1, 1)
});
Assert.Equal(locals.Count, 0);
locals.Free();
});
}
        [Fact]
        public void AssignmentToLockLocal()
        {
            // In the EE, assigning to a variable used as a lock argument is allowed,
            // unlike in regular code (no "cannot assign to lock local" error).
            var source = @"
class C
{
    void M(object o)
    {
        lock(o)
        {
#line 999
            int x = 1;
        }
    }
}
";
            var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation0, runtime =>
            {
                var context = CreateMethodContext(
                    runtime,
                    methodName: "C.M",
                    atLineNumber: 999);
                string error;
                var testData = new CompilationTestData();
                // Expression form: "o = null" produces the assigned value (dup before starg).
                context.CompileExpression("o = null", out error, testData);
                Assert.Null(error); // In regular code, there would be an error about modifying a lock local.
                testData.GetMethodData("<>x.<>m0").VerifyIL(
@"{
  // Code size 5 (0x5)
  .maxstack 2
  .locals init (object V_0,
                bool V_1,
                int V_2) //x
  IL_0000: ldnull
  IL_0001: dup
  IL_0002: starg.s V_1
  IL_0004: ret
}");
                testData = new CompilationTestData();
                // Assignment form: "o" = "null" stores without producing a value.
                context.CompileAssignment("o", "null", out error, testData);
                Assert.Null(error); // In regular code, there would be an error about modifying a lock local.
                testData.GetMethodData("<>x.<>m0").VerifyIL(
@"{
  // Code size 4 (0x4)
  .maxstack 1
  .locals init (object V_0,
                bool V_1,
                int V_2) //x
  IL_0000: ldnull
  IL_0001: starg.s V_1
  IL_0003: ret
}");
            });
        }
[WorkItem(1015887, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1015887")]
[Fact]
public void LocalDoubleConstant()
{
var source = @"
class C
{
static void M()
{
const double d = 2.74745778612482E-266;
}
}
";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.Equal(1, locals.Count);
VerifyLocal(testData, typeName, locals[0], "<>m0", "d", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
// Code size 10 (0xa)
.maxstack 1
IL_0000: ldc.r8 2.74745778612482E-266
IL_0009: ret
}");
});
}
[WorkItem(1015887, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1015887")]
[Fact]
public void LocalByteConstant()
{
var source = @"
class C
{
static void M()
{
const byte b = 254;
byte c = 0;
}
}
";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
string error;
context.CompileAssignment("c", "(byte)(b + 3)", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(@"
{
// Code size 3 (0x3)
.maxstack 1
.locals init (byte V_0) //c
IL_0000: ldc.i4.1
IL_0001: stloc.0
IL_0002: ret
}
");
});
}
        [WorkItem(1015887, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1015887")]
        [Fact]
        public void LocalDecimalConstant()
        {
            // A const decimal local is reconstructed in the EE via the
            // decimal(int, int, int, bool, byte) constructor, read-only.
            var source = @"
class C
{
    static void M()
    {
        const decimal d = 1.5M;
    }
}
";
            var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation0, runtime =>
            {
                var context = CreateMethodContext(runtime, methodName: "C.M");
                string errorMessage;
                var testData = new CompilationTestData();
                // NOTE(review): "Nothing" is VB, not C#; the assignment is expected to
                // fail on the const left-hand side (CS0131) before the RHS matters —
                // confirm this RHS text is intentional.
                context.CompileAssignment("d", "Nothing", out errorMessage, testData);
                Assert.Equal("error CS0131: The left-hand side of an assignment must be a variable, property or indexer", errorMessage);
                var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                string typeName;
                context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
                Assert.Equal(1, locals.Count);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "d", expectedFlags: DkmClrCompilationResultFlags.ReadOnlyResult, expectedILOpt:
@"{
  // Code size 12 (0xc)
  .maxstack 5
  IL_0000: ldc.i4.s 15
  IL_0002: ldc.i4.0
  IL_0003: ldc.i4.0
  IL_0004: ldc.i4.0
  IL_0005: ldc.i4.1
  IL_0006: newobj ""decimal..ctor(int, int, int, bool, byte)""
  IL_000b: ret
}");
            });
        }
[Fact, WorkItem(1022165, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1022165"), WorkItem(1028883, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1028883"), WorkItem(1034204, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1034204")]
public void KeywordIdentifiers()
{
var source = @"
class C
{
void M(int @null)
{
int @this = 1;
char @true = 't';
string @namespace = ""NS"";
}
}";
var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.M");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.NotNull(assembly);
Assert.NotEqual(assembly.Count, 0);
Assert.Equal(locals.Count, 5);
VerifyLocal(testData, typeName, locals[0], "<>m0", "this", expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (int V_0, //this
char V_1, //true
string V_2) //namespace
IL_0000: ldarg.0
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "@null", expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (int V_0, //this
char V_1, //true
string V_2) //namespace
IL_0000: ldarg.1
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[2], "<>m2", "@this", expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (int V_0, //this
char V_1, //true
string V_2) //namespace
IL_0000: ldloc.0
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[3], "<>m3", "@true", expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (int V_0, //this
char V_1, //true
string V_2) //namespace
IL_0000: ldloc.1
IL_0001: ret
}");
VerifyLocal(testData, typeName, locals[4], "<>m4", "@namespace", expectedILOpt: @"
{
// Code size 2 (0x2)
.maxstack 1
.locals init (int V_0, //this
char V_1, //true
string V_2) //namespace
IL_0000: ldloc.2
IL_0001: ret
}");
locals.Free();
});
}
[Fact]
public void ExtensionIterator()
{
var source = @"
static class C
{
static System.Collections.IEnumerable F(this int x)
{
yield return x;
}
}
";
var expectedIL = @"
{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0)
IL_0000: ldarg.0
IL_0001: ldfld ""int C.<F>d__0.x""
IL_0006: ret
}";
var compilation0 = CreateCompilationWithMscorlibAndSystemCore(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
var context = CreateMethodContext(runtime, "C.<F>d__0.MoveNext");
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
Assert.NotNull(assembly);
Assert.NotEqual(assembly.Count, 0);
Assert.Equal(locals.Count, 1);
VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: expectedIL);
Assert.Equal(SpecialType.System_Int32, testData.GetMethodData(typeName + ".<>m0").Method.ReturnType.SpecialType);
locals.Free();
testData = new CompilationTestData();
string error;
context.CompileExpression("x", out error, testData);
Assert.Null(error);
var methodData = testData.GetMethodData("<>x.<>m0");
methodData.VerifyIL(expectedIL);
Assert.Equal(SpecialType.System_Int32, methodData.Method.ReturnType.SpecialType);
});
}
        [Fact, WorkItem(1063254, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1063254")]
        public void OverloadedIteratorDifferentParameterTypes_ArgumentsOnly()
        {
            // Overloaded iterator methods each get a distinct state-machine display
            // class (<M1>d__0, <M1>d__1, ...); argumentsOnly must surface only the
            // parameters, each read from the correctly-typed field of its own class.
            var source = @"
using System.Collections.Generic;
class C
{
    IEnumerable<int> M1(int x, int y)
    {
        int local = 0;
        yield return local;
    }
    IEnumerable<float> M1(int x, float y)
    {
        float local = 0.0F;
        yield return local;
    }
    static IEnumerable<float> M2(int x, float y)
    {
        float local = 0;
        yield return local;
    }
    static IEnumerable<T> M2<T>(int x, T y)
    {
        T local = default(T);
        yield return local;
    }
    static IEnumerable<int> M2(int x, int y)
    {
        int local = 0;
        yield return local;
    }
}";
            var compilation = CreateStandardCompilation(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation, runtime =>
            {
                string displayClassName;
                string typeName;
                var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                CompilationTestData testData;
                // Shared IL baseline: {0} = field type, {1} = display class, {2} = field name.
                var ilTemplate = @"
{{
  // Code size 7 (0x7)
  .maxstack 1
  .locals init (int V_0)
  IL_0000: ldarg.0
  IL_0001: ldfld ""{0} C.{1}.{2}""
  IL_0006: ret
}}";
                // M1(int, int)
                displayClassName = "<M1>d__0";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "y"));
                locals.Clear();
                // M1(int, float)
                displayClassName = "<M1>d__1";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "float", displayClassName, "y"));
                locals.Clear();
                // M2(int, float)
                displayClassName = "<M2>d__2";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "float", displayClassName, "y"));
                locals.Clear();
                // M2(int, T) — the generic overload adds <T> to both names.
                displayClassName = "<M2>d__3";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                typeName += "<T>";
                displayClassName += "<T>";
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "T", displayClassName, "y"));
                locals.Clear();
                // M2(int, int)
                displayClassName = "<M2>d__4";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "int", displayClassName, "y"));
                locals.Clear();
                locals.Free();
            });
        }
        [Fact, WorkItem(1063254, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1063254")]
        public void OverloadedAsyncDifferentParameterTypes_ArgumentsOnly()
        {
            // Async counterpart of the iterator overload test: each overload's state
            // machine (<M1>d__0, <M1>d__1, ...) exposes only its own parameters when
            // argumentsOnly is requested.
            var source = @"
using System.Threading.Tasks;
class C
{
    async Task<int> M1(int x)
    {
        int local = 0;
        return local;
    }
    async Task<float> M1(int x, float y)
    {
        float local = 0.0F;
        return local;
    }
    static async Task<float> M2(int x, float y)
    {
        float local = 0;
        return local;
    }
    static async Task<T> M2<T>(T x)
    {
        T local = default(T);
        return local;
    }
    static async Task<int> M2(int x)
    {
        int local = 0;
        return local;
    }
}";
            var compilation = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation, runtime =>
            {
                string displayClassName;
                string typeName;
                var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
                CompilationTestData testData;
                // Shared IL baseline: {0} = async result local type, {1} = field type,
                // {2} = display class, {3} = field name.
                var ilTemplate = @"
{{
  // Code size 7 (0x7)
  .maxstack 1
  .locals init (int V_0,
                {0} V_1,
                System.Exception V_2)
  IL_0000: ldarg.0
  IL_0001: ldfld ""{1} C.{2}.{3}""
  IL_0006: ret
}}";
                // M1(int)
                displayClassName = "<M1>d__0";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", "int", displayClassName, "x"));
                locals.Clear();
                // M1(int, float)
                displayClassName = "<M1>d__1";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "float", "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "float", "float", displayClassName, "y"));
                locals.Clear();
                // M2(int, float)
                displayClassName = "<M2>d__2";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "float", "int", displayClassName, "x"));
                VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(ilTemplate, "float", "float", displayClassName, "y"));
                locals.Clear();
                // M2(T) — generic overload, <T> appended to both names.
                displayClassName = "<M2>d__3";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName + "<T>", locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "T", "T", displayClassName + "<T>", "x"));
                locals.Clear();
                // M2(int)
                displayClassName = "<M2>d__4";
                GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
                VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(ilTemplate, "int", "int", displayClassName, "x"));
                locals.Clear();
                locals.Free();
            });
        }
/// <summary>
/// Verifies that CompileGetLocals(argumentsOnly: true) reports the correct lambda
/// parameter name ("y", "z" or "t") for each of several lambdas lifted from the
/// same methods, both in a closure display class (M1) and in the static lambda
/// cache class of a generic method (M2&lt;T&gt;).
/// </summary>
[Fact, WorkItem(1063254, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1063254")]
public void MultipleLambdasDifferentParameterNames_ArgumentsOnly()
{
var source = @"
using System;
class C
{
void M1(int x)
{
Action<int> a = y => x.ToString();
Func<int, int> f = z => x;
}
static void M2<T>(int x)
{
Action<int> a = y => y.ToString();
Func<int, int> f = z => z;
Func<T, T> g = t => t;
}
}";
var compilation = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation, runtime =>
{
string displayClassName;
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
// Expected IL in every case: load the single lambda argument and return it.
// {0} is the argument index (1 in all calls below, since argument 0 is the
// display-class/cache-class instance).
var voidRetILTemplate = @"
{{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.{0}
IL_0001: ret
}}";
var funcILTemplate = @"
{{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.{0}
IL_0001: ret
}}";
// y => x.ToString()
displayClassName = "<>c__DisplayClass0_0";
GetLocals(runtime, "C." + displayClassName + ".<M1>b__0", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "y", expectedILOpt: string.Format(voidRetILTemplate, 1));
locals.Clear();
// z => x
displayClassName = "<>c__DisplayClass0_0";
GetLocals(runtime, "C." + displayClassName + ".<M1>b__1", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "z", expectedILOpt: string.Format(funcILTemplate, 1));
locals.Clear();
// y => y.ToString()
displayClassName = "<>c__1";
GetLocals(runtime, "C." + displayClassName + ".<M2>b__1_0", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T>", locals[0], "<>m0", "y", expectedILOpt: string.Format(voidRetILTemplate, 1));
locals.Clear();
// z => z
displayClassName = "<>c__1";
GetLocals(runtime, "C." + displayClassName + ".<M2>b__1_1", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T>", locals[0], "<>m0", "z", expectedILOpt: string.Format(funcILTemplate, 1));
locals.Clear();
// t => t
displayClassName = "<>c__1";
GetLocals(runtime, "C." + displayClassName + ".<M2>b__1_2", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T>", locals[0], "<>m0", "t", expectedILOpt: string.Format(funcILTemplate, 1));
locals.Clear();
locals.Free();
});
}
/// <summary>
/// Verifies that CompileGetLocals(argumentsOnly: true) resolves the correct
/// overload of ordinary (non-lambda, non-state-machine) methods — including a
/// generic overload and a ref-parameter overload — and emits IL that loads the
/// parameter at the right argument index (instance methods start at 1 because
/// argument 0 is 'this'; static methods start at 0).
/// </summary>
[Fact, WorkItem(1063254, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1063254")]
public void OverloadedRegularMethodDifferentParameterTypes_ArgumentsOnly()
{
var source = @"
class C
{
void M1(int x, int y)
{
int local = 0;
}
string M1(int x, string y)
{
string local = null;
return local;
}
static void M2(int x, string y)
{
string local = null;
}
static T M2<T>(int x, T y)
{
T local = default(T);
return local;
}
static int M2(int x, ref int y)
{
int local = 0;
return local;
}
}";
var compilation = CreateStandardCompilation(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation, runtime =>
{
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
// IL templates: {0} is the local's type, {1} the argument index. The
// ref-parameter template additionally dereferences the managed pointer
// (ldind.i4) before returning.
var voidRetILTemplate = @"
{{
// Code size 2 (0x2)
.maxstack 1
.locals init ({0} V_0) //local
IL_0000: ldarg.{1}
IL_0001: ret
}}";
var funcILTemplate = @"
{{
// Code size 2 (0x2)
.maxstack 1
.locals init ({0} V_0, //local
{0} V_1)
IL_0000: ldarg.{1}
IL_0001: ret
}}";
var refParamILTemplate = @"
{{
// Code size 3 (0x3)
.maxstack 1
.locals init ({0} V_0, //local
{0} V_1)
IL_0000: ldarg.{1}
IL_0001: ldind.i4
IL_0002: ret
}}";
// M1(int, int)
GetLocals(runtime, "C.M1(Int32,Int32)", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(voidRetILTemplate, "int", 1));
VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(voidRetILTemplate, "int", 2));
locals.Clear();
// M1(int, string)
GetLocals(runtime, "C.M1(Int32,String)", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(funcILTemplate, "string", 1));
VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(funcILTemplate, "string", 2));
locals.Clear();
// M2(int, string)
GetLocals(runtime, "C.M2(Int32,String)", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(voidRetILTemplate, "string", 0));
VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(voidRetILTemplate, "string", 1));
locals.Clear();
// M2(int, T)
GetLocals(runtime, "C.M2(Int32,T)", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0<T>", "x", expectedILOpt: string.Format(funcILTemplate, "T", 0), expectedGeneric: true);
VerifyLocal(testData, typeName, locals[1], "<>m1<T>", "y", expectedILOpt: string.Format(funcILTemplate, "T", 1), expectedGeneric: true);
locals.Clear();
// M2(int, ref int)
GetLocals(runtime, "C.M2(Int32,Int32)", argumentsOnly: true, locals: locals, count: 2, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "x", expectedILOpt: string.Format(funcILTemplate, "int", 0));
VerifyLocal(testData, typeName, locals[1], "<>m1", "y", expectedILOpt: string.Format(refParamILTemplate, "int", 1));
locals.Clear();
locals.Free();
});
}
/// <summary>
/// Verifies that CompileGetLocals(argumentsOnly: true) distinguishes, per state
/// machine (iterator "&lt;Mn&gt;d__k" or async "&lt;Mn&gt;d__k"), between a hoisted
/// *parameter* named "x" (reported) and a hoisted *local* named "x" (not
/// reported, count: 0), across overloads that conflict only on that name.
/// </summary>
[Fact, WorkItem(1063254, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1063254")]
public void MultipleMethodsLocalConflictsWithParameterName_ArgumentsOnly()
{
var source = @"
using System.Collections.Generic;
using System.Threading.Tasks;
class C<T>
{
IEnumerable<int> M1()
{
int x = 0;
yield return x;
}
IEnumerable<int> M1(int x)
{
yield return x;
}
IEnumerable<int> M2(int x)
{
yield return x;
}
IEnumerable<int> M2()
{
int x = 0;
yield return x;
}
static async Task<T> M3()
{
T x = default(T);
return x;
}
static async Task<T> M3<T>(T x)
{
return x;
}
static async Task<T> M4<T>(T x)
{
return x;
}
static async Task<T> M4()
{
T x = default(T);
return x;
}
}";
var compilation = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation, runtime =>
{
string displayClassName;
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
// Templates read the hoisted parameter back from its state-machine field:
// {0} = field type, {1} = state machine type name, {2} = field name.
var iteratorILTemplate = @"
{{
// Code size 7 (0x7)
.maxstack 1
.locals init ({0} V_0)
IL_0000: ldarg.0
IL_0001: ldfld ""{0} C<T>.{1}.{2}""
IL_0006: ret
}}";
var asyncILTemplate = @"
{{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
{0} V_1,
System.Exception V_2)
IL_0000: ldarg.0
IL_0001: ldfld ""{0} C<T>.{1}.{2}""
IL_0006: ret
}}";
// M1() - "x" is a local here, so argumentsOnly yields nothing.
displayClassName = "<M1>d__0";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 0, typeName: out typeName, testData: out testData);
locals.Clear();
// M1(int)
displayClassName = "<M1>d__1";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T>", locals[0], "<>m0", "x", expectedILOpt: string.Format(iteratorILTemplate, "int", displayClassName, "x"));
locals.Clear();
// M2(int)
displayClassName = "<M2>d__2";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T>", locals[0], "<>m0", "x", expectedILOpt: string.Format(iteratorILTemplate, "int", displayClassName, "x"));
locals.Clear();
// M2() - "x" is a local.
displayClassName = "<M2>d__3";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 0, typeName: out typeName, testData: out testData);
locals.Clear();
// M3() - "x" is a local.
displayClassName = "<M3>d__4";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 0, typeName: out typeName, testData: out testData);
locals.Clear();
// M3<T>(T x)
displayClassName = "<M3>d__5";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T, T>", locals[0], "<>m0", "x", expectedILOpt: string.Format(asyncILTemplate, "T", displayClassName + "<T>", "x"));
locals.Clear();
// M4<T>(T x)
displayClassName = "<M4>d__6";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName + "<T, T>", locals[0], "<>m0", "x", expectedILOpt: string.Format(asyncILTemplate, "T", displayClassName + "<T>", "x"));
locals.Clear();
// M4() - "x" is a local.
displayClassName = "<M4>d__7";
GetLocals(runtime, "C." + displayClassName + ".MoveNext", argumentsOnly: true, locals: locals, count: 0, typeName: out typeName, testData: out testData);
locals.Clear();
locals.Free();
});
}
/// <summary>
/// Verifies that inside a catch block of an async state machine (IL offset
/// resolved via the #line 999 directive) both the hoisted local "o" and the
/// hoisted exception variable "e" are reported as locals, read back from their
/// state-machine fields.
/// </summary>
[WorkItem(1115030, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1115030")]
[Fact]
public void CatchInAsyncStateMachine()
{
var source =
@"using System;
using System.Threading.Tasks;
class C
{
static object F()
{
throw new ArgumentException();
}
static async Task M()
{
object o;
try
{
o = F();
}
catch (Exception e)
{
#line 999
o = e;
}
}
}";
var compilation = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation, runtime =>
{
// atLineNumber: 999 positions the context at "o = e;" inside the catch.
var context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext", atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "o", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Exception V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.<o>5__1""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "e", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Exception V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""System.Exception C.<M>d__1.<e>5__2""
IL_0006: ret
}");
locals.Free();
});
}
/// <summary>
/// Iterator counterpart of <c>CatchInAsyncStateMachine</c>: verifies that inside
/// a catch block of an iterator state machine both the hoisted local "o" and the
/// hoisted exception variable "e" are reported as locals, read back from their
/// state-machine fields.
/// </summary>
[WorkItem(1115030, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1115030")]
[Fact]
public void CatchInIteratorStateMachine()
{
var source =
@"using System;
using System.Collections;
class C
{
static object F()
{
throw new ArgumentException();
}
static IEnumerable M()
{
object o;
try
{
o = F();
}
catch (Exception e)
{
#line 999
o = e;
}
yield return o;
}
}";
var compilation0 = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation0, runtime =>
{
// atLineNumber: 999 positions the context at "o = e;" inside the catch.
var context = CreateMethodContext(runtime, "C.<M>d__1.MoveNext", atLineNumber: 999);
var testData = new CompilationTestData();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "o", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Exception V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""object C.<M>d__1.<o>5__1""
IL_0006: ret
}");
VerifyLocal(testData, typeName, locals[1], "<>m1", "e", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Exception V_1)
IL_0000: ldarg.0
IL_0001: ldfld ""System.Exception C.<M>d__1.<e>5__2""
IL_0006: ret
}");
locals.Free();
});
}
/// <summary>
/// The referenced library redeclares System.ComponentModel.EditorBrowsableAttribute
/// (also present in System.dll), so the attribute type exists twice at runtime.
/// Verifies that computing locals still succeeds for a method of a class carrying
/// that attribute — the only local is "this".
/// </summary>
[Fact]
public void DuplicateEditorBrowsableAttributes()
{
const string libSource = @"
namespace System.ComponentModel
{
public enum EditorBrowsableState
{
Always = 0,
Never = 1,
Advanced = 2
}
[AttributeUsage(AttributeTargets.All)]
internal sealed class EditorBrowsableAttribute : Attribute
{
public EditorBrowsableAttribute(EditorBrowsableState state) { }
}
}
";
const string source = @"
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
class C
{
void M()
{
}
}
";
var libRef = CreateStandardCompilation(libSource).EmitToImageReference();
// Both SystemRef (the real attribute) and libRef (the duplicate) are loaded.
var comp = CreateStandardCompilation(source, new[] { SystemRef }, TestOptions.DebugDll);
WithRuntimeInstance(comp, new[] { MscorlibRef, SystemRef, SystemCoreRef, SystemXmlLinqRef, libRef }, runtime =>
{
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
GetLocals(runtime, "C.M", argumentsOnly: false, locals: locals, count: 1, typeName: out typeName, testData: out testData);
Assert.Equal("this", locals.Single().LocalName);
locals.Free();
});
}
/// <summary>
/// An async method that also captures a local into a lambda closure produces a
/// state machine holding both a "&lt;&gt;4__this" field and a display-class field
/// ("&lt;&gt;8__1") whose display class captures "s". Verifies that exactly two
/// locals are reported ("this" and "s") and that each is read through the right
/// chain of fields.
/// </summary>
[WorkItem(2089, "https://github.com/dotnet/roslyn/issues/2089")]
[Fact]
public void MultipleThisFields()
{
var source =
@"using System;
using System.Threading.Tasks;
class C
{
async static Task F(Action a)
{
a();
}
void G(string s)
{
}
async void M()
{
string s = null;
await F(() => G(s));
}
}";
var compilation = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation, runtime =>
{
var context = CreateMethodContext(runtime, "C.<M>d__2.MoveNext()");
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var testData = new CompilationTestData();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
// xUnit's Assert.Equal takes (expected, actual); the expected count is 2.
Assert.Equal(2, locals.Count);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "this", expectedILOpt:
@"{
// Code size 7 (0x7)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter V_1,
C.<M>d__2 V_2,
System.Exception V_3)
IL_0000: ldarg.0
IL_0001: ldfld ""C C.<M>d__2.<>4__this""
IL_0006: ret
}");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "s", expectedILOpt:
@"{
// Code size 12 (0xc)
.maxstack 1
.locals init (int V_0,
System.Runtime.CompilerServices.TaskAwaiter V_1,
C.<M>d__2 V_2,
System.Exception V_3)
IL_0000: ldarg.0
IL_0001: ldfld ""C.<>c__DisplayClass2_0 C.<M>d__2.<>8__1""
IL_0006: ldfld ""string C.<>c__DisplayClass2_0.s""
IL_000b: ret
}");
locals.Free();
});
}
/// <summary>
/// Verifies that when stopped on the closing brace of an async method (mapped to
/// line 999 via #line), the locals "this" and "s" are still in scope and reported.
/// </summary>
[WorkItem(2336, "https://github.com/dotnet/roslyn/issues/2336")]
[Fact]
public void LocalsOnAsyncMethodClosingBrace()
{
var source =
@"using System;
using System.Threading.Tasks;
class C
{
async void M()
{
string s = null;
#line 999
}
}";
var compilation = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
WithRuntimeInstance(compilation, runtime =>
{
var context = CreateMethodContext(runtime, "C.<M>d__0.MoveNext()", atLineNumber: 999);
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
string typeName;
var testData = new CompilationTestData();
context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
// xUnit's Assert.Equal takes (expected, actual); the expected count is 2.
Assert.Equal(2, locals.Count);
VerifyLocal(testData, "<>x", locals[0], "<>m0", "this");
VerifyLocal(testData, "<>x", locals[1], "<>m1", "s");
locals.Free();
});
}
/// <summary>
/// In a query with two "let" clauses, the range variables x, y and z are packed
/// into nested anonymous types behind compiler-generated transparent identifiers.
/// Verifies that inside the final "select" lambda (whose parameter is the
/// transparent identifier) all three are surfaced as locals — via
/// CompileGetLocals and via CompileExpression — by drilling through the
/// anonymous-type fields.
/// </summary>
[WorkItem(1139013, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1139013")]
[Fact]
public void TransparentIdentifiers_FromParameter()
{
const string source = @"
using System.Linq;
class C
{
void M(string[] args)
{
var concat =
from x in args
let y = x.ToString()
let z = x.GetHashCode()
select x + y + z;
}
}
";
const string methodName = "C.<>c.<M>b__0_2";
// "z" sits directly on the outer transparent identifier (one field load).
const string zIL = @"
{
// Code size 7 (0x7)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ldfld ""int <>f__AnonymousType1<<>f__AnonymousType0<string, string>, int>.<z>i__Field""
IL_0006: ret
}
";
// "x" and "y" require drilling through the nested transparent identifier.
const string xIL = @"
{
// Code size 12 (0xc)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ldfld ""<>f__AnonymousType0<string, string> <>f__AnonymousType1<<>f__AnonymousType0<string, string>, int>.<<>h__TransparentIdentifier0>i__Field""
IL_0006: ldfld ""string <>f__AnonymousType0<string, string>.<x>i__Field""
IL_000b: ret
}
";
const string yIL = @"
{
// Code size 12 (0xc)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ldfld ""<>f__AnonymousType0<string, string> <>f__AnonymousType1<<>f__AnonymousType0<string, string>, int>.<<>h__TransparentIdentifier0>i__Field""
IL_0006: ldfld ""string <>f__AnonymousType0<string, string>.<y>i__Field""
IL_000b: ret
}
";
var comp = CreateStandardCompilation(source, new[] { SystemCoreRef }, TestOptions.DebugDll);
WithRuntimeInstance(comp, runtime =>
{
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
GetLocals(runtime, methodName, argumentsOnly: false, locals: locals, count: 3, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "z", expectedILOpt: zIL);
VerifyLocal(testData, typeName, locals[1], "<>m1", "x", expectedILOpt: xIL);
VerifyLocal(testData, typeName, locals[2], "<>m2", "y", expectedILOpt: yIL);
locals.Free();
// Evaluating the names as expressions must produce the same IL.
var context = CreateMethodContext(runtime, methodName);
string error;
testData = new CompilationTestData();
context.CompileExpression("z", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(zIL);
testData = new CompilationTestData();
context.CompileExpression("x", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(xIL);
testData = new CompilationTestData();
context.CompileExpression("y", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(yIL);
});
}
/// <summary>
/// Like <c>TransparentIdentifiers_FromParameter</c>, but the transparent
/// identifier is captured into a display class (because the inner lambda
/// "c =&gt; y + z" closes over it). Verifies that the lambda parameter "c" and
/// the range variables z, x, y are all surfaced — via CompileGetLocals and via
/// CompileExpression — this time starting from the display-class field
/// "&lt;&gt;h__TransparentIdentifier1" on "this" (ldarg.0).
/// </summary>
[WorkItem(1139013, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1139013")]
[Fact]
public void TransparentIdentifiers_FromDisplayClassField()
{
const string source = @"
using System.Linq;
class C
{
void M(string[] args)
{
var concat =
from x in args
let y = x.ToString()
let z = x.GetHashCode()
select x.Select(c => y + z);
}
}
";
const string methodName = "C.<>c__DisplayClass0_0.<M>b__3";
// "c" is the lambda's own parameter.
const string cIL = @"
{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ret
}
";
const string zIL = @"
{
// Code size 12 (0xc)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ldfld ""<>f__AnonymousType1<<>f__AnonymousType0<string, string>, int> C.<>c__DisplayClass0_0.<>h__TransparentIdentifier1""
IL_0006: ldfld ""int <>f__AnonymousType1<<>f__AnonymousType0<string, string>, int>.<z>i__Field""
IL_000b: ret
}
";
const string xIL = @"
{
// Code size 17 (0x11)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ldfld ""<>f__AnonymousType1<<>f__AnonymousType0<string, string>, int> C.<>c__DisplayClass0_0.<>h__TransparentIdentifier1""
IL_0006: ldfld ""<>f__AnonymousType0<string, string> <>f__AnonymousType1<<>f__AnonymousType0<string, string>, int>.<<>h__TransparentIdentifier0>i__Field""
IL_000b: ldfld ""string <>f__AnonymousType0<string, string>.<x>i__Field""
IL_0010: ret
}
";
const string yIL = @"
{
// Code size 17 (0x11)
.maxstack 1
IL_0000: ldarg.0
IL_0001: ldfld ""<>f__AnonymousType1<<>f__AnonymousType0<string, string>, int> C.<>c__DisplayClass0_0.<>h__TransparentIdentifier1""
IL_0006: ldfld ""<>f__AnonymousType0<string, string> <>f__AnonymousType1<<>f__AnonymousType0<string, string>, int>.<<>h__TransparentIdentifier0>i__Field""
IL_000b: ldfld ""string <>f__AnonymousType0<string, string>.<y>i__Field""
IL_0010: ret
}
";
var comp = CreateStandardCompilation(source, new[] { SystemCoreRef }, TestOptions.DebugDll);
WithRuntimeInstance(comp, runtime =>
{
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
GetLocals(runtime, methodName, argumentsOnly: false, locals: locals, count: 4, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "c", expectedILOpt: cIL);
VerifyLocal(testData, typeName, locals[1], "<>m1", "z", expectedILOpt: zIL);
VerifyLocal(testData, typeName, locals[2], "<>m2", "x", expectedILOpt: xIL);
VerifyLocal(testData, typeName, locals[3], "<>m3", "y", expectedILOpt: yIL);
locals.Free();
// Evaluating the names as expressions must produce the same IL.
var context = CreateMethodContext(runtime, methodName);
string error;
testData = new CompilationTestData();
context.CompileExpression("c", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(cIL);
testData = new CompilationTestData();
context.CompileExpression("z", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(zIL);
testData = new CompilationTestData();
context.CompileExpression("x", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(xIL);
testData = new CompilationTestData();
context.CompileExpression("y", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(yIL);
});
}
/// <summary>
/// Verifies that a query range variable of anonymous type ("t" in the second
/// query) is reported as a local inside the select lambda, both via
/// CompileGetLocals and via CompileExpression; the IL simply returns the lambda
/// parameter (ldarg.1).
/// </summary>
[WorkItem(3236, "https://github.com/dotnet/roslyn/pull/3236")]
[Fact]
public void AnonymousTypeParameter()
{
const string source = @"
using System.Linq;
class C
{
static void Main(string[] args)
{
var anonymousTypes =
from a in args
select new { Value = a, Length = a.Length };
var values =
from t in anonymousTypes
select t.Value;
}
}
";
const string methodName = "C.<>c.<Main>b__0_1";
const string tIL = @"
{
// Code size 2 (0x2)
.maxstack 1
IL_0000: ldarg.1
IL_0001: ret
}
";
var comp = CreateStandardCompilation(source, new[] { SystemCoreRef }, TestOptions.DebugDll);
WithRuntimeInstance(comp, runtime =>
{
string typeName;
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
CompilationTestData testData;
GetLocals(runtime, methodName, argumentsOnly: false, locals: locals, count: 1, typeName: out typeName, testData: out testData);
VerifyLocal(testData, typeName, locals[0], "<>m0", "t", expectedILOpt: tIL);
locals.Free();
var context = CreateMethodContext(runtime, methodName);
string error;
testData = new CompilationTestData();
context.CompileExpression("t", out error, testData);
Assert.Null(error);
testData.GetMethodData("<>x.<>m0").VerifyIL(tIL);
});
}
/// <summary>
/// A PDB constant whose name/signature cannot be read (the mock returns
/// E_NOTIMPL from its name callback) must not crash CompileGetLocals; with the
/// bad constant as the only debug entry, no locals are produced (count: 0).
/// </summary>
[WorkItem(955, "https://github.com/aspnet/Home/issues/955")]
[Fact]
public void ConstantWithErrorType()
{
const string source = @"
class Program
{
static void Main()
{
const int a = 1;
}
}";
var comp = CreateStandardCompilation(source, options: TestOptions.DebugExe);
WithRuntimeInstance(comp, runtime =>
{
// Mock constant "a" = 1 whose name lookup fails with E_NOTIMPL.
var badConst = new MockSymUnmanagedConstant(
"a",
1,
(int bufferLength, out int count, byte[] name) =>
{
count = 0;
return DiaSymReader.SymUnmanagedReaderExtensions.E_NOTIMPL;
});
var debugInfo = new MethodDebugInfoBytes.Builder(constants: new[] { badConst }).Build();
var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
GetLocals(runtime, "Program.Main", debugInfo, locals, count: 0);
locals.Free();
});
}
/// <summary>
/// Creates an evaluation context for <paramref name="methodName"/>, compiles the
/// "get locals" request, and asserts that exactly <paramref name="count"/> locals
/// were produced. The compiled assembly bytes and per-local method data are
/// returned to the caller through <paramref name="testData"/> for IL verification.
/// </summary>
private static void GetLocals(RuntimeInstance runtime, string methodName, bool argumentsOnly, ArrayBuilder<LocalAndMethod> locals, int count, out string typeName, out CompilationTestData testData)
{
    var evaluationContext = CreateMethodContext(runtime, methodName);
    testData = new CompilationTestData();
    var generatedAssembly = evaluationContext.CompileGetLocals(locals, argumentsOnly, out typeName, testData);
    Assert.NotNull(generatedAssembly);

    // With no locals expected, no code should have been generated at all;
    // otherwise any non-negative assembly size is acceptable.
    if (count != 0)
    {
        Assert.InRange(generatedAssembly.Count, 0, int.MaxValue);
    }
    else
    {
        Assert.Equal(0, generatedAssembly.Count);
    }

    Assert.Equal(count, locals.Count);
}
/// <summary>
/// Variant of GetLocals that injects explicit PDB debug info: the supplied
/// <paramref name="debugInfo"/> is served through a mock symbol reader keyed by
/// the method token, the evaluation context is built manually, and the number of
/// produced locals is asserted to equal <paramref name="count"/>.
/// </summary>
private static void GetLocals(RuntimeInstance runtime, string methodName, MethodDebugInfoBytes debugInfo, ArrayBuilder<LocalAndMethod> locals, int count)
{
    ImmutableArray<MetadataBlock> metadataBlocks;
    Guid moduleId;
    ISymUnmanagedReader ignoredReader;
    int token;
    int localSigToken;
    GetContextState(runtime, methodName, out metadataBlocks, out moduleId, out ignoredReader, out token, out localSigToken);

    // Serve the caller-supplied debug info for this one method token.
    var debugInfoByToken = new Dictionary<int, MethodDebugInfoBytes>
    {
        { token, debugInfo }
    };
    var mockSymReader = new MockSymUnmanagedReader(debugInfoByToken.ToImmutableDictionary());

    var evaluationContext = EvaluationContext.CreateMethodContext(
        default(CSharpMetadataContext),
        metadataBlocks,
        mockSymReader,
        moduleId,
        token,
        methodVersion: 1,
        ilOffset: 0,
        localSignatureToken: localSigToken);

    string typeName;
    var generatedAssembly = evaluationContext.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: null);
    Assert.NotNull(generatedAssembly);

    // Zero expected locals means zero generated code; otherwise any
    // non-negative assembly size is acceptable.
    if (count != 0)
    {
        Assert.InRange(generatedAssembly.Count, 0, int.MaxValue);
    }
    else
    {
        Assert.Equal(0, generatedAssembly.Count);
    }

    Assert.Equal(count, locals.Count);
}
}
}
| zooba/roslyn | src/ExpressionEvaluator/CSharp/Test/ExpressionCompiler/LocalsTests.cs | C# | apache-2.0 | 133,158 |
/*
* Copyright 2009 Kantega AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.kantega.publishing.admin.content.spellcheck;
import junit.framework.TestCase;
/**
 * Unit test for the spellchecker service.
 *
 * NOTE(review): every assertion in this test is commented out, so it currently
 * verifies nothing. It was presumably disabled because it depends on an external
 * dictionary/spellchecker backend -- TODO: confirm and either re-enable with a
 * test double or remove the test.
 */
public class SpellcheckerServiceImplTest extends TestCase {
    // Intentionally empty (see class note): the original checks expected the
    // misspelled words "beleived" and "colour" (British spelling) to be returned.
    public void testSpellcheck() {
//        SpellcheckerService spellcheckerService = new SpellcheckerServiceImpl();
//
//        List<String> words = new ArrayList<String>();
//        words.add("beleived");
//        words.add("country");
//        words.add("color");
//        words.add("colour");
//        List<String> returnedWords = spellcheckerService.spellcheck(words);
//
//        assertEquals(2, returnedWords.size());
//        assertEquals("beleived", returnedWords.get(0));
//        assertEquals("colour", returnedWords.get(1));
    }
}
| kantega/Flyt-cms | modules/core/src/test/java/no/kantega/publishing/admin/content/spellcheck/SpellcheckerServiceImplTest.java | Java | apache-2.0 | 1,311 |
/*
* Copyright 2009 Kantega AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.kantega.publishing.api.content;
/**
 * Identifies a piece of content either by its content id or by an association id
 * (a placement of the content in the site structure), optionally qualified by
 * site, context, language, version and status. Fields default to -1 (unset).
 */
public class ContentIdentifier {
    private int contentId = -1; // Points to the content - corresponds to content.getId()
    private int associationId = -1; // Points to the menu item - corresponds to association.getAssociationId()
    private int siteId = -1; // Used together with contentId to resolve an associationId
    private int contextId = -1; // Used together with contentId to resolve an associationId
    private int language = Language.NORWEGIAN_BO; // Default language: Norwegian Bokmål
    private int version = -1; // -1 means "no specific version"
    private ContentStatus status = null; // null means "no specific status"

    public int getContentId() {
        return contentId;
    }

    public void setContentId(int contentId) {
        this.contentId = contentId;
    }

    public int getAssociationId() {
        return associationId;
    }

    public void setAssociationId(int associationId) {
        this.associationId = associationId;
    }

    public void setContextId(int contextId) {
        this.contextId = contextId;
    }

    public int getLanguage() {
        return language;
    }

    public void setLanguage(int language) {
        this.language = language;
    }

    public int getSiteId() {
        return siteId;
    }

    public void setSiteId(int siteId) {
        this.siteId = siteId;
    }

    public int getVersion() {
        return version;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    /**
     * Renders the identifier as a query-string fragment, preferring the
     * association id ("thisId=...") over the content id ("contentId=...") and
     * appending version, siteId and status only when they are set.
     */
    public String toString() {
        String idStr;
        if (this.associationId != -1) {
            idStr = "thisId=" + associationId;
        } else {
            idStr = "contentId=" + contentId;
        }
        if (version != -1) {
            idStr += "&version=" + version;
        }
        if (siteId != -1) {
            idStr += "&siteId=" + siteId;
        }
        if (status != null) {
            idStr += "&status=" + status;
        }
        return idStr;
    }

    public ContentStatus getStatus() {
        return status;
    }

    public void setStatus(ContentStatus status) {
        this.status = status;
    }

    // Equality covers all identifying fields, including status by reference/enum
    // identity; consistent with hashCode() below.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ContentIdentifier that = (ContentIdentifier) o;
        return associationId == that.associationId
                && contentId == that.contentId
                && contextId == that.contextId
                && language == that.language
                && siteId == that.siteId
                && status == that.status
                && version == that.version;
    }

    @Override
    public int hashCode() {
        int result = contentId;
        result = 31 * result + associationId;
        result = 31 * result + siteId;
        result = 31 * result + contextId;
        result = 31 * result + language;
        result = 31 * result + version;
        // status is folded in only when set, so a null status hashes the same
        // as omitting the term entirely.
        if (status != null) {
            result = 31 * result + status.getTypeAsInt();
        }
        return result;
    }

    /** Convenience factory: identifier referring to a placement (association). */
    public static ContentIdentifier fromAssociationId(int associationId) {
        ContentIdentifier contentIdentifier = new ContentIdentifier();
        contentIdentifier.setAssociationId(associationId);
        return contentIdentifier;
    }

    /** Convenience factory: identifier referring to the content itself. */
    public static ContentIdentifier fromContentId(int contentId) {
        ContentIdentifier contentIdentifier = new ContentIdentifier();
        contentIdentifier.setContentId(contentId);
        return contentIdentifier;
    }

    public int getContextId() {
        return contextId;
    }
}
| kantega/Flyt-cms | modules/api/src/main/java/no/kantega/publishing/api/content/ContentIdentifier.java | Java | apache-2.0 | 4,148 |
package com.mindoo.domino.jna.internal.structs;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.List;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
/**
 * JNA mapping of the Notes LotusScript compile-error info structure. Field order
 * must match the native layout exactly (see {@link #getFieldOrder()}); all
 * constructors are wrapped in AccessController.doPrivileged via the static
 * newInstance factories.
 */
public class NotesLSCompileErrorInfoStruct extends BaseStructure {
    // Structure version; allows for future expansion - currently always 1.
    public short Version;
    // Source line number of the error, relative to the LotusScript module
    // containing the error, if applicable.
    public short Line;
    // Pointer to the error text.
    public /* char* */Pointer pErrText;
    // Pointer to the file name, if applicable.
    public /* char* */Pointer pErrFile;

    /**
     * @deprecated only public to be used by JNA; use static newInstance method instead to run in AccessController.doPrivileged block
     */
    public NotesLSCompileErrorInfoStruct() {
        super();
    }

    /** Creates an empty instance inside a doPrivileged block. */
    public static NotesLSCompileErrorInfoStruct newInstance() {
        return AccessController.doPrivileged(new PrivilegedAction<NotesLSCompileErrorInfoStruct>() {

            @Override
            public NotesLSCompileErrorInfoStruct run() {
                return new NotesLSCompileErrorInfoStruct();
            }
        });
    }

    // Order here defines the native memory layout; do not reorder.
    @Override
    protected List<String> getFieldOrder() {
        return Arrays.asList("Version", "Line", "pErrText", "pErrFile");
    }

    /**
     * @deprecated only public to be used by JNA; use static newInstance method instead to run in AccessController.doPrivileged block
     *
     * @param version allows for future expansion - currently always 1
     * @param line source line number of error, relative to LotusScript module containing the error, if applicable
     * @param pErrText error text
     * @param pErrFile file name, if applicable
     */
    public NotesLSCompileErrorInfoStruct(short version, short line, Pointer pErrText, Pointer pErrFile) {
        super();
        this.Version = version;
        this.Line = line;
        this.pErrText = pErrText;
        this.pErrFile = pErrFile;
    }

    /** Creates a fully initialized instance inside a doPrivileged block. */
    public static NotesLSCompileErrorInfoStruct newInstance(final short version, final short line, final Pointer pErrText, final Pointer pErrFile) {
        return AccessController.doPrivileged(new PrivilegedAction<NotesLSCompileErrorInfoStruct>() {

            @Override
            public NotesLSCompileErrorInfoStruct run() {
                return new NotesLSCompileErrorInfoStruct(version, line, pErrText, pErrFile);
            }
        });
    }

    /**
     * @deprecated only public to be used by JNA; use static newInstance method instead to run in AccessController.doPrivileged block
     *
     * @param peer pointer
     */
    public NotesLSCompileErrorInfoStruct(Pointer peer) {
        super(peer);
    }

    /** Wraps an existing native memory block inside a doPrivileged block. */
    public static NotesLSCompileErrorInfoStruct newInstance(final Pointer peer) {
        return AccessController.doPrivileged(new PrivilegedAction<NotesLSCompileErrorInfoStruct>() {

            @Override
            public NotesLSCompileErrorInfoStruct run() {
                return new NotesLSCompileErrorInfoStruct(peer);
            }
        });
    }

    // Standard JNA marker subclasses for by-reference / by-value usage.
    public static class ByReference extends NotesLSCompileErrorInfoStruct implements Structure.ByReference {

    };

    public static class ByValue extends NotesLSCompileErrorInfoStruct implements Structure.ByValue {

    };

    // Unsigned views of the 16-bit fields.
    public int getVersionAsInt() {
        return Version & 0xffff;
    }

    public int getLineAsInt() {
        return Line & 0xffff;
    }
}
| klehmann/domino-jna | domino-jna/src/main/java/com/mindoo/domino/jna/internal/structs/NotesLSCompileErrorInfoStruct.java | Java | apache-2.0 | 2,941 |
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.group;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufHolder;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelId;
import io.netty.channel.ServerChannel;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.StringUtil;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* The default {@link ChannelGroup} implementation.
*/
public class DefaultChannelGroup extends AbstractSet<Channel> implements ChannelGroup {
    // Monotonic counter used to build unique default group names ("group-0x...").
    private static final AtomicInteger nextId = new AtomicInteger();
    private final String name;
    private final EventExecutor executor;
    // Server and non-server channels are tracked separately: write/flush style
    // operations only make sense on non-server (child) channels and skip the
    // server map entirely.
    private final ConcurrentMap<ChannelId, Channel> serverChannels = PlatformDependent.newConcurrentHashMap();
    private final ConcurrentMap<ChannelId, Channel> nonServerChannels = PlatformDependent.newConcurrentHashMap();
    // Removes a channel from this group automatically once it closes.
    private final ChannelFutureListener remover = new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            remove(future.channel());
        }
    };
    private final VoidChannelGroupFuture voidFuture = new VoidChannelGroupFuture(this);
    // When true, the group stays closed after close(): channels added afterwards
    // are closed immediately (see add() and close(ChannelMatcher)).
    private final boolean stayClosed;
    private volatile boolean closed;
    /**
     * Creates a new group with a generated name and the provided {@link EventExecutor} to notify the
     * {@link ChannelGroupFuture}s.
     */
    public DefaultChannelGroup(EventExecutor executor) {
        this(executor, false);
    }
    /**
     * Creates a new group with the specified {@code name} and {@link EventExecutor} to notify the
     * {@link ChannelGroupFuture}s.  Please note that different groups can have the same name, which means no
     * duplicate check is done against group names.
     */
    public DefaultChannelGroup(String name, EventExecutor executor) {
        this(name, executor, false);
    }
    /**
     * Creates a new group with a generated name and the provided {@link EventExecutor} to notify the
     * {@link ChannelGroupFuture}s. {@code stayClosed} defines whether or not, this group can be closed
     * more than once. Adding channels to a closed group will immediately close them, too. This makes it
     * easy, to shutdown server and child channels at once.
     */
    public DefaultChannelGroup(EventExecutor executor, boolean stayClosed) {
        this("group-0x" + Integer.toHexString(nextId.incrementAndGet()), executor, stayClosed);
    }
    /**
     * Creates a new group with the specified {@code name} and {@link EventExecutor} to notify the
     * {@link ChannelGroupFuture}s. {@code stayClosed} defines whether or not, this group can be closed
     * more than once. Adding channels to a closed group will immediately close them, too. This makes it
     * easy, to shutdown server and child channels at once. Please note that different groups can have
     * the same name, which means no duplicate check is done against group names.
     */
    public DefaultChannelGroup(String name, EventExecutor executor, boolean stayClosed) {
        ObjectUtil.checkNotNull(name, "name");
        this.name = name;
        this.executor = executor;
        this.stayClosed = stayClosed;
    }
    @Override
    public String name() {
        return name;
    }
    /** Looks the channel up in both maps; non-server channels are checked first. */
    @Override
    public Channel find(ChannelId id) {
        Channel c = nonServerChannels.get(id);
        if (c != null) {
            return c;
        } else {
            return serverChannels.get(id);
        }
    }
    @Override
    public boolean isEmpty() {
        return nonServerChannels.isEmpty() && serverChannels.isEmpty();
    }
    @Override
    public int size() {
        return nonServerChannels.size() + serverChannels.size();
    }
    @Override
    public boolean contains(Object o) {
        if (o instanceof ServerChannel) {
            return serverChannels.containsValue(o);
        } else if (o instanceof Channel) {
            return nonServerChannels.containsValue(o);
        }
        return false;
    }
    @Override
    public boolean add(Channel channel) {
        ConcurrentMap<ChannelId, Channel> map =
            channel instanceof ServerChannel? serverChannels : nonServerChannels;
        boolean added = map.putIfAbsent(channel.id(), channel) == null;
        if (added) {
            channel.closeFuture().addListener(remover);
        }
        if (stayClosed && closed) {
            // First add channel, than check if closed.
            // Seems inefficient at first, but this way a volatile
            // gives us enough synchronization to be thread-safe.
            //
            // If true: Close right away.
            // (Might be closed a second time by ChannelGroup.close(), but this is ok)
            //
            // If false: Channel will definitely be closed by the ChannelGroup.
            // (Because closed=true always happens-before ChannelGroup.close())
            //
            // See https://github.com/netty/netty/issues/4020
            channel.close();
        }
        return added;
    }
    /** Accepts either a {@link Channel} or its {@link ChannelId}; detaches the remover listener. */
    @Override
    public boolean remove(Object o) {
        Channel c = null;
        if (o instanceof ChannelId) {
            c = nonServerChannels.remove(o);
            if (c == null) {
                c = serverChannels.remove(o);
            }
        } else if (o instanceof Channel) {
            c = (Channel) o;
            if (c instanceof ServerChannel) {
                c = serverChannels.remove(c.id());
            } else {
                c = nonServerChannels.remove(c.id());
            }
        }
        if (c == null) {
            return false;
        }
        c.closeFuture().removeListener(remover);
        return true;
    }
    // NOTE(review): unlike remove(), clear() does not detach the remover
    // listener from the channels' close futures — presumably harmless since
    // remove() on an absent channel is a no-op; confirm before changing.
    @Override
    public void clear() {
        nonServerChannels.clear();
        serverChannels.clear();
    }
    @Override
    public Iterator<Channel> iterator() {
        return new CombinedIterator<Channel>(
                serverChannels.values().iterator(),
                nonServerChannels.values().iterator());
    }
    @Override
    public Object[] toArray() {
        Collection<Channel> channels = new ArrayList<Channel>(size());
        channels.addAll(serverChannels.values());
        channels.addAll(nonServerChannels.values());
        return channels.toArray();
    }
    @Override
    public <T> T[] toArray(T[] a) {
        Collection<Channel> channels = new ArrayList<Channel>(size());
        channels.addAll(serverChannels.values());
        channels.addAll(nonServerChannels.values());
        return channels.toArray(a);
    }
    @Override
    public ChannelGroupFuture close() {
        return close(ChannelMatchers.all());
    }
    @Override
    public ChannelGroupFuture disconnect() {
        return disconnect(ChannelMatchers.all());
    }
    @Override
    public ChannelGroupFuture deregister() {
        return deregister(ChannelMatchers.all());
    }
    @Override
    public ChannelGroupFuture write(Object message) {
        return write(message, ChannelMatchers.all());
    }
    // Create a safe duplicate of the message to write it to a channel but not affect other writes.
    // See https://github.com/netty/netty/issues/1461
    private static Object safeDuplicate(Object message) {
        if (message instanceof ByteBuf) {
            return ((ByteBuf) message).retainedDuplicate();
        } else if (message instanceof ByteBufHolder) {
            return ((ByteBufHolder) message).retainedDuplicate();
        } else {
            return ReferenceCountUtil.retain(message);
        }
    }
    @Override
    public ChannelGroupFuture write(Object message, ChannelMatcher matcher) {
        return write(message, matcher, false);
    }
    @Override
    public ChannelGroupFuture write(Object message, ChannelMatcher matcher, boolean voidPromise) {
        ObjectUtil.checkNotNull(message, "message");
        ObjectUtil.checkNotNull(matcher, "matcher");
        final ChannelGroupFuture future;
        if (voidPromise) {
            for (Channel c: nonServerChannels.values()) {
                if (matcher.matches(c)) {
                    c.write(safeDuplicate(message), c.voidPromise());
                }
            }
            future = voidFuture;
        } else {
            Map<Channel, ChannelFuture> futures = new LinkedHashMap<Channel, ChannelFuture>(nonServerChannels.size());
            for (Channel c: nonServerChannels.values()) {
                if (matcher.matches(c)) {
                    futures.put(c, c.write(safeDuplicate(message)));
                }
            }
            future = new DefaultChannelGroupFuture(this, futures, executor);
        }
        // Each channel got its own retained duplicate above; drop the caller's reference.
        ReferenceCountUtil.release(message);
        return future;
    }
    @Override
    public ChannelGroup flush() {
        return flush(ChannelMatchers.all());
    }
    @Override
    public ChannelGroupFuture flushAndWrite(Object message) {
        return writeAndFlush(message);
    }
    @Override
    public ChannelGroupFuture writeAndFlush(Object message) {
        return writeAndFlush(message, ChannelMatchers.all());
    }
    @Override
    public ChannelGroupFuture disconnect(ChannelMatcher matcher) {
        ObjectUtil.checkNotNull(matcher, "matcher");
        Map<Channel, ChannelFuture> futures =
                new LinkedHashMap<Channel, ChannelFuture>(size());
        for (Channel c: serverChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.disconnect());
            }
        }
        for (Channel c: nonServerChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.disconnect());
            }
        }
        return new DefaultChannelGroupFuture(this, futures, executor);
    }
    @Override
    public ChannelGroupFuture close(ChannelMatcher matcher) {
        ObjectUtil.checkNotNull(matcher, "matcher");
        Map<Channel, ChannelFuture> futures =
                new LinkedHashMap<Channel, ChannelFuture>(size());
        if (stayClosed) {
            // It is important to set the closed to true, before closing channels.
            // Our invariants are:
            // closed=true happens-before ChannelGroup.close()
            // ChannelGroup.add() happens-before checking closed==true
            //
            // See https://github.com/netty/netty/issues/4020
            closed = true;
        }
        for (Channel c: serverChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.close());
            }
        }
        for (Channel c: nonServerChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.close());
            }
        }
        return new DefaultChannelGroupFuture(this, futures, executor);
    }
    @Override
    public ChannelGroupFuture deregister(ChannelMatcher matcher) {
        ObjectUtil.checkNotNull(matcher, "matcher");
        Map<Channel, ChannelFuture> futures =
                new LinkedHashMap<Channel, ChannelFuture>(size());
        for (Channel c: serverChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.deregister());
            }
        }
        for (Channel c: nonServerChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.deregister());
            }
        }
        return new DefaultChannelGroupFuture(this, futures, executor);
    }
    @Override
    public ChannelGroup flush(ChannelMatcher matcher) {
        for (Channel c: nonServerChannels.values()) {
            if (matcher.matches(c)) {
                c.flush();
            }
        }
        return this;
    }
    @Override
    public ChannelGroupFuture flushAndWrite(Object message, ChannelMatcher matcher) {
        return writeAndFlush(message, matcher);
    }
    @Override
    public ChannelGroupFuture writeAndFlush(Object message, ChannelMatcher matcher) {
        return writeAndFlush(message, matcher, false);
    }
    @Override
    public ChannelGroupFuture writeAndFlush(Object message, ChannelMatcher matcher, boolean voidPromise) {
        ObjectUtil.checkNotNull(message, "message");
        // NOTE(review): matcher is not null-checked here, unlike in
        // write(Object, ChannelMatcher, boolean) — likely an oversight.
        final ChannelGroupFuture future;
        if (voidPromise) {
            for (Channel c: nonServerChannels.values()) {
                if (matcher.matches(c)) {
                    c.writeAndFlush(safeDuplicate(message), c.voidPromise());
                }
            }
            future = voidFuture;
        } else {
            Map<Channel, ChannelFuture> futures = new LinkedHashMap<Channel, ChannelFuture>(nonServerChannels.size());
            for (Channel c: nonServerChannels.values()) {
                if (matcher.matches(c)) {
                    futures.put(c, c.writeAndFlush(safeDuplicate(message)));
                }
            }
            future = new DefaultChannelGroupFuture(this, futures, executor);
        }
        // Each channel got its own retained duplicate above; drop the caller's reference.
        ReferenceCountUtil.release(message);
        return future;
    }
    @Override
    public ChannelGroupFuture newCloseFuture() {
        return newCloseFuture(ChannelMatchers.all());
    }
    /** Aggregates the close futures of all matching channels (server and non-server). */
    @Override
    public ChannelGroupFuture newCloseFuture(ChannelMatcher matcher) {
        Map<Channel, ChannelFuture> futures =
                new LinkedHashMap<Channel, ChannelFuture>(size());
        for (Channel c: serverChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.closeFuture());
            }
        }
        for (Channel c: nonServerChannels.values()) {
            if (matcher.matches(c)) {
                futures.put(c, c.closeFuture());
            }
        }
        return new DefaultChannelGroupFuture(this, futures, executor);
    }
    // Identity semantics: two groups are equal only if they are the same object.
    @Override
    public int hashCode() {
        return System.identityHashCode(this);
    }
    @Override
    public boolean equals(Object o) {
        return this == o;
    }
    @Override
    public int compareTo(ChannelGroup o) {
        int v = name().compareTo(o.name());
        if (v != 0) {
            return v;
        }
        // NOTE(review): subtraction could overflow if identityHashCode ever
        // returned negative values; Integer.compare(...) would be safer —
        // confirm no callers depend on the exact ordering before changing.
        return System.identityHashCode(this) - System.identityHashCode(o);
    }
    @Override
    public String toString() {
        return StringUtil.simpleClassName(this) + "(name: " + name() + ", size: " + size() + ')';
    }
}
| fenik17/netty | transport/src/main/java/io/netty/channel/group/DefaultChannelGroup.java | Java | apache-2.0 | 15,366 |
from numpy import absolute
from numpy import multiply
from numpy import where
from numpy import zeros
from gwlfe.Memoization import memoize
from gwlfe.enums import ETflag
# @memoize #TODO: adding memoization causes this function to not pass the tests
def DailyET(NYrs, DaysMonth, Temp, DayHrs, KV, PcntET, ETFlag):
    """Daily potential evapotranspiration via the Hamon (1961) method.

    Args:
        NYrs: number of simulated years.
        DaysMonth: DaysMonth[year][month] -> day count of that month.
        Temp: Temp[year][month][day] daily mean temperature (deg C).
        DayHrs: 12 monthly mean day lengths (hours).
        KV: 12 monthly vegetation cover coefficients.
        PcntET: 12 monthly ET adjustment factors.
        ETFlag: method selector; only ETflag.HAMON_METHOD is implemented.

    Returns:
        (NYrs, 12, 31) array of daily ET values (units as used elsewhere in
        GWLF-E -- confirm). Entries stay 0 for non-Hamon flags, for days with
        Temp <= 0, and for day slots beyond the month's length.
    """
    result = zeros((NYrs, 12, 31))
    # The method flag never changes inside the loops, so decide once up front
    # instead of re-testing it every simulated day.  Behavior is unchanged:
    # any flag other than HAMON_METHOD always produced an all-zero result.
    if ETFlag is not ETflag.HAMON_METHOD:
        return result
    # CALCULATE ET FROM SATURATED VAPOR PRESSURE, HAMON (1961) METHOD
    for Y in range(NYrs):
        for i in range(12):
            for j in range(DaysMonth[Y][i]):
                DailyTemp = Temp[Y][i][j]
                # Hamon ET is only defined above freezing.
                if DailyTemp > 0:
                    SatVaPressure = (33.8639 * ((0.00738 * DailyTemp +
                                                 0.8072) ** 8 - 0.000019 *
                                                absolute(1.8 * DailyTemp + 48) +
                                                0.001316))
                    PotenET = (0.021 * DayHrs[i] ** 2 * SatVaPressure / (DailyTemp + 273))
                    result[Y][i][j] = KV[i] * PotenET * PcntET[i]
    return result
@memoize
def SatVaPressure(Temp):
    """Elementwise saturated vapor pressure for temperature array ``Temp``.

    Same polynomial as the scalar computation inside ``DailyET``; memoized
    because ``PotentET`` calls it repeatedly with identical inputs.
    """
    polynomial = (0.00738 * Temp + 0.8072) ** 8
    correction = 0.000019 * absolute(1.8 * Temp + 48)
    return 33.8639 * (polynomial - correction + 0.001316)
@memoize
def PotentET(DayHrs, Temp):
    """Hamon potential ET: 0.021 * DayHrs^2 * SVP / (Temp + 273), broadcast
    so each month's day length applies to every day of that month."""
    monthly_daylight_sq = (DayHrs ** 2).reshape(12, 1)
    return multiply(0.021 * monthly_daylight_sq, SatVaPressure(Temp)) / (Temp + 273)
@memoize
def DailyET_f(Temp, KV, PcntET, DayHrs):
    """Vectorized counterpart of ``DailyET``'s Hamon branch: zero wherever
    the daily temperature is at or below freezing."""
    monthly_factor = (KV * PcntET).reshape(12, 1)
    scaled = multiply(monthly_factor, PotentET(DayHrs, Temp))
    return where(Temp > 0, scaled, 0)
| WikiWatershed/gwlf-e | gwlfe/Input/WaterBudget/ET.py | Python | apache-2.0 | 1,598 |
// Shared page-level state for the geocoding helpers below.
// NOTE(review): addChanged, positionFound, newLat and newLng are declared but
// never used in this file -- presumably read/written by other scripts on the
// page; confirm before removing.
var addChanged=false;
var positionFound = false;
// Single Google Maps geocoder instance reused by codeAddress()/getAddress().
var geocoder = new google.maps.Geocoder();
var newLat;
var newLng;
/**
 * Geocodes a postal address into latitude/longitude coordinates.
 * On success calls funcFound(lat, lng); on failure calls funcFail(address).
 * The lookup is asynchronous: both callbacks fire after this function returns.
 */
function codeAddress(addressToTest, funcFound, funcFail) {
    geocoder.geocode({ 'address': addressToTest }, function (results, status) {
        if (status != google.maps.GeocoderStatus.OK) {
            // The service could not resolve the address.
            funcFail(addressToTest);
            return;
        }
        var location = results[0].geometry.location;
        funcFound(location.lat(), location.lng());
    });
}
/**
 * Reverse-geocodes a latitude/longitude pair into a formatted address.
 * On success calls funcFound(formattedAddress); otherwise funcFail(lat, lng).
 * The lookup is asynchronous: both callbacks fire after this function returns.
 */
function getAddress(latitude, longitude, funcFound, funcFail) {
    var position = new google.maps.LatLng(latitude, longitude);
    geocoder.geocode({ 'latLng': position }, function (results, status) {
        if (status == google.maps.GeocoderStatus.OK) {
            funcFound(results[0].formatted_address);
        } else {
            funcFail(latitude, longitude);
        }
    });
}
| gpierre42/optraj | vagrant/optraj.istic.univ-rennes1.fr/GUI/htdocs/js/util/addressResolution.js | JavaScript | apache-2.0 | 1,609 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.refactoring.extractMethodObject.reflect;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElementFactory;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiMethodCallExpression;
import com.intellij.refactoring.extractMethodObject.ItemToReplaceDescriptor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Describes a field whose declared type is inaccessible from generated
 * reflection code and therefore has to be re-declared with an accessible type.
 */
public class FieldDeclarationDescriptor implements ItemToReplaceDescriptor {
  private final PsiField myField;
  private final String myName;

  private FieldDeclarationDescriptor(@NotNull PsiField field, @NotNull String name) {
    myField = field;
    myName = name;
  }

  /**
   * Returns a descriptor for {@code field} when its declared type is not
   * accessible, or {@code null} when no replacement is needed.
   */
  @Nullable
  public static ItemToReplaceDescriptor createIfInaccessible(@NotNull PsiField field) {
    String fieldName = field.getName();
    return PsiReflectionAccessUtil.isAccessibleType(field.getType())
           ? null
           : new FieldDeclarationDescriptor(field, fieldName);
  }

  @Override
  public void replace(@NotNull PsiClass outerClass,
                      @NotNull PsiElementFactory elementFactory,
                      @NotNull PsiMethodCallExpression callExpression) {
    // Re-declare the field under the same name with the nearest accessible type.
    PsiField accessibleField =
      elementFactory.createField(myName, PsiReflectionAccessUtil.nearestAccessibleType(myField.getType()));
    myField.replace(accessibleField);
  }
}
| leafclick/intellij-community | java/java-impl/src/com/intellij/refactoring/extractMethodObject/reflect/FieldDeclarationDescriptor.java | Java | apache-2.0 | 1,468 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.cdi;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.DefaultComponent;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Named;
import java.util.Map;
/**
 * CDI-discovered Camel component registered under the {@code cdi-event} URI
 * scheme.  It deliberately rejects endpoint creation: CDI event endpoints
 * must be obtained by injecting {@code CdiEventEndpoint} beans instead (see
 * the rationale inside {@link #createEndpoint}).
 */
@Named("cdi-event")
@ApplicationScoped
/* package-private */ class CdiEventComponent extends DefaultComponent {
    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) {
        /* The CDI event endpoint URI follows the format hereafter:
        cdi-event://PayloadType<T1,...,Tn>[?qualifiers=QualifierType1[,...[,QualifierTypeN]...]]
        with the authority PayloadType (respectively the QualifierType) being the URI escaped fully qualified name of the payload (respectively qualifier) raw type followed by the type parameters section delimited by angle brackets for payload parameterized type.
        Which leads to unfriendly URIs, e.g.:
        cdi-event://org.apache.camel.cdi.se.pojo.EventPayload%3Cjava.lang.Integer%3E?qualifiers=org.apache.camel.cdi.se.qualifier.FooQualifier%2Corg.apache.camel.cdi.se.qualifier.BarQualifier
        From the conceptual standpoint, that shows the high impedance between the typesafe nature of CDI and the dynamic nature of the Camel component model.
        From the implementation standpoint, that would prevent efficient binding between the endpoint instances and observer methods as the CDI container doesn't have any ways of discovering the Camel context model during the deployment phase.
        */
        throw new UnsupportedOperationException("Creating CDI event endpoint isn't supported. Use @Inject " + CdiEventEndpoint.class.getSimpleName() + " instead");
    }
}
| astefanutti/camel-cdi | impl/src/main/java/org/apache/camel/cdi/CdiEventComponent.java | Java | apache-2.0 | 2,513 |
import {ComponentFixture, TestBed, waitForAsync} from '@angular/core/testing';
import {FormsModule} from '@angular/forms';
import {ClarityModule} from '@clr/angular';
import {RouterTestingModule} from '@angular/router/testing';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {AboutServiceMock} from '../../tests/api/about.service.mock';
import {NotificationServiceMock} from '../../tests/service/notification.service.mock';
import {SecurityServiceMock} from '../../tests/api/security.service.mock';
import {RoleDirective} from '../../security/directive/role.directive';
import {JobsComponent} from './jobs.component';
import {JobServiceMock} from '../../tests/api/job.service.mock';
import {StopComponent} from '../executions/stop/stop.component';
import {ConfirmComponent} from '../../shared/component/confirm/confirm.component';
import {ContextServiceMock} from '../../tests/service/context.service.mock';
import {SettingsServiceMock} from '../../tests/service/settings.service.mock';
import {DateFilterComponent} from '../../shared/filter/date/date.filter';
import {DatagridColumnPipe} from '../../shared/pipe/datagrid-column.pipe';
import {TranslateTestingModule} from 'ngx-translate-testing';
import TRANSLATIONS from '../../../assets/i18n/en.json';
// Spec for JobsComponent: verifies the component can be constructed inside a
// TestBed whose external collaborators are all replaced by mocks.
describe('tasks-jobs/jobs/jobs.component.ts', () => {
  let component: JobsComponent;
  let fixture: ComponentFixture<JobsComponent>;
  beforeEach(
    waitForAsync(() => {
      TestBed.configureTestingModule({
        // Component under test plus the declarables its template references.
        declarations: [
          JobsComponent,
          StopComponent,
          ConfirmComponent,
          RoleDirective,
          DateFilterComponent,
          DatagridColumnPipe
        ],
        imports: [
          FormsModule,
          ClarityModule,
          RouterTestingModule.withRoutes([]),
          TranslateTestingModule.withTranslations('en', TRANSLATIONS),
          BrowserAnimationsModule
        ],
        // Every service dependency is provided as a mock (no HTTP, no state).
        providers: [
          SecurityServiceMock.provider,
          AboutServiceMock.provider,
          NotificationServiceMock.provider,
          JobServiceMock.provider,
          ContextServiceMock.provider,
          SettingsServiceMock.provider
        ]
      }).compileComponents();
    })
  );
  beforeEach(() => {
    fixture = TestBed.createComponent(JobsComponent);
    component = fixture.componentInstance;
    // Reset notifications so assertions in other specs are not polluted.
    NotificationServiceMock.mock.clearAll();
  });
  it('should be created', () => {
    fixture.detectChanges();
    expect(component).toBeTruthy();
  });
});
| spring-cloud/spring-cloud-dataflow-ui | ui/src/app/tasks-jobs/jobs/jobs.component.spec.ts | TypeScript | apache-2.0 | 2,521 |
package controllers.api.v3;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.jsonwebtoken.JwtBuilder;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import libs.auth.ActionAuthenticator;
import libs.db.MorphiaObject;
import libs.helper.Helper;
import libs.json.ErrorHandler;
import models.MongoMerchant;
import models.MongoUser;
import org.mindrot.jbcrypt.BCrypt;
import org.mongodb.morphia.query.Query;
import play.libs.Json;
import play.mvc.BodyParser;
import play.mvc.Controller;
import play.mvc.Result;
import play.mvc.Security;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.bind.DatatypeConverter;
import java.security.Key;
import java.util.Date;
import java.util.UUID;
/**
* Merchant Controller
*/
public class Merchant extends Controller {
    /**
     * User Login Endpoint
     * Token is valid for 10 minutes
     * @return Json Result
     */
    @BodyParser.Of(BodyParser.Json.class)
    public Result userLogin() {
        JsonNode json = request().body().asJson();
        String email = json.findPath("email").textValue();
        String password = json.findPath("password").textValue();
        // Get user
        Query q = MorphiaObject.datastore.createQuery(MongoUser.class);
        q.filter("email", email);
        MongoUser user = (MongoUser) q.get();
        // NOTE(review): BCrypt.checkpw only runs when the user exists, so the
        // response time leaks whether an email is registered (user
        // enumeration). Hashing a dummy password on the miss path would fix it.
        if (user == null || !BCrypt.checkpw(password, user.getPassword())) {
            ErrorHandler e = new ErrorHandler();
            e.setError(401, "Unauthorized");
            return status(401, e.pretty());
        }
        // Sign a JWT whose subject is the user id; lifetime 10 minutes.
        String jwt = createJWT(user.getIdString(), 1000 * 60 * 10);
        ObjectNode result = Json.newObject();
        result.set("token", Json.toJson(jwt));
        result.set("status", Json.toJson("APPROVED"));
        return ok(Json.prettyPrint(result)).as("application/json; charset=utf-8");
    }
    /**
     * Authenticated Merchant Get Endpoint
     * @return Json Result
     */
    @Security.Authenticated(ActionAuthenticator.class)
    public Result get() {
        Integer id = null;
        try {
            id = Integer.parseInt(request().getQueryString("id"));
        } catch (Exception ex) {
            // NOTE(review): 400 Bad Request would be the conventional status
            // for an unparsable id; changing 403 would alter the public API.
            ErrorHandler e = new ErrorHandler();
            e.setError(403, "Bad request");
            return status(403, e.pretty());
        }
        // Get merchant
        Query q = MorphiaObject.datastore.createQuery(MongoMerchant.class);
        q.filter("id", id);
        MongoMerchant merchant = (MongoMerchant) q.get();
        if (merchant == null) {
            ErrorHandler e = new ErrorHandler();
            e.setError(404, "Not found");
            return status(404, e.pretty());
        }
        ObjectNode result = Json.newObject();
        result.set("status", Json.toJson("APPROVED"));
        result.set("message", Json.toJson("Approved"));
        result.set("merchant", Json.toJson(merchant));
        return ok(Json.prettyPrint(result)).as("application/json; charset=utf-8");
    }
    /**
     * Authenticated Merchant Endpoint
     * @return Json Result
     */
    @Security.Authenticated(ActionAuthenticator.class)
    public Result index() {
        // Look up the merchant by the opaque transactionId query parameter.
        String transactionId = request().getQueryString("transactionId");
        if (transactionId == null || transactionId.isEmpty()) {
            ErrorHandler e = new ErrorHandler();
            e.setError(403, "Transaction id required");
            return status(403, e.pretty());
        }
        // Get merchant
        Query q = MorphiaObject.datastore.createQuery(MongoMerchant.class);
        q.filter("transactionId", transactionId);
        MongoMerchant merchant = (MongoMerchant) q.get();
        if (merchant == null) {
            ErrorHandler e = new ErrorHandler();
            e.setError(404, "Not found");
            return status(404, e.pretty());
        }
        ObjectNode result = Json.newObject();
        result.set("merchant", Json.toJson(merchant));
        return ok(Json.prettyPrint(result)).as("application/json; charset=utf-8");
    }
    /**
     * Builds a compact, HS256-signed JWT.
     *
     * @param subject value for the "sub" claim (here: the user id)
     * @param ttlMillis token lifetime in milliseconds; a negative value
     *                  produces a token without an expiration claim
     * @return the serialized JWT
     */
    private String createJWT(String subject, long ttlMillis) {
        // The JWT signature algorithm we will be using to sign the token
        SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.HS256;
        long nowMillis = System.currentTimeMillis();
        Date now = new Date(nowMillis);
        // We will sign our JWT with our ApiKey secret
        byte[] apiKeySecretBytes = DatatypeConverter.parseBase64Binary(Helper.getConf("jwt.secret"));
        Key signingKey = new SecretKeySpec(apiKeySecretBytes, signatureAlgorithm.getJcaName());
        // Let's set the JWT Claims
        JwtBuilder builder = Jwts.builder().setId(UUID.randomUUID().toString())
                .setIssuedAt(now)
                .setSubject(subject)
                .setIssuer(Helper.getConf("api.url"))
                // .claim("context", context)
                .signWith(signatureAlgorithm, signingKey);
        // If it has been specified, let's add the expiration
        if (ttlMillis >= 0) {
            long expMillis = nowMillis + ttlMillis;
            Date exp = new Date(expMillis);
            builder.setExpiration(exp);
        }
        // Builds the JWT and serializes it to a compact, URL-safe string
        return builder.compact();
    }
}
| sezginonline/restapi.play | app/controllers/api/v3/Merchant.java | Java | apache-2.0 | 5,344 |
using App.WebInfo.Entities.Concrete;
namespace App.WebInfo.Business.Abstract
{
    /// <summary>
    /// Service contract for <see cref="Personal"/> entities.  All members are
    /// inherited from the generic <see cref="IServiceBase{T}"/> CRUD surface;
    /// this marker interface exists so the concrete service can be resolved
    /// by type.
    /// </summary>
    public interface IPersonalService : IServiceBase<Personal>
    {
    }
}
| AhmetKayar/WebInfo | WebInfoApp/App.WebInfo.Business/Abstract/IPersonalService.cs | C# | apache-2.0 | 158 |
using System.Reflection;
using System.Runtime.InteropServices;
// General information about this assembly is controlled through the
// following set of attributes. Change these attribute values to modify
// the information associated with the assembly.
[assembly: AssemblyTitle("AC Crypt API")]
[assembly: AssemblyDescription("AC Crypt API")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("AC Inc.")]
[assembly: AssemblyProduct("AC Library")]
[assembly: AssemblyCopyright("Copyright © AC Inc.")]
[assembly: AssemblyTrademark("AC Library")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly invisible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("68d00c1c-3472-4349-b32a-ffcd4d6dbce1")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values, or default the Build and Revision numbers
// by using '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("3.4.0.1")]
[assembly: AssemblyFileVersion("3.4.0.1")]
| win120a/ACLibrary | ACCryptoAPI/Properties/AssemblyInfo.cs | C# | apache-2.0 | 1,302 |
'''
Created on Jul 29, 2015
@author: Mikhail
'''
import json
import random
import string
import time
import os
__version__ = 2.0
class MyOwnJSONProcessing:
    """Helpers for generating random JSON files and loading them back.

    NOTE(review): this module targets Python 2 only (``print`` statements,
    ``xrange``, ``dict.has_key``, ``string.lowercase``); it will not run
    unchanged under Python 3.
    """
    # variables for amount of objects in dictionary (for json)
    min_len_of_json_dict = 1
    max_len_of_json_dict = 5
    # variables for max and min length of keys in dictionary (for json)
    min_len_of_key = 1
    max_len_of_key = 10
    # variable for max value in dictionary (for json)
    max_value = 100
    @classmethod
    def generate_set_of_files_with_json_obj(cls, amount_of_files, is_data_complicated = False):
        """Write ``amount_of_files`` JSON files, each with freshly generated data.

        ``is_data_complicated=True`` delegates to the (unimplemented)
        complicated-data generator and therefore currently raises
        NotImplementedError.
        """
        for dummy_i in xrange(amount_of_files):
            if not is_data_complicated:
                # we will generate simple data for json file
                cls.generate_json_file_with_data(data = cls.generate_data_for_json_obj())
            else:
                # lets try to generate more complicated data for json file
                cls.generate_json_file_with_data(data = cls.generate_complicated_data_for_json_obj())
    @classmethod
    def generate_data_for_json_obj(cls):
        """Build a flat dict with random lowercase/digit keys and int values.

        NOTE(review): the resulting dict may be smaller than the drawn size
        because duplicate keys are silently skipped (``has_key`` check).
        """
        json_data = {}
        # generating random key
        for dummy_i in range(random.randrange(cls.min_len_of_json_dict, cls.max_len_of_json_dict)):
            new_key = cls.randomword(random.randrange(cls.min_len_of_key, cls.max_len_of_key))
            new_value = random.randrange(cls.max_value)
            if not json_data.has_key(new_key):
                json_data[new_key] = new_value
        return json_data
    @classmethod
    def generate_complicated_data_for_json_obj(cls):
        # Placeholder for nested/structured payloads; intentionally unimplemented.
        raise NotImplementedError
    @staticmethod
    def generate_json_file_with_data(file_name_template = "data_<timestamp>.json", data = {}):
        """
        By default this function generates json file with name that contains time-stamp
        when it has been generated

        Returns the name of the file actually written.

        NOTE(review): when the template contains no ``<timestamp>`` placeholder,
        collision handling replaces EVERY ``.`` in the template with
        ``<id>.``, which mangles templates containing more than one dot —
        confirm intent before reusing with such templates.

        NOTE(review): the mutable default ``data={}`` is shared across calls;
        harmless here because it is only read, never mutated.
        """
        file_name_id = 0
        file_name = string.replace(file_name_template, '<timestamp>', str(time.time())) if (string.find(file_name_template, '<timestamp>') != -1) else file_name_template
        while os.path.exists(file_name):
            file_name_id += 1
            file_name = string.replace(file_name_template, '<timestamp>', str(time.time())) if (string.find(file_name_template, '<timestamp>') != -1) else string.replace(file_name_template, ".", str(file_name_id) + ".")
        # process the file
        with open(file_name, 'w') as f:
            json.dump(data, f, indent = 4)
        print "File {} has been generated".format(file_name)
        return file_name
    @staticmethod
    def load_data_from_json_file(file_name):
        """Parse ``file_name`` as JSON and return the resulting object."""
        data = {}
        with open(file_name, 'r') as f:
            data = json.load(f)
        return data
    @staticmethod
    def randomword(length):
        """Return a random string of ``length`` lowercase letters and digits."""
        return ''.join(random.choice(string.lowercase + string.digits) for dummy_i in range(length))
    @staticmethod
    def clean_up(dir_with_tests = ".", postfix = ".json"):
        """
        This function removes all files in folder from parameters (not from subfolders) with required postfix
        @param dir_with_tests: directory when selected files should be removed
        @param postfix: postfix for files that should be removed
        """
        for name in os.listdir(dir_with_tests):
            if name.endswith(postfix):
                file_or_dir_name = os.path.join(dir_with_tests, name)
                # we should process only files
                if os.path.isfile(file_or_dir_name):
                    os.remove(file_or_dir_name)
                    print "File {} has been removed...".format(file_or_dir_name)
| MikeLaptev/sandbox_python | mera/unittest_example/json_file_generator.py | Python | apache-2.0 | 3,656 |
package jp.eq_inc.aranduh;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumentation test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies that the instrumentation targets the expected application package. */
    @Test
    public void useAppContext() throws Exception {
        // Context of the app under test.
        final String expectedPackage = "jp.eq_inc.aranduh";
        final Context targetContext = InstrumentationRegistry.getTargetContext();

        assertEquals(expectedPackage, targetContext.getPackageName());
    }
}
| eq-inc/eq-panicar-unlimitedhand | app/src/androidTest/java/jp/eq_inc/aranduh/ExampleInstrumentedTest.java | Java | apache-2.0 | 738 |
package active_test
import (
"context"
"log"
"sync"
"testing"
"time"
"github.com/m-lab/etl/active"
"google.golang.org/api/iterator"
)
// throttleStats counts how many runnables execute concurrently and records
// the high-water mark. Buffered-channel lengths serve as thread-safe counters.
type throttleStats struct {
	running chan struct{} // one token per runnable currently executing
	doneCount chan struct{} // one token per runnable that has finished
	lock sync.Mutex // guards maxRunning
	maxRunning int // highest observed len(running)
}

// add registers a newly started runnable and returns the number currently
// running, updating the recorded maximum if this is a new high-water mark.
func (mt *throttleStats) add() int {
	mt.running <- struct{}{} // Add to the count.
	now := len(mt.running)
	mt.lock.Lock()
	defer mt.lock.Unlock()
	if mt.maxRunning < now {
		mt.maxRunning = now
	}
	return now
}

// end marks one runnable as finished by moving its token from the running
// channel to the doneCount channel.
func (mt *throttleStats) end() {
	// decrement inFlight, and increment doneCount.
	mt.doneCount <- <-mt.running
}

// done reports how many runnables have completed so far.
func (mt *throttleStats) done() int {
	return len(mt.doneCount)
}

// max returns the highest concurrency level observed by add.
func (mt *throttleStats) max() int {
	mt.lock.Lock()
	defer mt.lock.Unlock()
	return mt.maxRunning
}

// newThrottleStats creates a throttleStats able to track up to n runnables;
// both channels must be buffered so that add/end never block in tests.
func newThrottleStats(n int) *throttleStats {
	return &throttleStats{
		running: make(chan struct{}, n),
		doneCount: make(chan struct{}, n),
	}
}
// source is a test double for an active source: it yields count
// statsRunnables before reporting iterator.Done.
type source struct {
	count int
	stats *throttleStats
}

// statsRunnable is a test runnable that records its execution in the shared
// throttleStats.
type statsRunnable struct {
	stats *throttleStats
}

// Next hands out one statsRunnable per call until count is exhausted, then
// returns iterator.Done.
func (s *source) Next(ctx context.Context) (active.Runnable, error) {
	if s.count > 0 {
		s.count--
		return &statsRunnable{s.stats}, nil
	}
	return nil, iterator.Done
}

// Label returns a fixed identifier for this source.
func (s *source) Label() string {
	return "label"
}

// Run records the current concurrency level in the shared stats and then
// simulates a small amount of work before marking itself finished.
func (sr *statsRunnable) Run(ctx context.Context) error {
	now := sr.stats.add()
	defer sr.stats.end()
	log.Println(now, "running")
	time.Sleep(1 * time.Millisecond)
	return nil
}

// Info returns a fixed description for this runnable.
func (sr *statsRunnable) Info() string {
	return "info"
}
// TestThrottledSource verifies that a throttled source drains every runnable
// exactly once while never exceeding the concurrency allowed by the token
// source.
func TestThrottledSource(t *testing.T) {
	src := source{count: 5, stats: newThrottleStats(100)}

	// throttle to handle two at a time.
	ts := active.Throttle(&src, active.NewWSTokenSource(2))

	// runAll is a package-level test helper defined elsewhere in this package.
	eg, err := runAll(context.Background(), ts)
	if err != iterator.Done {
		t.Fatal("Expected iterator.Done", err)
	}
	err = eg.Wait()
	if err != nil {
		t.Fatal(err)
	}

	// All five runnables completed, none are still in flight, and the
	// concurrency cap of two was actually reached (and never exceeded).
	if src.stats.done() != 5 {
		t.Error("Should have been 5 runnables", src.stats.done())
	}
	if len(src.stats.running) != 0 {
		t.Error("running should be 0:", len(src.stats.running))
	}
	if src.stats.max() != 2 {
		t.Error("Max running != 2", src.stats.max())
	}
}
| m-lab/etl | active/throttle_test.go | GO | apache-2.0 | 2,118 |
package net.bytebuddy.build.maven;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* An abstract base class for a user configuration implying a Maven coordinate.
*/
@SuppressFBWarnings(value = "UWF_UNWRITTEN_PUBLIC_OR_PROTECTED_FIELD", justification = "Written to by Maven")
public class AbstractUserConfiguration {

    /**
     * The group id of the project containing the plugin type or {@code null} if the current project's group id should be used.
     */
    protected String groupId;

    /**
     * The artifact id of the project containing the plugin type or {@code null} if the current project's artifact id should be used.
     */
    protected String artifactId;

    /**
     * The version of the project containing the plugin type or {@code null} if the current project's version should be used.
     */
    protected String version;

    /**
     * The packaging of the project containing the plugin type or {@code null} if the current project's packaging should be used.
     */
    protected String packaging;

    /**
     * Returns the configured value unless it is {@code null} or empty, in which case the fallback is returned.
     *
     * @param value    The explicitly configured value, possibly {@code null} or empty.
     * @param fallback The value to use when no explicit value was configured.
     * @return The effective value.
     */
    private static String resolve(String value, String fallback) {
        return value == null || value.length() == 0
                ? fallback
                : value;
    }

    /**
     * Returns the group id to use.
     *
     * @param groupId The current project's group id.
     * @return The group id to use.
     */
    protected String getGroupId(String groupId) {
        return resolve(this.groupId, groupId);
    }

    /**
     * Returns the artifact id to use.
     *
     * @param artifactId The current project's artifact id.
     * @return The artifact id to use.
     */
    protected String getArtifactId(String artifactId) {
        return resolve(this.artifactId, artifactId);
    }

    /**
     * Returns the version to use.
     *
     * @param version The current project's version.
     * @return The version to use.
     */
    protected String getVersion(String version) {
        return resolve(this.version, version);
    }

    /**
     * Returns the packaging to use.
     *
     * @param packaging The current project's packaging.
     * @return The packaging to use.
     */
    protected String getPackaging(String packaging) {
        return resolve(this.packaging, packaging);
    }

    /**
     * Resolves this transformation to a Maven coordinate.
     *
     * @param groupId    The current project's build id.
     * @param artifactId The current project's artifact id.
     * @param version    The current project's version.
     * @param packaging  The current project's packaging.
     * @return The resolved Maven coordinate.
     */
    public MavenCoordinate asCoordinate(String groupId, String artifactId, String version, String packaging) {
        return new MavenCoordinate(getGroupId(groupId), getArtifactId(artifactId), getVersion(version), getPackaging(packaging));
    }
}
| DALDEI/byte-buddy | byte-buddy-maven-plugin/src/main/java/net/bytebuddy/build/maven/AbstractUserConfiguration.java | Java | apache-2.0 | 3,015 |
package org.minimalj.example.erp.model;
import java.math.BigDecimal;
import org.minimalj.model.Keys;
import org.minimalj.model.Rendering;
import org.minimalj.model.annotation.Size;
/**
 * One line of an offer: an article together with the ordered quantity and the
 * agreed price.
 */
public class OfferArticle implements Rendering {
    public static final OfferArticle $ = Keys.of(OfferArticle.class);

    public ArticleView article;
    @Size(3)
    public Integer numberof;
    @Size(10)
    public BigDecimal price;

    /**
     * Renders the line as "article[, n Stück][, Preis: p]"; quantity and
     * price are appended only when present.
     */
    @Override
    public String render() {
        String text = String.valueOf(article.article);
        if (numberof != null) {
            text += ", " + numberof + " Stück";
        }
        if (price != null) {
            text += ", Preis: " + price;
        }
        return text;
    }
}
| BrunoEberhard/minimal-j | example/008_ERP/src/org/minimalj/example/erp/model/OfferArticle.java | Java | apache-2.0 | 696 |
/*
* Copyright 2015-2018 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.db.dao;
import java.util.List;
import org.mycontroller.standalone.db.tables.SensorsVariablesMap;
import org.mycontroller.standalone.message.McMessageUtils.MESSAGE_TYPE_PRESENTATION;
import org.mycontroller.standalone.message.McMessageUtils.MESSAGE_TYPE_SET_REQ;
/**
* @author Jeeva Kandasamy (jkandasa)
* @since 0.0.2
*/
public interface SensorsVariablesMapDao {

    /** Persists the given sensor-type/variable-type mapping. */
    void create(SensorsVariablesMap sensorsVariablesMap);

    /** Persists a mapping between the given sensor type and variable type. */
    void create(MESSAGE_TYPE_PRESENTATION sensorType, MESSAGE_TYPE_SET_REQ variableType);

    /** Deletes the given mapping. */
    void delete(SensorsVariablesMap sensorsVariablesMap);

    /** Deletes the mappings registered for the given sensor type. */
    void delete(MESSAGE_TYPE_PRESENTATION sensorType);

    /** Returns all mappings registered for the given sensor type. */
    List<SensorsVariablesMap> getAll(MESSAGE_TYPE_PRESENTATION sensorType);

    /** Returns every stored mapping. */
    List<SensorsVariablesMap> getAll();

    /** Returns the stored mapping matching the given one. */
    SensorsVariablesMap get(SensorsVariablesMap sensorsVariablesMap);

    /** Returns the stored mapping for the given sensor type and variable type. */
    SensorsVariablesMap get(MESSAGE_TYPE_PRESENTATION sensorType, MESSAGE_TYPE_SET_REQ variableType);
}
| pgh70/mycontroller | modules/core/src/main/java/org/mycontroller/standalone/db/dao/SensorsVariablesMapDao.java | Java | apache-2.0 | 1,647 |
'use strict';
var storiesdb = require('nano')('http://localhost:5984/stories');
var viewDoc = {
_id: '_design/story',
language: 'javascript',
views: {
byUser: require('./story-by-user'),
byTitle: require('./story-by-title')
}
};
module.exports = {
// Generates views
init: function () {
storiesdb.insert(viewDoc, function (err) {
if (err) return console.error(err);
console.log('Successfully created story views');
});
}
};
| bananaoomarang/chapters-server | lib/couch-views/index.js | JavaScript | apache-2.0 | 481 |
(function(exports) {
  'use strict';

  // Cached DOM nodes, resolved once on the first init().
  var _settingsPanel, _closeSettingsButton, _logoutSettingsButton,
      _cleanCallsButton, _cleanUrlsButton, _videoDefaultSettings,
      _commitHashTag, _cameraDefaultSettings, _loggedAs;

  // In-memory copies of the persisted settings.
  var _isVideoDefault = true;
  var _isFrontCameraDefault = true;
  var _isSingleCamera = false;

  // asyncStorage keys under which the defaults are persisted.
  const VIDEO_SETTING = 'video-default';
  const CAMERA_SETTING = 'camera-default';

  // Shortcut for navigator.mozL10n.get, cached in init().
  var _;

  var Settings = {
    get isVideoDefault() {
      return _isVideoDefault;
    },

    // Restores every setting to its factory default, both in memory and in
    // asyncStorage.
    reset: function s_clear() {
      asyncStorage.setItem(
        VIDEO_SETTING,
        true
      );
      asyncStorage.setItem(
        CAMERA_SETTING,
        true
      );
      _isVideoDefault = true;
      _isFrontCameraDefault = true;
      _isSingleCamera = false;
    },

    // Wires up the settings panel. Safe to call repeatedly: DOM lookups and
    // listener registration only happen on the first invocation.
    init: function s_init(identity) {
      if (!_settingsPanel) {
        // Cache mozL10n functionality
        _ = navigator.mozL10n.get;
        // Cache DOM elements
        _loggedAs = document.getElementById('settings-logout-identity');
        _settingsPanel = document.getElementById('settings-panel');
        _closeSettingsButton = document.getElementById('settings-close-button');
        _logoutSettingsButton = document.getElementById('settings-logout-button');
        _cleanCallsButton = document.getElementById('settings-clean-calls-button');
        _cleanUrlsButton = document.getElementById('settings-clean-urls-button');
        _videoDefaultSettings = document.getElementById('video-default-setting');
        _cameraDefaultSettings = document.getElementById('camera-default-setting');
        _commitHashTag = document.getElementById('settings-commit-hash-tag');

        // Add listeners just once
        _cleanCallsButton.addEventListener(
          'click',
          function() {
            var options = new OptionMenu({
              // TODO Change with l10n string when ready
              section: _('deleteAllConfirmation'),
              type: 'confirm',
              items: [
                {
                  name: 'Delete',
                  class: 'danger',
                  l10nId: 'delete',
                  method: function() {
                    CallLog.cleanCalls();
                    Settings.hide();
                  },
                  params: []
                },
                {
                  name: 'Cancel',
                  l10nId: 'cancel'
                }
              ]
            });
            options.show();
          }.bind(this)
        );

        _cleanUrlsButton.addEventListener(
          'click',
          function() {
            var options = new OptionMenu({
              type: 'action',
              items: [
                {
                  name: 'Clean just revoked URLs',
                  l10nId: 'cleanJustRevoked',
                  method: function() {
                    CallLog.cleanRevokedUrls();
                    Settings.hide();
                  },
                  params: []
                },
                {
                  name: 'Clean all',
                  l10nId: 'cleanAll',
                  method: function() {
                    CallLog.cleanUrls();
                    Settings.hide();
                  },
                  params: []
                },
                {
                  name: 'Cancel',
                  l10nId: 'cancel'
                }
              ]
            });
            options.show();
          }.bind(this)
        );

        _closeSettingsButton.addEventListener(
          'click',
          this.hide.bind(this)
        );

        _logoutSettingsButton.addEventListener(
          'click',
          function onLogout() {
            LoadingOverlay.show(_('loggingOut'));
            Controller.logout();
          }.bind(this)
        );
      }

      // Set the value taking into account the identity
      // NOTE(review): identity is injected via innerHTML; if it could ever
      // contain markup this needs escaping — confirm it is sanitized upstream.
      _loggedAs.innerHTML = _(
        'loggedInAs',
        {
          username: identity || _('unknown')
        }
      );

      // Set the commit based on the version
      if (_commitHashTag && Version.id) {
        _commitHashTag.textContent = Version.id || _('unknown');
      }

      // Set the value of the default mode (video/audio)
      asyncStorage.getItem(
        VIDEO_SETTING,
        function onSettingRetrieved(isVideoDefault) {
          if (isVideoDefault === null) {
            Settings.reset();
          } else {
            _isVideoDefault = isVideoDefault;
          }
          _videoDefaultSettings.value = _isVideoDefault;
          _videoDefaultSettings.addEventListener(
            'change',
            function() {
              _isVideoDefault = _videoDefaultSettings.options[
                _videoDefaultSettings.selectedIndex
              ].value;
              asyncStorage.setItem(
                VIDEO_SETTING,
                _isVideoDefault
              );
            }
          );
        }
      );

      // Set the value of the default camera if needed.
      // BUG FIX: this condition previously used `&&`, which threw a TypeError
      // whenever navigator.mozCameras was unavailable (the right-hand side
      // still dereferenced it) and never hid the selector on single-camera
      // devices. The selector must be hidden when the camera API is missing
      // OR fewer than two cameras are present.
      if (!navigator.mozCameras || navigator.mozCameras.getListOfCameras().length < 2) {
        _isSingleCamera = true;
        _cameraDefaultSettings.parentNode.parentNode.style.display = 'none';
      } else {
        asyncStorage.getItem(
          CAMERA_SETTING,
          function onSettingRetrieved(isFrontCamera) {
            if (isFrontCamera === null) {
              Settings.reset();
            } else {
              _isFrontCameraDefault = isFrontCamera;
            }
            _cameraDefaultSettings.value = _isFrontCameraDefault;
            _cameraDefaultSettings.addEventListener(
              'change',
              function() {
                _isFrontCameraDefault = _cameraDefaultSettings.options[
                  _cameraDefaultSettings.selectedIndex
                ].value;
                asyncStorage.setItem(
                  CAMERA_SETTING,
                  _isFrontCameraDefault
                );
              }
            );
          }
        );
      }
    },

    show: function s_show() {
      if (!_settingsPanel) {
        return;
      }
      _settingsPanel.classList.add('show');
    },

    hide: function s_hide() {
      if (!_settingsPanel) {
        return;
      }
      _settingsPanel.classList.remove('show');
    },

    // The front camera is the default unless the device has a single camera.
    get isFrontalCamera() {
      return _isSingleCamera ? false : _isFrontCameraDefault;
    }
  };

  exports.Settings = Settings;

}(this));
| suoko/firefoxos-loop-client-master | js/screens/settings.js | JavaScript | apache-2.0 | 6,400 |
/*
* Copyright 2015 Cognitive Medical Systems, Inc (http://www.cognitivemedciine.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.socraticgrid.hl7.services.eps.model;
/**
* @author Jerry Goodnough
* @version 1.0
* @created 04-Jan-2014 7:26:58 PM
*/
public enum AffiliationRole {

    /** Affiliation for parties that publish to a topic. */
    Publisher,

    /** Affiliation for parties that subscribe to a topic. */
    Subscriber,

    /** Administrative affiliation. */
    Administrator,

    /** Owning party of the topic. */
    Owner,

    /** May publish but not subscribe. */
    PublishOnly,

    /** May subscribe but not publish. */
    SubscribeOnly,

    /** Presumably a party barred from the topic (cf. the XMPP pub-sub "outcast" affiliation) — TODO confirm intended semantics. */
    Outcast,

    /** Reviewing affiliation. */
    Reviewer
} | SocraticGrid/EPS-API | src/main/java/org/socraticgrid/hl7/services/eps/model/AffiliationRole.java | Java | apache-2.0 | 918 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.scheduler.simple;
import javax.sql.DataSource;
import javax.transaction.TransactionManager;
import org.apache.geronimo.connector.outbound.GenericConnectionManager;
import org.apache.geronimo.connector.outbound.connectionmanagerconfig.LocalTransactions;
import org.apache.geronimo.connector.outbound.connectionmanagerconfig.PoolingSupport;
import org.apache.geronimo.connector.outbound.connectionmanagerconfig.SinglePool;
import org.apache.geronimo.connector.outbound.connectionmanagerconfig.TransactionSupport;
import org.apache.geronimo.connector.outbound.connectiontracking.ConnectionTracker;
import org.apache.geronimo.connector.outbound.connectiontracking.ConnectionTrackingCoordinator;
import org.apache.geronimo.transaction.manager.RecoverableTransactionManager;
import org.apache.ode.utils.GUID;
import org.tranql.connector.jdbc.JDBCDriverMCF;
/**
 * Test support that backs the JDBC delegate with an in-memory H2 database
 * reached through Geronimo's pooled, transaction-aware connection manager.
 */
public class GeronimoDelegateSupport extends DelegateSupport {
    // Pool tuning: how long a caller may wait for a pooled connection, and
    // how long an idle connection is retained before being discarded.
    private static final int CONNECTION_MAX_WAIT_MILLIS = 30000;
    private static final int CONNECTION_MAX_IDLE_MINUTES = 5;

    // Kept as a field so the connection manager outlives data-source creation.
    private GenericConnectionManager _connectionManager;

    public GeronimoDelegateSupport(TransactionManager txm) throws Exception {
        super(txm);
    }

    /**
     * Creates a fresh (GUID-named) in-memory H2 data source, runs the
     * inherited setup() (defined in DelegateSupport) and builds the
     * JdbcDelegate under test.
     */
    @Override
    protected void initialize(TransactionManager txm) throws Exception {
        _ds = createGeronimoDataSource(txm, "jdbc:h2:mem:" + new GUID().toString(), org.h2.Driver.class.getName(), "sa", "");
        setup();
        _del = new JdbcDelegate(_ds);
    }

    /**
     * Wraps a plain JDBC driver in a Geronimo-managed, locally-transacted,
     * single-connection pool and returns it as a DataSource.
     */
    private DataSource createGeronimoDataSource(TransactionManager txm, String url, String driverClass, String username,String password) {
        TransactionSupport transactionSupport = LocalTransactions.INSTANCE;
        ConnectionTracker connectionTracker = new ConnectionTrackingCoordinator();
        PoolingSupport poolingSupport = new SinglePool(1, 1,
                CONNECTION_MAX_WAIT_MILLIS,
                CONNECTION_MAX_IDLE_MINUTES,
                true, // match one
                false, // match all
                false); // select one assume match
        _connectionManager = new GenericConnectionManager(
                transactionSupport,
                poolingSupport,
                null,
                connectionTracker,
                (RecoverableTransactionManager) txm,
                getClass().getName(),
                getClass().getClassLoader());

        // Configure the TranQL managed connection factory with the driver and
        // credentials, then hand back the pooled DataSource facade.
        JDBCDriverMCF mcf = new JDBCDriverMCF();
        try {
            mcf.setDriver(driverClass);
            mcf.setConnectionURL(url);
            if (username != null) {
                mcf.setUserName(username);
            }
            if (password != null) {
                mcf.setPassword(password);
            }
            _connectionManager.doStart();
            return (DataSource) mcf.createConnectionFactory(_connectionManager);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
}
| Subasinghe/ode | scheduler-simple/src/test/java/org/apache/ode/scheduler/simple/GeronimoDelegateSupport.java | Java | apache-2.0 | 3,781 |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
var assert = require('assert');
// Test includes
var testutil = require('../../../util/util');
var blobtestutil = require('../../../framework/blob-test-utils');
// Lib includes
var azure = testutil.libRequire('azure');
var Constants = azure.Constants;
var HttpConstants = Constants.HttpConstants;
var containerNames = [];
var containerNamesPrefix = 'cont';
var testPrefix = 'sharedkeylite-tests';
var blobService;
var suiteUtil;
suite('sharedkeylite-tests', function () {
  // One-time setup: create the blob service and the record/playback helpers.
  suiteSetup(function (done) {
    blobService = azure.createBlobService();
    suiteUtil = blobtestutil.createBlobTestUtils(blobService, testPrefix);
    suiteUtil.setupSuite(done);
  });

  suiteTeardown(function (done) {
    suiteUtil.teardownSuite(done);
  });

  setup(function (done) {
    suiteUtil.setupTest(done);
  });

  teardown(function (done) {
    suiteUtil.teardownTest(done);
  });

  test('CreateContainer', function (done) {
    // Force the Shared Key Lite authentication scheme — the subject of
    // this suite — instead of the default authentication provider.
    blobService.authenticationProvider = new azure.SharedKeyLite(blobService.storageAccount, blobService.storageAccessKey);

    var containerName = testutil.generateId(containerNamesPrefix, containerNames, suiteUtil.isMocked);

    blobService.createContainer(containerName, function (createError, container1, createContainerResponse) {
      assert.equal(createError, null);
      assert.notEqual(container1, null);
      if (container1) {
        assert.notEqual(container1.name, null);
        assert.notEqual(container1.etag, null);
        assert.notEqual(container1.lastModified, null);
      }

      assert.equal(createContainerResponse.statusCode, HttpConstants.HttpResponseCodes.Created);

      // creating again will result in a duplicate error
      blobService.createContainer(containerName, function (createError2, container2) {
        assert.equal(createError2.code, Constants.BlobErrorCodeStrings.CONTAINER_ALREADY_EXISTS);
        assert.equal(container2, null);

        done();
      });
    });
  });
});
| jmspring/azure-sdk-for-node | test/services/blob/internal/sharedkeylite-tests.js | JavaScript | apache-2.0 | 2,589 |
import json
from discord.ext import commands
import addons.checks
class Helper_list:
    """
    Management of active helpers.
    """
    def __init__(self, bot):
        self.bot = bot
        print('Addon "{}" loaded'.format(self.__class__.__name__))

    def _save_helpers(self):
        # Persist the shared helper map (member id -> position) so that
        # assignments survive bot restarts.
        with open("data/helpers.json", "w") as f:
            json.dump(addons.checks.helpers, f)

    @addons.checks.is_staff("Owner")
    @commands.command(pass_context=True)
    async def addhelper(self, ctx, user, position):
        """Add user as a helper. Owners only."""
        if position not in self.bot.helper_roles:
            await self.bot.say("💢 That's not a valid position. You can use __{}__".format("__, __".join(self.bot.helper_roles.keys())))
            return
        # Guard: indexing mentions[0] blindly raises IndexError when the
        # command is invoked without mentioning anyone.
        if not ctx.message.mentions:
            await self.bot.say("Please mention the user you want to add as a helper.")
            return
        member = ctx.message.mentions[0]
        addons.checks.helpers[member.id] = position
        self._save_helpers()
        await self.bot.add_roles(member, self.bot.helpers_role)
        await self.bot.say("{} is now a helper. Welcome to the party room!".format(member.mention, position))

    @addons.checks.is_staff("Owner")
    @commands.command(pass_context=True)
    async def delhelper(self, ctx, user):
        """Remove user from helpers. Owners only."""
        if not ctx.message.mentions:
            await self.bot.say("Please mention the user you want to remove from helpers.")
            return
        member = ctx.message.mentions[0]
        await self.bot.say(member.name)
        addons.checks.helpers.pop(member.id, None)
        self._save_helpers()
        await self.bot.remove_roles(member, self.bot.helpers_role, *self.bot.helper_roles.values())
        await self.bot.say("{} is no longer a helper. Stop by some time!".format(member.mention))

    @addons.checks.is_staff("Helper")
    @commands.command(pass_context=True)
    async def helpon(self, ctx):
        """Gain highlighted helping role. Only needed by Helpers."""
        author = ctx.message.author
        if author.id not in addons.checks.helpers:
            await self.bot.say("You are not listed as a helper, and can't use this.")
            return
        await self.bot.add_roles(author, self.bot.helper_roles[addons.checks.helpers[author.id]])
        await self.bot.say("{} is now actively helping.".format(author.mention))
        # Mirror the elevation into the mod-log channel for auditing.
        msg = "🚑 **Elevated: +Help**: {} | {}#{}".format(author.mention, author.name, author.discriminator)
        await self.bot.send_message(self.bot.modlogs_channel, msg)

    @addons.checks.is_staff("Helper")
    @commands.command(pass_context=True)
    async def helpoff(self, ctx):
        """Remove highlighted helping role. Only needed by Helpers."""
        author = ctx.message.author
        if author.id not in addons.checks.helpers:
            await self.bot.say("You are not listed as a helper, and can't use this.")
            return
        await self.bot.remove_roles(author, self.bot.helper_roles[addons.checks.helpers[author.id]])
        await self.bot.say("{} is no longer actively helping!".format(author.mention))
        # Mirror the de-elevation into the mod-log channel for auditing.
        msg = "👎🏻 **De-Elevated: -Help**: {} | {}#{}".format(author.mention, author.name, author.discriminator)
        await self.bot.send_message(self.bot.modlogs_channel, msg)
def setup(bot):
    # Standard discord.py extension entry point: register this cog on load.
    bot.add_cog(Helper_list(bot))
| 916253/Kurisu | addons/helper_list.py | Python | apache-2.0 | 3,142 |
package org.opendatakit.data;
/**
* Created by clarice on 9/29/15.
*/
/**
 * Immutable association between a table row and the foreground/background
 * colors (as hex strings) it should be rendered with.
 */
public final class RowColorObject {

  private final String mRowId;          // identifier of the row
  private final int mRowIndex;          // position of the row
  private final String mHexForeground;  // foreground color, hex string
  private final String mHexBackground;  // background color, hex string

  public RowColorObject(String rowId, int rowIndex, String foreground, String background) {
    mRowId = rowId;
    mRowIndex = rowIndex;
    mHexForeground = foreground;
    mHexBackground = background;
  }

  /** @return the identifier of the row this color applies to */
  public final String getRowId() {
    return mRowId;
  }

  /** @return the index of the row this color applies to */
  public final int getRowIndex() {
    return mRowIndex;
  }

  /** @return the foreground color as a hex string */
  public final String getForegroundColor() {
    return mHexForeground;
  }

  /** @return the background color as a hex string */
  public final String getBackgroundColor() {
    return mHexBackground;
  }
}
| opendatakit/androidcommon | androidcommon_lib/src/main/java/org/opendatakit/data/RowColorObject.java | Java | apache-2.0 | 746 |