code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.visor.verify;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import org.apache.ignite.internal.processors.cache.verify.PartitionKey;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.visor.VisorDataTransferObject;
import org.jetbrains.annotations.NotNull;

/**
 * Per-node result of an indexes validation job: partition-level results,
 * reverse-index results and integrity check failures collected on one node.
 */
public class VisorValidateIndexesJobResult extends VisorDataTransferObject {
    /** */
    private static final long serialVersionUID = 0L;

    /** Results of indexes validation from node. */
    private Map<PartitionKey, ValidateIndexesPartitionResult> partRes;

    /** Results of reverse indexes validation from node. */
    private Map<String, ValidateIndexesPartitionResult> idxRes;

    /** Integrity check issues. */
    private Collection<IndexIntegrityCheckIssue> integrityCheckFailures;

    /**
     * @param partRes Results of indexes validation from node.
     * @param idxRes Results of reverse indexes validation from node.
     * @param integrityCheckFailures Collection of indexes integrity check failures.
     */
    public VisorValidateIndexesJobResult(
        @NotNull Map<PartitionKey, ValidateIndexesPartitionResult> partRes,
        @NotNull Map<String, ValidateIndexesPartitionResult> idxRes,
        @NotNull Collection<IndexIntegrityCheckIssue> integrityCheckFailures
    ) {
        this.partRes = partRes;
        this.idxRes = idxRes;
        this.integrityCheckFailures = integrityCheckFailures;
    }

    /**
     * For externalization only.
     */
    public VisorValidateIndexesJobResult() {
    }

    /** {@inheritDoc} */
    @Override public byte getProtocolVersion() {
        return V3;
    }

    /**
     * @return Results of indexes validation from node.
     */
    public Map<PartitionKey, ValidateIndexesPartitionResult> partitionResult() {
        return partRes;
    }

    /**
     * @return Results of reverse indexes validation from node (never {@code null}).
     */
    public Map<String, ValidateIndexesPartitionResult> indexResult() {
        if (idxRes == null)
            return Collections.emptyMap();

        return idxRes;
    }

    /**
     * @return Collection of failed integrity checks (never {@code null}).
     */
    public Collection<IndexIntegrityCheckIssue> integrityCheckFailures() {
        if (integrityCheckFailures == null)
            return Collections.emptyList();

        return integrityCheckFailures;
    }

    /**
     * @return {@code true} If any indexes issues found on node, otherwise returns {@code false}.
     */
    public boolean hasIssues() {
        // Any integrity check failure is an issue by itself.
        if (integrityCheckFailures != null && !integrityCheckFailures.isEmpty())
            return true;

        // Otherwise look for a partition or reverse-index result that reported issues.
        if (partRes != null && partRes.values().stream().anyMatch(res -> !res.issues().isEmpty()))
            return true;

        return idxRes != null && idxRes.values().stream().anyMatch(res -> !res.issues().isEmpty());
    }

    /** {@inheritDoc} */
    @Override protected void writeExternalData(ObjectOutput out) throws IOException {
        U.writeMap(out, partRes);
        U.writeMap(out, idxRes);
        U.writeCollection(out, integrityCheckFailures);
    }

    /** {@inheritDoc} */
    @Override protected void readExternalData(byte protoVer, ObjectInput in) throws IOException, ClassNotFoundException {
        partRes = U.readMap(in);

        // Fields below were added in later protocol versions; older senders omit them.
        if (protoVer >= V2)
            idxRes = U.readMap(in);

        if (protoVer >= V3)
            integrityCheckFailures = U.readCollection(in);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(VisorValidateIndexesJobResult.class, this);
    }
}
ptupitsyn/ignite
modules/core/src/main/java/org/apache/ignite/internal/visor/verify/VisorValidateIndexesJobResult.java
Java
apache-2.0
4,517
/* * (c) Copyright 2021 Micro Focus * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License v2.0 which accompany this distribution. * * The Apache License is available at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cloudslang.content.active_directory.entities; import java.util.List; public interface CreateUserInputInterface { String getHost(); String getDistinguishedName(); String getUserCommonName(); String getUserPassword(); String getSAMAccountName(); String getUsername(); String getPassword(); String getProtocol(); Boolean getTrustAllRoots(); String getTrustKeystore(); String getTrustPassword(); Boolean getEscapeChars(); String getTimeout(); String getProxyHost(); int getProxyPort(); String getProxyUsername(); String getProxyPassword(); String getX509HostnameVerifier(); String getTlsVersion(); List<String> getAllowedCiphers(); }
CloudSlang/cs-actions
cs-active-directory/src/main/java/io/cloudslang/content/active_directory/entities/CreateUserInputInterface.java
Java
apache-2.0
1,364
/*
Copyright 2019 The Vitess Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
This test makes sure encrypted transport over gRPC works.

The security chains are setup the following way:

* root CA
* vttablet server CA
* vttablet server instance cert/key
* vttablet client CA
* vttablet client 1 cert/key
* vtgate server CA
* vtgate server instance cert/key (common name is 'localhost')
* vtgate client CA
* vtgate client 1 cert/key
* vtgate client 2 cert/key

The following table shows all the checks we perform:
process:             will check its peer is signed by:  for link:

vttablet             vttablet client CA                 vtgate -> vttablet
vtgate               vttablet server CA                 vtgate -> vttablet

vtgate               vtgate client CA                   client -> vtgate
client               vtgate server CA                   client -> vtgate

Additionally, we have the following constraints:
- the client certificate common name is used as immediate
caller ID by vtgate, and forwarded to vttablet. This allows us to use
table ACLs on the vttablet side.
- the vtgate server certificate common name is set to 'localhost' so it matches
the hostname dialed by the vtgate clients. This is not a requirement for the
go client, that can set its expected server name. However, the python gRPC
client doesn't have the ability to set the server name, so they must match.
- the python client needs to have the full chain for the server validation
(that is 'vtgate server CA' + 'root CA'). A go client doesn't. So we read both
below when using the python client, but we only pass the intermediate cert
to the go clients (for vtgate -> vttablet link).
*/

package encryptedtransport

import (
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"os/exec"
	"path"
	"testing"

	"vitess.io/vitess/go/test/endtoend/encryption"
	"vitess.io/vitess/go/vt/proto/vtrpc"
	"vitess.io/vitess/go/vt/vterrors"

	"context"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"vitess.io/vitess/go/test/endtoend/cluster"
	"vitess.io/vitess/go/vt/grpcclient"
	"vitess.io/vitess/go/vt/log"
	querypb "vitess.io/vitess/go/vt/proto/query"
	vtgatepb "vitess.io/vitess/go/vt/proto/vtgate"
	vtgateservicepb "vitess.io/vitess/go/vt/proto/vtgateservice"
)

var (
	clusterInstance *cluster.LocalProcessCluster

	createVtInsertTest = `create table vt_insert_test (
		id bigint auto_increment,
		msg varchar(64),
		keyspace_id bigint(20) unsigned NOT NULL,
		primary key (id)
		) Engine = InnoDB`

	keyspace  = "test_keyspace"
	hostname  = "localhost"
	shardName = "0"
	cell      = "zone1"

	// certDirectory is filled in by clusterSetUp; all certs/keys live there.
	certDirectory string

	// Current client-side TLS material used by getVitessClient; mutated by
	// setCreds / setSSLInfoEmpty between test phases.
	grpcCert = ""
	grpcKey  = ""
	grpcCa   = ""
	grpcName = ""
)

// TestSecureTransport drives the whole scenario: bring up a TLS-secured
// cluster, verify table-ACL enforcement keyed off client cert common names,
// then restart vtgate without client TLS and verify effective-caller-id
// based ACL enforcement instead.
func TestSecureTransport(t *testing.T) {
	defer cluster.PanicHandler(t)
	flag.Parse()

	// initialize cluster
	_, err := clusterSetUp(t)
	require.Nil(t, err, "setup failed")

	masterTablet := *clusterInstance.Keyspaces[0].Shards[0].Vttablets[0]
	replicaTablet := *clusterInstance.Keyspaces[0].Shards[0].Vttablets[1]

	// creating table_acl_config.json file
	tableACLConfigJSON := path.Join(certDirectory, "table_acl_config.json")
	f, err := os.Create(tableACLConfigJSON)
	require.NoError(t, err)

	// Only 'vtgate client 1' (a cert common name) is allowed on vt_insert_test.
	_, err = f.WriteString(`{
	"table_groups": [
	{
		"table_names_or_prefixes": ["vt_insert_test"],
		"readers": ["vtgate client 1"],
		"writers": ["vtgate client 1"],
		"admins": ["vtgate client 1"]
	}
	]
}`)
	require.NoError(t, err)
	err = f.Close()
	require.NoError(t, err)

	// start the tablets
	for _, tablet := range []cluster.Vttablet{masterTablet, replicaTablet} {
		tablet.VttabletProcess.ExtraArgs = append(tablet.VttabletProcess.ExtraArgs, "-table-acl-config", tableACLConfigJSON, "-queryserver-config-strict-table-acl")
		tablet.VttabletProcess.ExtraArgs = append(tablet.VttabletProcess.ExtraArgs, serverExtraArguments("vttablet-server-instance", "vttablet-client")...)
		err = tablet.VttabletProcess.Setup()
		require.NoError(t, err)
	}

	// setup replication
	var vtctlClientArgs []string

	vtctlClientTmArgs := append(vtctlClientArgs, tmclientExtraArgs("vttablet-client-1")...)

	// Reparenting
	vtctlClientArgs = append(vtctlClientTmArgs, "InitShardMaster", "-force", "test_keyspace/0", masterTablet.Alias)
	err = clusterInstance.VtctlProcess.ExecuteCommand(vtctlClientArgs...)
	require.NoError(t, err)

	// Apply schema
	var vtctlApplySchemaArgs = append(vtctlClientTmArgs, "ApplySchema", "-sql", createVtInsertTest, "test_keyspace")
	err = clusterInstance.VtctlProcess.ExecuteCommand(vtctlApplySchemaArgs...)
	require.NoError(t, err)

	for _, tablet := range []cluster.Vttablet{masterTablet, replicaTablet} {
		var vtctlTabletArgs []string
		vtctlTabletArgs = append(vtctlTabletArgs, tmclientExtraArgs("vttablet-client-1")...)
		vtctlTabletArgs = append(vtctlTabletArgs, "RunHealthCheck", tablet.Alias)
		_, err = clusterInstance.VtctlProcess.ExecuteCommandWithOutput(vtctlTabletArgs...)
		require.NoError(t, err)
	}

	// start vtgate
	clusterInstance.VtGateExtraArgs = append(clusterInstance.VtGateExtraArgs, tabletConnExtraArgs("vttablet-client-1")...)
	clusterInstance.VtGateExtraArgs = append(clusterInstance.VtGateExtraArgs, serverExtraArguments("vtgate-server-instance", "vtgate-client")...)
	err = clusterInstance.StartVtgate()
	require.NoError(t, err)

	grpcAddress := fmt.Sprintf("%s:%d", "localhost", clusterInstance.VtgateProcess.GrpcPort)

	// 'vtgate client 1' is authorized to access vt_insert_test
	setCreds(t, "vtgate-client-1", "vtgate-server")
	ctx := context.Background()
	request := getRequest("select * from vt_insert_test")
	vc, err := getVitessClient(grpcAddress)
	require.NoError(t, err)

	qr, err := vc.Execute(ctx, request)
	require.NoError(t, err)
	err = vterrors.FromVTRPC(qr.Error)
	require.NoError(t, err)

	// 'vtgate client 2' is not authorized to access vt_insert_test
	setCreds(t, "vtgate-client-2", "vtgate-server")
	request = getRequest("select * from vt_insert_test")
	vc, err = getVitessClient(grpcAddress)
	require.NoError(t, err)
	qr, err = vc.Execute(ctx, request)
	require.NoError(t, err)
	err = vterrors.FromVTRPC(qr.Error)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "table acl error")
	assert.Contains(t, err.Error(), "cannot run Select on table")

	// now restart vtgate in the mode where we don't use SSL
	// for client connections, but we copy effective caller id
	// into immediate caller id.
	clusterInstance.VtGateExtraArgs = []string{"-grpc_use_effective_callerid"}
	clusterInstance.VtGateExtraArgs = append(clusterInstance.VtGateExtraArgs, tabletConnExtraArgs("vttablet-client-1")...)
	err = clusterInstance.RestartVtgate()
	require.NoError(t, err)

	grpcAddress = fmt.Sprintf("%s:%d", "localhost", clusterInstance.VtgateProcess.GrpcPort)

	setSSLInfoEmpty()

	// get vitess client
	vc, err = getVitessClient(grpcAddress)
	require.NoError(t, err)

	// test with empty effective caller Id
	request = getRequest("select * from vt_insert_test")
	qr, err = vc.Execute(ctx, request)
	require.NoError(t, err)
	err = vterrors.FromVTRPC(qr.Error)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "table acl error")
	assert.Contains(t, err.Error(), "cannot run Select on table")

	// 'vtgate client 1' is authorized to access vt_insert_test
	callerID := &vtrpc.CallerID{
		Principal: "vtgate client 1",
	}
	request = getRequestWithCallerID(callerID, "select * from vt_insert_test")
	qr, err = vc.Execute(ctx, request)
	require.NoError(t, err)
	err = vterrors.FromVTRPC(qr.Error)
	require.NoError(t, err)

	// 'vtgate client 2' is not authorized to access vt_insert_test
	callerID = &vtrpc.CallerID{
		Principal: "vtgate client 2",
	}
	request = getRequestWithCallerID(callerID, "select * from vt_insert_test")
	qr, err = vc.Execute(ctx, request)
	require.NoError(t, err)
	err = vterrors.FromVTRPC(qr.Error)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "table acl error")
	assert.Contains(t, err.Error(), "cannot run Select on table")

	clusterInstance.Teardown()
}

// clusterSetUp starts the topo server, generates the whole certificate
// hierarchy, and brings up mysqld + vttablet pairs for the test keyspace.
// Returns a non-zero code together with the error on failure.
func clusterSetUp(t *testing.T) (int, error) {
	var mysqlProcesses []*exec.Cmd
	clusterInstance = cluster.NewCluster(cell, hostname)

	// Start topo server
	if err := clusterInstance.StartTopo(); err != nil {
		return 1, err
	}

	// create all certs
	log.Info("Creating certificates")
	certDirectory = path.Join(clusterInstance.TmpDirectory, "certs")
	_ = encryption.CreateDirectory(certDirectory, 0700)

	err := encryption.ExecuteVttlstestCommand("-root", certDirectory, "CreateCA")
	require.NoError(t, err)

	// Intermediate CAs signed by the root, then leaf certs signed by each CA.
	err = createSignedCert("ca", "01", "vttablet-server", "vttablet server CA")
	require.NoError(t, err)

	err = createSignedCert("ca", "02", "vttablet-client", "vttablet client CA")
	require.NoError(t, err)

	err = createSignedCert("ca", "03", "vtgate-server", "vtgate server CA")
	require.NoError(t, err)

	err = createSignedCert("ca", "04", "vtgate-client", "vtgate client CA")
	require.NoError(t, err)

	err = createSignedCert("vttablet-server", "01", "vttablet-server-instance", "vttablet server instance")
	require.NoError(t, err)

	err = createSignedCert("vttablet-client", "01", "vttablet-client-1", "vttablet client 1")
	require.NoError(t, err)

	// common name 'localhost' so go clients dialing localhost validate it.
	err = createSignedCert("vtgate-server", "01", "vtgate-server-instance", "localhost")
	require.NoError(t, err)

	err = createSignedCert("vtgate-client", "01", "vtgate-client-1", "vtgate client 1")
	require.NoError(t, err)

	err = createSignedCert("vtgate-client", "02", "vtgate-client-2", "vtgate client 2")
	require.NoError(t, err)

	for _, keyspaceStr := range []string{keyspace} {
		KeyspacePtr := &cluster.Keyspace{Name: keyspaceStr}
		keyspace := *KeyspacePtr
		if err := clusterInstance.VtctlProcess.CreateKeyspace(keyspace.Name); err != nil {
			return 1, err
		}
		shard := &cluster.Shard{
			Name: shardName,
		}
		for i := 0; i < 2; i++ {
			// instantiate vttablet object with reserved ports
			tablet := clusterInstance.NewVttabletInstance("replica", 0, cell)

			// Start Mysqlctl process
			tablet.MysqlctlProcess = *cluster.MysqlCtlProcessInstance(tablet.TabletUID, tablet.MySQLPort, clusterInstance.TmpDirectory)
			proc, err := tablet.MysqlctlProcess.StartProcess()
			if err != nil {
				return 1, err
			}
			mysqlProcesses = append(mysqlProcesses, proc)
			// start vttablet process
			tablet.VttabletProcess = cluster.VttabletProcessInstance(tablet.HTTPPort,
				tablet.GrpcPort,
				tablet.TabletUID,
				clusterInstance.Cell,
				shardName,
				keyspace.Name,
				clusterInstance.VtctldProcess.Port,
				tablet.Type,
				clusterInstance.TopoProcess.Port,
				clusterInstance.Hostname,
				clusterInstance.TmpDirectory,
				clusterInstance.VtTabletExtraArgs,
				clusterInstance.EnableSemiSync)
			tablet.Alias = tablet.VttabletProcess.TabletPath
			shard.Vttablets = append(shard.Vttablets, tablet)
		}
		keyspace.Shards = append(keyspace.Shards, *shard)
		clusterInstance.Keyspaces = append(clusterInstance.Keyspaces, keyspace)
	}

	// Wait for all mysqld bootstraps to finish before returning.
	for _, proc := range mysqlProcesses {
		err := proc.Wait()
		if err != nil {
			return 1, err
		}
	}
	return 0, nil
}

// createSignedCert shells out to vttlstest to create a cert/key pair named
// `name`, signed by parent CA `ca`, with the given serial and common name.
func createSignedCert(ca string, serial string, name string, commonName string) error {
	log.Infof("Creating signed cert and key %s", commonName)
	tmpProcess := exec.Command(
		"vttlstest",
		"-root", certDirectory,
		"CreateSignedCert",
		"-parent", ca,
		"-serial", serial,
		"-common_name", commonName,
		name)
	return tmpProcess.Run()
}

// serverExtraArguments builds the -grpc_cert/-grpc_key/-grpc_ca flags for a
// server process using cert `name` and verifying clients against CA `ca`.
func serverExtraArguments(name string, ca string) []string {
	args := []string{"-grpc_cert", certDirectory + "/" + name + "-cert.pem",
		"-grpc_key", certDirectory + "/" + name + "-key.pem",
		"-grpc_ca", certDirectory + "/" + ca + "-cert.pem"}
	return args
}

// tmclientExtraArgs builds tablet-manager client TLS flags for cert `name`,
// expecting the vttablet server cert ('vttablet server instance').
func tmclientExtraArgs(name string) []string {
	ca := "vttablet-server"
	var args = []string{"-tablet_manager_grpc_cert", certDirectory + "/" + name + "-cert.pem",
		"-tablet_manager_grpc_key", certDirectory + "/" + name + "-key.pem",
		"-tablet_manager_grpc_ca", certDirectory + "/" + ca + "-cert.pem",
		"-tablet_manager_grpc_server_name", "vttablet server instance"}
	return args
}

// tabletConnExtraArgs builds the vtgate -> vttablet connection TLS flags for
// client cert `name`.
func tabletConnExtraArgs(name string) []string {
	ca := "vttablet-server"
	args := []string{"-tablet_grpc_cert", certDirectory + "/" + name + "-cert.pem",
		"-tablet_grpc_key", certDirectory + "/" + name + "-key.pem",
		"-tablet_grpc_ca", certDirectory + "/" + ca + "-cert.pem",
		"-tablet_grpc_server_name", "vttablet server instance"}
	return args
}

// getVitessClient dials vtgate at addr using the TLS material currently held
// in the grpcCert/grpcKey/grpcCa/grpcName globals.
func getVitessClient(addr string) (vtgateservicepb.VitessClient, error) {
	opt, err := grpcclient.SecureDialOption(grpcCert, grpcKey, grpcCa, grpcName)
	if err != nil {
		return nil, err
	}
	cc, err := grpcclient.Dial(addr, grpcclient.FailFast(false), opt)
	if err != nil {
		return nil, err
	}
	c := vtgateservicepb.NewVitessClient(cc)
	return c, nil
}

// setCreds points the grpc* globals at client cert `name` and builds a CA
// bundle file containing root CA + `ca` cert (full chain, see file comment).
func setCreds(t *testing.T, name string, ca string) {
	f1, err := os.Open(path.Join(certDirectory, "ca-cert.pem"))
	require.NoError(t, err)
	b1, err := ioutil.ReadAll(f1)
	require.NoError(t, err)
	f2, err := os.Open(path.Join(certDirectory, ca+"-cert.pem"))
	require.NoError(t, err)
	b2, err := ioutil.ReadAll(f2)
	require.NoError(t, err)

	caContent := append(b1, b2...)
	fileName := "ca-" + name + ".pem"
	caVtgateClient := path.Join(certDirectory, fileName)
	f, err := os.Create(caVtgateClient)
	require.NoError(t, err)
	_, err = f.Write(caContent)
	require.NoError(t, err)

	grpcCa = caVtgateClient
	grpcKey = path.Join(certDirectory, name+"-key.pem")
	grpcCert = path.Join(certDirectory, name+"-cert.pem")

	err = f.Close()
	require.NoError(t, err)
	err = f2.Close()
	require.NoError(t, err)
	err = f1.Close()
	require.NoError(t, err)
}

// setSSLInfoEmpty clears all client TLS state so subsequent dials are
// plaintext.
func setSSLInfoEmpty() {
	grpcCa = ""
	grpcCert = ""
	grpcKey = ""
	grpcName = ""
}

// getSession returns a session pinned to the master of test_keyspace/0.
func getSession() *vtgatepb.Session {
	return &vtgatepb.Session{
		TargetString: "test_keyspace:0@master",
	}
}

// getRequestWithCallerID builds an ExecuteRequest carrying an explicit
// effective caller ID (used when vtgate runs with -grpc_use_effective_callerid).
func getRequestWithCallerID(callerID *vtrpc.CallerID, sql string) *vtgatepb.ExecuteRequest {
	session := getSession()
	return &vtgatepb.ExecuteRequest{
		CallerId: callerID,
		Session:  session,
		Query: &querypb.BoundQuery{
			Sql: sql,
		},
	}
}

// getRequest builds an ExecuteRequest with no caller ID.
func getRequest(sql string) *vtgatepb.ExecuteRequest {
	session := getSession()
	return &vtgatepb.ExecuteRequest{
		Session: session,
		Query: &querypb.BoundQuery{
			Sql: sql,
		},
	}
}
mahak/vitess
go/test/endtoend/encryption/encryptedtransport/encrypted_transport_test.go
GO
apache-2.0
14,700
import os import logging from mongodb_consistent_backup.Common import LocalCommand from mongodb_consistent_backup.Pipeline import PoolThread class TarThread(PoolThread): def __init__(self, backup_dir, output_file, compression='none', verbose=False, binary="tar"): super(TarThread, self).__init__(self.__class__.__name__, compression) self.compression_method = compression self.backup_dir = backup_dir self.output_file = output_file self.verbose = verbose self.binary = binary self._command = None def close(self, exit_code=None, frame=None): if self._command and not self.stopped: logging.debug("Stopping running tar command: %s" % self._command.command) del exit_code del frame self._command.close() self.stopped = True def run(self): if os.path.isdir(self.backup_dir): if not os.path.isfile(self.output_file): try: backup_base_dir = os.path.dirname(self.backup_dir) backup_base_name = os.path.basename(self.backup_dir) log_msg = "Archiving directory: %s" % self.backup_dir cmd_flags = ["-C", backup_base_dir, "-c", "-f", self.output_file, "--remove-files"] if self.do_gzip(): log_msg = "Archiving and compressing directory: %s" % self.backup_dir cmd_flags.append("-z") cmd_flags.append(backup_base_name) logging.info(log_msg) self.running = True self._command = LocalCommand(self.binary, cmd_flags, self.verbose) self.exit_code = self._command.run() except Exception, e: return self.result(False, "Failed archiving file: %s!" % self.output_file, e) finally: self.running = False self.stopped = True self.completed = True else: return self.result(False, "Output file: %s already exists!" % self.output_file, None) return self.result(True, "Archiving successful.", None) def result(self, success, message, error): return { "success": success, "message": message, "error": error, "directory": self.backup_dir, "exit_code": self.exit_code }
Percona-Lab/mongodb_consistent_backup
mongodb_consistent_backup/Archive/Tar/TarThread.py
Python
apache-2.0
2,555
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <p>
 */
package org.olat.presentation.group.area;

import java.util.Iterator;
import java.util.List;

import org.apache.commons.lang.StringEscapeUtils;
import org.olat.data.group.BusinessGroup;
import org.olat.data.group.area.BGArea;
import org.olat.data.group.area.BGAreaDao;
import org.olat.data.group.area.BGAreaDaoImpl;
import org.olat.data.group.context.BGContext;
import org.olat.data.group.context.BGContextDao;
import org.olat.data.group.context.BGContextDaoImpl;
import org.olat.lms.activitylogging.LoggingResourceable;
import org.olat.lms.activitylogging.ThreadLocalUserActivityLogger;
import org.olat.lms.group.BusinessGroupService;
import org.olat.lms.group.GroupLoggingAction;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.Component;
import org.olat.presentation.framework.core.components.choice.Choice;
import org.olat.presentation.framework.core.components.tabbedpane.TabbedPane;
import org.olat.presentation.framework.core.components.velocity.VelocityContainer;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.presentation.framework.core.control.controller.BasicController;
import org.olat.presentation.framework.core.translator.PackageTranslator;
import org.olat.presentation.framework.core.translator.PackageUtil;
import org.olat.presentation.framework.core.translator.Translator;
import org.olat.system.event.Event;
import org.olat.system.spring.CoreSpringFactory;

/**
 * Description:<BR>
 * This controller can be used to edit the business grou area metadata and associate business groups to the business group area.
 * <P>
 * Initial Date: Aug 30, 2004
 *
 * @author gnaegi
 */
public class BGAreaEditController extends BasicController {

    private static final String PACKAGE = PackageUtil.getPackageName(BGAreaEditController.class);
    private static final String VELOCITY_ROOT = PackageUtil.getPackageVelocityRoot(PACKAGE);

    // helpers
    private final Translator trans;
    // GUI components
    private final TabbedPane tabbedPane;
    private VelocityContainer editVC, detailsTabVC, groupsTabVC;
    private BGAreaFormController areaController;
    private GroupsToAreaDataModel groupsDataModel;
    private Choice groupsChoice;
    // area, context and group references
    private BGArea area;
    private final BGContext bgContext;
    // NOTE(review): raw List types — elements appear to be BusinessGroup; confirm against GroupsToAreaDataModel.
    private List allGroups, inAreaGroups;
    // managers
    private final BGAreaDao areaManager;
    private final BGContextDao contextManager;

    /**
     * Constructor for the business group area edit controller
     *
     * @param ureq
     *            The user request
     * @param wControl
     *            The window control
     * @param area
     *            The business group area
     */
    public BGAreaEditController(final UserRequest ureq, final WindowControl wControl, final BGArea area) {
        super(ureq, wControl);

        this.trans = new PackageTranslator(PACKAGE, ureq.getLocale());
        this.area = area;
        this.areaManager = BGAreaDaoImpl.getInstance();
        this.bgContext = area.getGroupContext();
        this.contextManager = BGContextDaoImpl.getInstance();

        // tabbed pane
        tabbedPane = new TabbedPane("tabbedPane", ureq.getLocale());
        tabbedPane.addListener(this);
        // details tab
        initAndAddDetailsTab(ureq, wControl);
        // groups tab
        initAndAddGroupsTab();
        // initialize main view
        initEditVC();
        putInitialPanel(this.editVC);
    }

    /**
     * initialize the main velocity wrapper container
     */
    private void initEditVC() {
        editVC = new VelocityContainer("edit", VELOCITY_ROOT + "/edit.html", trans, this);
        editVC.put("tabbedpane", tabbedPane);
        // area name is HTML-escaped before being placed into the page title
        editVC.contextPut("title", trans.translate("area.edit.title", new String[] { StringEscapeUtils.escapeHtml(this.area.getName()).toString() }));
    }

    /**
     * initialize the area details tab
     */
    private void initAndAddDetailsTab(final UserRequest ureq, final WindowControl wControl) {
        this.detailsTabVC = new VelocityContainer("detailstab", VELOCITY_ROOT + "/detailstab.html", this.trans, this);
        // TODO:pb: refactor BGControllerFactory.create..AreaController to be
        // usefull here
        if (this.areaController != null) {
            removeAsListenerAndDispose(this.areaController);
        }
        this.areaController = new BGAreaFormController(ureq, wControl, this.area, false);
        listenTo(this.areaController);
        this.detailsTabVC.put("areaForm", this.areaController.getInitialComponent());
        this.tabbedPane.addTab(this.trans.translate("tab.details"), this.detailsTabVC);
    }

    /**
     * initalize the group to area association tab
     */
    private void initAndAddGroupsTab() {
        groupsTabVC = new VelocityContainer("groupstab", VELOCITY_ROOT + "/groupstab.html", trans, this);
        tabbedPane.addTab(trans.translate("tab.groups"), groupsTabVC);

        this.allGroups = contextManager.getGroupsOfBGContext(this.bgContext);
        this.inAreaGroups = areaManager.findBusinessGroupsOfArea(this.area);
        this.groupsDataModel = new GroupsToAreaDataModel(this.allGroups, this.inAreaGroups);

        groupsChoice = new Choice("groupsChoice", trans);
        groupsChoice.setSubmitKey("submit");
        groupsChoice.setCancelKey("cancel");
        groupsChoice.setTableDataModel(groupsDataModel);
        groupsChoice.addListener(this);
        groupsTabVC.put(groupsChoice);
        groupsTabVC.contextPut("noGroupsFound", (allGroups.size() > 0 ? Boolean.FALSE : Boolean.TRUE));
    }

    /**
	 */
    @Override
    protected void event(final UserRequest ureq, final Component source, final Event event) {
        if (source == this.groupsChoice) {
            if (event == Choice.EVNT_VALIDATION_OK) {
                doUpdateGroupAreaRelations();
                // do logging
                if (this.inAreaGroups.size() == 0) {
                    ThreadLocalUserActivityLogger.log(GroupLoggingAction.BGAREA_UPDATED_NOW_EMPTY, getClass());
                } else {
                    for (final Iterator it = inAreaGroups.iterator(); it.hasNext();) {
                        final BusinessGroup aGroup = (BusinessGroup) it.next();
                        ThreadLocalUserActivityLogger.log(GroupLoggingAction.BGAREA_UPDATED_MEMBER_GROUP, getClass(), LoggingResourceable.wrap(aGroup));
                    }
                }
            }
        }
    }

    @Override
    protected void event(final UserRequest ureq, final Controller source, final Event event) {
        if (source == this.areaController) {
            if (event == Event.DONE_EVENT) {
                final BGArea updatedArea = doAreaUpdate();
                if (updatedArea == null) {
                    // null signals a name collision: restore form and warn
                    this.areaController.resetAreaName();
                    getWindowControl().setWarning(this.trans.translate("error.area.name.exists"));
                } else {
                    this.area = updatedArea;
                    this.editVC.contextPut("title", this.trans.translate("area.edit.title", new String[] { StringEscapeUtils.escapeHtml(this.area.getName()).toString() }));
                }
            } else if (event == Event.CANCELLED_EVENT) {
                // area might have been changed, reload from db
                this.area = this.areaManager.reloadArea(this.area);
                // TODO:pb: refactor BGControllerFactory.create..AreaController to be
                // usefull here
                if (this.areaController != null) {
                    removeAsListenerAndDispose(this.areaController);
                }
                this.areaController = new BGAreaFormController(ureq, getWindowControl(), this.area, false);
                listenTo(this.areaController);
                this.detailsTabVC.put("areaForm", this.areaController.getInitialComponent());
            }
        }
    }

    /**
     * Update a group area
     *
     * @return the updated area
     */
    public BGArea doAreaUpdate() {
        this.area.setName(this.areaController.getAreaName());
        this.area.setDescription(this.areaController.getAreaDescription());
        return this.areaManager.updateBGArea(this.area);
    }

    /**
     * Update the groups associated to this area: remove and add groups
     */
    private void doUpdateGroupAreaRelations() {
        BusinessGroupService businessGroupService = (BusinessGroupService) CoreSpringFactory.getBean(BusinessGroupService.class);
        // 1) add groups to area
        final List addedGroups = groupsChoice.getAddedRows();
        Iterator iterator = addedGroups.iterator();
        while (iterator.hasNext()) {
            final Integer position = (Integer) iterator.next();
            BusinessGroup group = groupsDataModel.getGroup(position.intValue());
            // refresh group to prevent stale object exception and context proxy
            // issues
            group = businessGroupService.loadBusinessGroup(group);
            // refresh group also in table model
            this.allGroups.set(position.intValue(), group);
            // add group now to area and update in area group list
            areaManager.addBGToBGArea(group, area);
            this.inAreaGroups.add(group);
        }
        // 2) remove groups from area
        final List removedGroups = groupsChoice.getRemovedRows();
        iterator = removedGroups.iterator();
        while (iterator.hasNext()) {
            final Integer position = (Integer) iterator.next();
            final BusinessGroup group = groupsDataModel.getGroup(position.intValue());
            areaManager.removeBGFromArea(group, area);
            this.inAreaGroups.remove(group);
        }
    }

    /**
	 */
    @Override
    protected void doDispose() {
        // don't dispose anything
    }
}
huihoo/olat
olat7.8/src/main/java/org/olat/presentation/group/area/BGAreaEditController.java
Java
apache-2.0
10,780
package org.hyperimage.connector.fedora3.ws;

import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;

/**
 * This object contains factory methods for each Java content interface and
 * Java element interface generated in the org.hyperimage.connector.fedora3.ws
 * package.
 *
 * <p>An ObjectFactory allows you to programatically construct new instances
 * of the Java representation for XML content (schema-derived classes and
 * JAXBElement wrappers for element declarations).
 *
 * <p>NOTE(review): this class follows the standard JAXB/xjc generated-code
 * pattern — it is presumably regenerated from the service WSDL/schema, so
 * hand edits here are likely to be overwritten; confirm before modifying.
 */
@XmlRegistry
public class ObjectFactory {

    // QNames for the two namespaces used by the service: the generic connector
    // namespace (simple string elements) and the Fedora-3 connector namespace
    // (operation request/response and fault elements).
    private final static QName _AssetURN_QNAME = new QName("http://connector.ws.hyperimage.org/", "assetURN");
    private final static QName _Token_QNAME = new QName("http://connector.ws.hyperimage.org/", "token");
    private final static QName _GetAssetPreviewDataResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetPreviewDataResponse");
    private final static QName _ParentURN_QNAME = new QName("http://connector.ws.hyperimage.org/", "parentURN");
    private final static QName _Username_QNAME = new QName("http://connector.ws.hyperimage.org/", "username");
    private final static QName _GetAssetData_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetData");
    private final static QName _GetAssetPreviewData_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetPreviewData");
    private final static QName _GetHierarchyLevelResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getHierarchyLevelResponse");
    private final static QName _Authenticate_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "authenticate");
    private final static QName _HIWSLoggedException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSLoggedException");
    private final static QName _GetMetadataRecord_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getMetadataRecord");
    private final static QName _HIWSNotBinaryException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSNotBinaryException");
    private final static QName _Session_QNAME = new QName("http://connector.ws.hyperimage.org/", "session");
    private final static QName _HIWSDCMetadataException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSDCMetadataException");
    private final static QName _HIWSAuthException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSAuthException");
    private final static QName _HIWSAssetNotFoundException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSAssetNotFoundException");
    private final static QName _GetWSVersion_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getWSVersion");
    private final static QName _GetMetadataRecordResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getMetadataRecordResponse");
    private final static QName _HIWSUTF8EncodingException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSUTF8EncodingException");
    private final static QName _GetWSVersionResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getWSVersionResponse");
    private final static QName _GetReposInfo_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getReposInfo");
    private final static QName _HIWSXMLParserException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSXMLParserException");
    private final static QName _AuthenticateResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "authenticateResponse");
    private final static QName _GetAssetDataResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetDataResponse");
    private final static QName _GetHierarchyLevel_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getHierarchyLevel");
    private final static QName _GetReposInfoResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getReposInfoResponse");
    // Unqualified local element "return" scoped inside getAssetPreviewDataResponse.
    private final static QName _GetAssetPreviewDataResponseReturn_QNAME = new QName("", "return");

    /**
     * Create a new ObjectFactory that can be used to create new instances of
     * schema derived classes for package: org.hyperimage.connector.fedora3.ws
     */
    public ObjectFactory() {
    }

    /** Create an instance of {@link HIWSDCMetadataException }. */
    public HIWSDCMetadataException createHIWSDCMetadataException() {
        return new HIWSDCMetadataException();
    }

    /** Create an instance of {@link GetAssetDataResponse }. */
    public GetAssetDataResponse createGetAssetDataResponse() {
        return new GetAssetDataResponse();
    }

    /** Create an instance of {@link HIWSAuthException }. */
    public HIWSAuthException createHIWSAuthException() {
        return new HIWSAuthException();
    }

    /** Create an instance of {@link HIWSAssetNotFoundException }. */
    public HIWSAssetNotFoundException createHIWSAssetNotFoundException() {
        return new HIWSAssetNotFoundException();
    }

    /** Create an instance of {@link HIWSNotBinaryException }. */
    public HIWSNotBinaryException createHIWSNotBinaryException() {
        return new HIWSNotBinaryException();
    }

    /** Create an instance of {@link GetHierarchyLevelResponse }. */
    public GetHierarchyLevelResponse createGetHierarchyLevelResponse() {
        return new GetHierarchyLevelResponse();
    }

    /** Create an instance of {@link Authenticate }. */
    public Authenticate createAuthenticate() {
        return new Authenticate();
    }

    /** Create an instance of {@link HiHierarchyLevel }. */
    public HiHierarchyLevel createHiHierarchyLevel() {
        return new HiHierarchyLevel();
    }

    /** Create an instance of {@link HIWSLoggedException }. */
    public HIWSLoggedException createHIWSLoggedException() {
        return new HIWSLoggedException();
    }

    /** Create an instance of {@link GetHierarchyLevel }. */
    public GetHierarchyLevel createGetHierarchyLevel() {
        return new GetHierarchyLevel();
    }

    /** Create an instance of {@link AuthenticateResponse }. */
    public AuthenticateResponse createAuthenticateResponse() {
        return new AuthenticateResponse();
    }

    /** Create an instance of {@link GetReposInfoResponse }. */
    public GetReposInfoResponse createGetReposInfoResponse() {
        return new GetReposInfoResponse();
    }

    /** Create an instance of {@link GetAssetPreviewDataResponse }. */
    public GetAssetPreviewDataResponse createGetAssetPreviewDataResponse() {
        return new GetAssetPreviewDataResponse();
    }

    /** Create an instance of {@link GetWSVersion }. */
    public GetWSVersion createGetWSVersion() {
        return new GetWSVersion();
    }

    /** Create an instance of {@link GetMetadataRecordResponse }. */
    public GetMetadataRecordResponse createGetMetadataRecordResponse() {
        return new GetMetadataRecordResponse();
    }

    /** Create an instance of {@link HiMetadataRecord }. */
    public HiMetadataRecord createHiMetadataRecord() {
        return new HiMetadataRecord();
    }

    /** Create an instance of {@link HiTypedDatastream }. */
    public HiTypedDatastream createHiTypedDatastream() {
        return new HiTypedDatastream();
    }

    /** Create an instance of {@link HIWSXMLParserException }. */
    public HIWSXMLParserException createHIWSXMLParserException() {
        return new HIWSXMLParserException();
    }

    /** Create an instance of {@link GetMetadataRecord }. */
    public GetMetadataRecord createGetMetadataRecord() {
        return new GetMetadataRecord();
    }

    /** Create an instance of {@link GetAssetPreviewData }. */
    public GetAssetPreviewData createGetAssetPreviewData() {
        return new GetAssetPreviewData();
    }

    /** Create an instance of {@link HIWSUTF8EncodingException }. */
    public HIWSUTF8EncodingException createHIWSUTF8EncodingException() {
        return new HIWSUTF8EncodingException();
    }

    /** Create an instance of {@link GetReposInfo }. */
    public GetReposInfo createGetReposInfo() {
        return new GetReposInfo();
    }

    /** Create an instance of {@link GetWSVersionResponse }. */
    public GetWSVersionResponse createGetWSVersionResponse() {
        return new GetWSVersionResponse();
    }

    /** Create an instance of {@link GetAssetData }. */
    public GetAssetData createGetAssetData() {
        return new GetAssetData();
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} for element {@code assetURN}. */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "assetURN")
    public JAXBElement<String> createAssetURN(String value) {
        return new JAXBElement<String>(_AssetURN_QNAME, String.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} for element {@code token}. */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "token")
    public JAXBElement<String> createToken(String value) {
        return new JAXBElement<String>(_Token_QNAME, String.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetAssetPreviewDataResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetPreviewDataResponse")
    public JAXBElement<GetAssetPreviewDataResponse> createGetAssetPreviewDataResponse(GetAssetPreviewDataResponse value) {
        return new JAXBElement<GetAssetPreviewDataResponse>(_GetAssetPreviewDataResponse_QNAME, GetAssetPreviewDataResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} for element {@code parentURN}. */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "parentURN")
    public JAXBElement<String> createParentURN(String value) {
        return new JAXBElement<String>(_ParentURN_QNAME, String.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} for element {@code username}. */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "username")
    public JAXBElement<String> createUsername(String value) {
        return new JAXBElement<String>(_Username_QNAME, String.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetAssetData }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetData")
    public JAXBElement<GetAssetData> createGetAssetData(GetAssetData value) {
        return new JAXBElement<GetAssetData>(_GetAssetData_QNAME, GetAssetData.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetAssetPreviewData }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetPreviewData")
    public JAXBElement<GetAssetPreviewData> createGetAssetPreviewData(GetAssetPreviewData value) {
        return new JAXBElement<GetAssetPreviewData>(_GetAssetPreviewData_QNAME, GetAssetPreviewData.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetHierarchyLevelResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getHierarchyLevelResponse")
    public JAXBElement<GetHierarchyLevelResponse> createGetHierarchyLevelResponse(GetHierarchyLevelResponse value) {
        return new JAXBElement<GetHierarchyLevelResponse>(_GetHierarchyLevelResponse_QNAME, GetHierarchyLevelResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link Authenticate }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "authenticate")
    public JAXBElement<Authenticate> createAuthenticate(Authenticate value) {
        return new JAXBElement<Authenticate>(_Authenticate_QNAME, Authenticate.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSLoggedException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSLoggedException")
    public JAXBElement<HIWSLoggedException> createHIWSLoggedException(HIWSLoggedException value) {
        return new JAXBElement<HIWSLoggedException>(_HIWSLoggedException_QNAME, HIWSLoggedException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetMetadataRecord }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getMetadataRecord")
    public JAXBElement<GetMetadataRecord> createGetMetadataRecord(GetMetadataRecord value) {
        return new JAXBElement<GetMetadataRecord>(_GetMetadataRecord_QNAME, GetMetadataRecord.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSNotBinaryException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSNotBinaryException")
    public JAXBElement<HIWSNotBinaryException> createHIWSNotBinaryException(HIWSNotBinaryException value) {
        return new JAXBElement<HIWSNotBinaryException>(_HIWSNotBinaryException_QNAME, HIWSNotBinaryException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >} for element {@code session}. */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "session")
    public JAXBElement<String> createSession(String value) {
        return new JAXBElement<String>(_Session_QNAME, String.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSDCMetadataException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSDCMetadataException")
    public JAXBElement<HIWSDCMetadataException> createHIWSDCMetadataException(HIWSDCMetadataException value) {
        return new JAXBElement<HIWSDCMetadataException>(_HIWSDCMetadataException_QNAME, HIWSDCMetadataException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSAuthException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSAuthException")
    public JAXBElement<HIWSAuthException> createHIWSAuthException(HIWSAuthException value) {
        return new JAXBElement<HIWSAuthException>(_HIWSAuthException_QNAME, HIWSAuthException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSAssetNotFoundException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSAssetNotFoundException")
    public JAXBElement<HIWSAssetNotFoundException> createHIWSAssetNotFoundException(HIWSAssetNotFoundException value) {
        return new JAXBElement<HIWSAssetNotFoundException>(_HIWSAssetNotFoundException_QNAME, HIWSAssetNotFoundException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetWSVersion }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getWSVersion")
    public JAXBElement<GetWSVersion> createGetWSVersion(GetWSVersion value) {
        return new JAXBElement<GetWSVersion>(_GetWSVersion_QNAME, GetWSVersion.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetMetadataRecordResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getMetadataRecordResponse")
    public JAXBElement<GetMetadataRecordResponse> createGetMetadataRecordResponse(GetMetadataRecordResponse value) {
        return new JAXBElement<GetMetadataRecordResponse>(_GetMetadataRecordResponse_QNAME, GetMetadataRecordResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSUTF8EncodingException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSUTF8EncodingException")
    public JAXBElement<HIWSUTF8EncodingException> createHIWSUTF8EncodingException(HIWSUTF8EncodingException value) {
        return new JAXBElement<HIWSUTF8EncodingException>(_HIWSUTF8EncodingException_QNAME, HIWSUTF8EncodingException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetWSVersionResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getWSVersionResponse")
    public JAXBElement<GetWSVersionResponse> createGetWSVersionResponse(GetWSVersionResponse value) {
        return new JAXBElement<GetWSVersionResponse>(_GetWSVersionResponse_QNAME, GetWSVersionResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetReposInfo }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getReposInfo")
    public JAXBElement<GetReposInfo> createGetReposInfo(GetReposInfo value) {
        return new JAXBElement<GetReposInfo>(_GetReposInfo_QNAME, GetReposInfo.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link HIWSXMLParserException }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSXMLParserException")
    public JAXBElement<HIWSXMLParserException> createHIWSXMLParserException(HIWSXMLParserException value) {
        return new JAXBElement<HIWSXMLParserException>(_HIWSXMLParserException_QNAME, HIWSXMLParserException.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link AuthenticateResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "authenticateResponse")
    public JAXBElement<AuthenticateResponse> createAuthenticateResponse(AuthenticateResponse value) {
        return new JAXBElement<AuthenticateResponse>(_AuthenticateResponse_QNAME, AuthenticateResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetAssetDataResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetDataResponse")
    public JAXBElement<GetAssetDataResponse> createGetAssetDataResponse(GetAssetDataResponse value) {
        return new JAXBElement<GetAssetDataResponse>(_GetAssetDataResponse_QNAME, GetAssetDataResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetHierarchyLevel }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getHierarchyLevel")
    public JAXBElement<GetHierarchyLevel> createGetHierarchyLevel(GetHierarchyLevel value) {
        return new JAXBElement<GetHierarchyLevel>(_GetHierarchyLevel_QNAME, GetHierarchyLevel.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link GetReposInfoResponse }{@code >}. */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getReposInfoResponse")
    public JAXBElement<GetReposInfoResponse> createGetReposInfoResponse(GetReposInfoResponse value) {
        return new JAXBElement<GetReposInfoResponse>(_GetReposInfoResponse_QNAME, GetReposInfoResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link byte[]}{@code >}
     * for the local {@code return} element scoped inside
     * {@link GetAssetPreviewDataResponse }.
     */
    @XmlElementDecl(namespace = "", name = "return", scope = GetAssetPreviewDataResponse.class)
    public JAXBElement<byte[]> createGetAssetPreviewDataResponseReturn(byte[] value) {
        return new JAXBElement<byte[]>(_GetAssetPreviewDataResponseReturn_QNAME, byte[].class, GetAssetPreviewDataResponse.class, ((byte[]) value));
    }

}
bitgilde/HyperImage3
hi3-editor/src/org/hyperimage/connector/fedora3/ws/ObjectFactory.java
Java
apache-2.0
21,077
/* Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.backend.hadoop.hbase;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import java.util.ListIterator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableRecordReader;
import org.apache.hadoop.hbase.mapreduce.TableSplit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;

/**
 * A {@link TableInputFormat} specialized for Pig's HBase loader: it prunes
 * region splits that cannot contain rows within the configured row-key bounds
 * (gt/gte/lt/lte) and installs a record reader that enforces an optional row
 * limit and reports scan progress.
 */
public class HBaseTableInputFormat extends TableInputFormat {

    private static final Log LOG = LogFactory.getLog(HBaseTableInputFormat.class);

    // Row-key bounds: strictly-greater, greater-or-equal, strictly-less,
    // less-or-equal. A null bound means "unbounded on that side".
    protected final byte[] gt_;
    protected final byte[] gte_;
    protected final byte[] lt_;
    protected final byte[] lte_;

    /** Unbounded scan with no row limit (limit -1 disables limiting). */
    public HBaseTableInputFormat() {
        this(-1, null, null, null, null);
    }

    /**
     * @param limit max rows to read per split; values <= 0 mean unlimited
     * @param gt    lower bound, exclusive (nullable)
     * @param gte   lower bound, inclusive (nullable)
     * @param lt    upper bound, exclusive (nullable)
     * @param lte   upper bound, inclusive (nullable)
     */
    protected HBaseTableInputFormat(long limit, byte[] gt, byte[] gte, byte[] lt, byte[] lte) {
        super();
        setTableRecordReader(new HBaseTableRecordReader(limit));
        gt_ = gt;
        gte_ = gte;
        lt_ = lt;
        lte_ = lte;
    }

    /** Fluent builder for {@link HBaseTableInputFormat}. */
    public static class HBaseTableIFBuilder {
        protected byte[] gt_;
        protected byte[] gte_;
        protected byte[] lt_;
        protected byte[] lte_;
        protected long limit_;
        protected Configuration conf_;

        public HBaseTableIFBuilder withGt(byte[] gt) { gt_ = gt; return this; }
        public HBaseTableIFBuilder withGte(byte[] gte) { gte_ = gte; return this; }
        public HBaseTableIFBuilder withLt(byte[] lt) { lt_ = lt; return this; }
        public HBaseTableIFBuilder withLte(byte[] lte) { lte_ = lte; return this; }
        public HBaseTableIFBuilder withLimit(long limit) { limit_ = limit; return this; }
        public HBaseTableIFBuilder withConf(Configuration conf) { conf_ = conf; return this; }

        public HBaseTableInputFormat build() {
            HBaseTableInputFormat inputFormat = new HBaseTableInputFormat(limit_, gt_, gte_, lt_, lte_);
            if (conf_ != null) inputFormat.setConf(conf_);
            return inputFormat;
        }
    }

    /**
     * Returns the parent's splits minus regions that lie entirely outside the
     * configured row-key bounds, so those regions are never scanned.
     */
    @Override
    public List<InputSplit> getSplits(org.apache.hadoop.mapreduce.JobContext context) throws IOException {
        List<InputSplit> splits = super.getSplits(context);
        ListIterator<InputSplit> splitIter = splits.listIterator();
        while (splitIter.hasNext()) {
            TableSplit split = (TableSplit) splitIter.next();
            byte[] startKey = split.getStartRow();
            byte[] endKey = split.getEndRow();
            // Skip if the region doesn't satisfy configured options:
            // a region whose start is already past the upper bound, or whose
            // end is before the lower bound, cannot contain matching rows.
            if ((skipRegion(CompareOp.LESS, startKey, lt_)) ||
                (skipRegion(CompareOp.GREATER, endKey, gt_)) ||
                (skipRegion(CompareOp.GREATER, endKey, gte_)) ||
                (skipRegion(CompareOp.LESS_OR_EQUAL, startKey, lte_)) ) {
                splitIter.remove();
            }
        }
        return splits;
    }

    /**
     * True when {@code key} fails the bound {@code option} under comparison
     * {@code op} — i.e. a RowFilter with that comparator would filter the key
     * out. An empty key (open-ended region boundary) or a null option (bound
     * not configured) never causes a skip.
     */
    private boolean skipRegion(CompareOp op, byte[] key, byte[] option ) throws IOException {
        if (key.length == 0 || option == null)
            return false;
        BinaryComparator comp = new BinaryComparator(option);
        RowFilter rowFilter = new RowFilter(op, comp);
        return rowFilter.filterRowKey(key, 0, key.length);
    }

    /**
     * Record reader that stops after {@code limit} rows (when limit > 0) and
     * estimates progress by mapping row keys into an integer space: keys are
     * right-padded to a common length and prefixed with {1, 0} so they parse
     * as positive BigIntegers of equal magnitude-ordering to the raw bytes.
     */
    protected class HBaseTableRecordReader extends TableRecordReader {

        private long recordsSeen = 0;
        private final long limit_;
        private byte[] startRow_;
        private byte[] endRow_;
        private transient byte[] currRow_;
        private int maxRowLength;
        private BigInteger bigStart_;
        private BigInteger bigEnd_;
        private BigDecimal bigRange_;
        private transient float progressSoFar_ = 0;

        public HBaseTableRecordReader(long limit) {
            limit_ = limit;
        }

        /**
         * Captures the scan's start/stop rows and precomputes the numeric
         * range used by {@link #getProgress()}.
         */
        @Override
        public void setScan(Scan scan) {
            super.setScan(scan);

            startRow_ = scan.getStartRow();
            endRow_ = scan.getStopRow();
            byte[] startPadded;
            byte[] endPadded;
            // Pad the shorter endpoint with zero bytes so both keys have the
            // same length before converting to BigInteger.
            if (startRow_.length < endRow_.length) {
                startPadded = Bytes.padTail(startRow_, endRow_.length - startRow_.length);
                endPadded = endRow_;
            } else if (endRow_.length < startRow_.length) {
                startPadded = startRow_;
                endPadded = Bytes.padTail(endRow_, startRow_.length - endRow_.length);
            } else {
                startPadded = startRow_;
                endPadded = endRow_;
            }
            currRow_ = startRow_;
            // {1, 0} prefix keeps the BigInteger positive regardless of the
            // high bit of the first key byte.
            byte [] prependHeader = {1, 0};
            bigStart_ = new BigInteger(Bytes.add(prependHeader, startPadded));
            bigEnd_ = new BigInteger(Bytes.add(prependHeader, endPadded));
            bigRange_ = new BigDecimal(bigEnd_.subtract(bigStart_));
            maxRowLength = endRow_.length > startRow_.length ? endRow_.length : startRow_.length;
            LOG.info("setScan with ranges: " + bigStart_ + " - " + bigEnd_ + " ( " + bigRange_ + ")");
        }

        /** Advances the scan, honoring the optional row limit. */
        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            if (limit_ > 0 && ++recordsSeen > limit_) {
                return false;
            }
            boolean hasMore = super.nextKeyValue();
            if (hasMore) {
                currRow_ = getCurrentKey().get();
            }
            return hasMore;
        }

        /**
         * Progress = (current - start) / (end - start) in the padded key
         * space; returns 0 when the range is unbounded/unknown, and clamps to
         * the last reported value if the current key sorts past the end key.
         */
        @Override
        public float getProgress() {
            if (currRow_ == null || currRow_.length == 0 || endRow_.length == 0 || endRow_ == HConstants.LAST_ROW) {
                return 0;
            }
            byte[] lastPadded = currRow_;
            if(maxRowLength > currRow_.length) {
                lastPadded = Bytes.padTail(currRow_, maxRowLength - currRow_.length);
            }
            byte [] prependHeader = {1, 0};
            BigInteger bigLastRow = new BigInteger(Bytes.add(prependHeader, lastPadded));
            if (bigLastRow.compareTo(bigEnd_) > 0) {
                return progressSoFar_;
            }
            BigDecimal processed = new BigDecimal(bigLastRow.subtract(bigStart_));
            try {
                BigDecimal progress = processed.setScale(3).divide(bigRange_, BigDecimal.ROUND_HALF_DOWN);
                progressSoFar_ = progress.floatValue();
                return progressSoFar_;
            } catch (java.lang.ArithmeticException e) {
                // zero-width range (start == end) would divide by zero
                return 0;
            }
        }

    }
}
apache/pig
src/org/apache/pig/backend/hadoop/hbase/HBaseTableInputFormat.java
Java
apache-2.0
7,754
// Copyright (c) 2015 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace Alachisoft.NCache.Web.Command
{
    /// <summary>
    /// Holds shared command-related string constants. The class is sealed
    /// with a private constructor so it can never be instantiated.
    /// </summary>
    internal sealed class CommandOptions
    {
        /// <summary>
        /// Marker string with the literal value "EXCEPTION"; presumably used
        /// to tag an exception payload in command traffic — confirm at call sites.
        /// </summary>
        internal const string EXC_INITIAL = "EXCEPTION";

        // Constants-only type: block instantiation.
        private CommandOptions()
        {
        }
    }
}
modulexcite/NCache
Src/NCWebCache/Web/RemoteClient/Command/CommandOptions.cs
C#
apache-2.0
771
package app import ( "net/http" "time" "golang.org/x/net/context" "github.com/weaveworks/scope/probe/host" "github.com/weaveworks/scope/report" ) // Raw report handler func makeRawReportHandler(rep Reporter) CtxHandlerFunc { return func(ctx context.Context, w http.ResponseWriter, r *http.Request) { report, err := rep.Report(ctx, time.Now()) if err != nil { respondWith(w, http.StatusInternalServerError, err) return } respondWith(w, http.StatusOK, report) } } type probeDesc struct { ID string `json:"id"` Hostname string `json:"hostname"` Version string `json:"version"` LastSeen time.Time `json:"lastSeen"` } // Probe handler func makeProbeHandler(rep Reporter) CtxHandlerFunc { return func(ctx context.Context, w http.ResponseWriter, r *http.Request) { r.ParseForm() if _, sparse := r.Form["sparse"]; sparse { // if we have reports, we must have connected probes hasProbes, err := rep.HasReports(ctx, time.Now()) if err != nil { respondWith(w, http.StatusInternalServerError, err) } respondWith(w, http.StatusOK, hasProbes) return } rpt, err := rep.Report(ctx, time.Now()) if err != nil { respondWith(w, http.StatusInternalServerError, err) return } result := []probeDesc{} for _, n := range rpt.Host.Nodes { id, _ := n.Latest.Lookup(report.ControlProbeID) hostname, _ := n.Latest.Lookup(host.HostName) version, dt, _ := n.Latest.LookupEntry(host.ScopeVersion) result = append(result, probeDesc{ ID: id, Hostname: hostname, Version: version, LastSeen: dt, }) } respondWith(w, http.StatusOK, result) } }
kinvolk/scope
app/api_report.go
GO
apache-2.0
1,643
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.internal.operators.maybe;

import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.fuseable.HasUpstreamMaybeSource;

import java.util.concurrent.atomic.AtomicReference;

/**
 * Subscribes to the other source if the main source is empty.
 *
 * @param <T> the value type
 */
public final class MaybeSwitchIfEmptySingle<T> extends Single<T> implements HasUpstreamMaybeSource<T> {

    /** The primary Maybe; its success value is relayed directly. */
    final MaybeSource<T> source;

    /** The fallback Single, subscribed only when {@code source} completes empty. */
    final SingleSource<? extends T> other;

    public MaybeSwitchIfEmptySingle(MaybeSource<T> source, SingleSource<? extends T> other) {
        this.source = source;
        this.other = other;
    }

    @Override
    public MaybeSource<T> source() {
        return source;
    }

    @Override
    protected void subscribeActual(SingleObserver<? super T> observer) {
        source.subscribe(new SwitchIfEmptyMaybeObserver<T>(observer, other));
    }

    /**
     * Observes the main Maybe. The inherited AtomicReference holds the
     * current upstream Disposable; DisposableHelper manages it so that
     * dispose() cancels whichever source (main or fallback) is active.
     */
    static final class SwitchIfEmptyMaybeObserver<T>
    extends AtomicReference<Disposable>
    implements MaybeObserver<T>, Disposable {

        private static final long serialVersionUID = 4603919676453758899L;

        final SingleObserver<? super T> downstream;

        final SingleSource<? extends T> other;

        SwitchIfEmptyMaybeObserver(SingleObserver<? super T> actual, SingleSource<? extends T> other) {
            this.downstream = actual;
            this.other = other;
        }

        @Override
        public void dispose() {
            DisposableHelper.dispose(this);
        }

        @Override
        public boolean isDisposed() {
            return DisposableHelper.isDisposed(get());
        }

        @Override
        public void onSubscribe(Disposable d) {
            // setOnce: only the first upstream registers; duplicates are disposed.
            if (DisposableHelper.setOnce(this, d)) {
                downstream.onSubscribe(this);
            }
        }

        @Override
        public void onSuccess(T value) {
            downstream.onSuccess(value);
        }

        @Override
        public void onError(Throwable e) {
            downstream.onError(e);
        }

        @Override
        public void onComplete() {
            // Main source finished without a value: switch to the fallback,
            // unless this observer has been disposed in the meantime. The CAS
            // from the old upstream Disposable to null guards against a
            // concurrent dispose() racing with the switch.
            Disposable d = get();
            if (d != DisposableHelper.DISPOSED) {
                if (compareAndSet(d, null)) {
                    other.subscribe(new OtherSingleObserver<T>(downstream, this));
                }
            }
        }

        /**
         * Relays the fallback Single's signals to the downstream observer
         * while parking its Disposable in the shared parent reference so a
         * downstream dispose() also cancels the fallback subscription.
         */
        static final class OtherSingleObserver<T> implements SingleObserver<T> {

            final SingleObserver<? super T> downstream;

            final AtomicReference<Disposable> parent;

            OtherSingleObserver(SingleObserver<? super T> actual, AtomicReference<Disposable> parent) {
                this.downstream = actual;
                this.parent = parent;
            }

            @Override
            public void onSubscribe(Disposable d) {
                DisposableHelper.setOnce(parent, d);
            }

            @Override
            public void onSuccess(T value) {
                downstream.onSuccess(value);
            }

            @Override
            public void onError(Throwable e) {
                downstream.onError(e);
            }
        }
    }
}
NiteshKant/RxJava
src/main/java/io/reactivex/internal/operators/maybe/MaybeSwitchIfEmptySingle.java
Java
apache-2.0
3,840
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------

# Autogenerated By   : src/main/python/generator/generator.py
# Autogenerated From : scripts/builtin/garch.dml

from typing import Dict, Iterable

from systemds.operator import OperationNode, Matrix, Frame, List, MultiReturn, Scalar
from systemds.script_building.dag import OutputType
from systemds.utils.consts import VALID_INPUT_TYPES


def garch(X: Matrix,
          kmax: int,
          momentum: float,
          start_stepsize: float,
          end_stepsize: float,
          start_vicinity: float,
          end_vicinity: float,
          sim_seed: int,
          verbose: bool):
    """
    Fit a GARCH(1,1) model via momentum gradient descent and simulate the
    fitted process (wraps the SystemDS ``garch`` builtin).

    :param X: The input Matrix to fit the model on.
    :param kmax: Number of iterations
    :param momentum: Momentum for momentum-gradient descent (set to 0 to deactivate)
    :param start_stepsize: Initial gradient-descent stepsize
    :param end_stepsize: gradient-descent stepsize at end (linear descent)
    :param start_vicinity: proportion of randomness of restart-location for gradient descent at beginning
    :param end_vicinity: same at end (linear decay)
    :param sim_seed: seed for simulation of process on fitted coefficients
    :param verbose: verbosity, comments during fitting
    :return: 'OperationNode' containing simulated garch(1,1) process on fitted coefficients &
        variances of simulated fitted process & constant term of fitted process &
        1-st arch-coefficient of fitted process & 1-st garch-coefficient of fitted process
        (note: slow convergence of optimization — simulated annealing / gradient descent)
    """
    params_dict = {
        'X': X,
        'kmax': kmax,
        'momentum': momentum,
        'start_stepsize': start_stepsize,
        'end_stepsize': end_stepsize,
        'start_vicinity': start_vicinity,
        'end_vicinity': end_vicinity,
        'sim_seed': sim_seed,
        'verbose': verbose,
    }

    # The builtin yields five outputs: two matrices (simulated process and its
    # variances) followed by three scalars (constant, arch and garch coefficients).
    output_types = (Matrix, Matrix, Scalar, Scalar, Scalar)
    output_nodes = [node_type(X.sds_context, '') for node_type in output_types]

    op = MultiReturn(X.sds_context, 'garch', output_nodes,
                     named_input_nodes=params_dict)

    # Wire each output node back to the multi-return op it originates from.
    for node in output_nodes:
        node._unnamed_input_nodes = [op]

    return op
apache/incubator-systemml
src/main/python/systemds/operator/algorithm/builtin/garch.py
Python
apache-2.0
3,230
package org.hl7.fhir.dstu3.model.codesystems; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0 import org.hl7.fhir.dstu3.model.EnumFactory; public class ObservationStatusEnumFactory implements EnumFactory<ObservationStatus> { public ObservationStatus fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) return null; if ("registered".equals(codeString)) return ObservationStatus.REGISTERED; if ("preliminary".equals(codeString)) return ObservationStatus.PRELIMINARY; if ("final".equals(codeString)) return ObservationStatus.FINAL; if ("amended".equals(codeString)) return ObservationStatus.AMENDED; if ("cancelled".equals(codeString)) return ObservationStatus.CANCELLED; if ("entered-in-error".equals(codeString)) return ObservationStatus.ENTEREDINERROR; if ("unknown".equals(codeString)) return ObservationStatus.UNKNOWN; throw new IllegalArgumentException("Unknown ObservationStatus code '"+codeString+"'"); } public String toCode(ObservationStatus code) { if (code == ObservationStatus.REGISTERED) return "registered"; if (code == ObservationStatus.PRELIMINARY) return "preliminary"; if (code == ObservationStatus.FINAL) return "final"; if (code == ObservationStatus.AMENDED) return "amended"; if (code == ObservationStatus.CANCELLED) return "cancelled"; if (code == ObservationStatus.ENTEREDINERROR) return "entered-in-error"; if (code == ObservationStatus.UNKNOWN) return "unknown"; return "?"; } public String toSystem(ObservationStatus code) { return code.getSystem(); } }
Gaduo/hapi-fhir
hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/codesystems/ObservationStatusEnumFactory.java
Java
apache-2.0
3,374
/* * Copyright 2013 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.examples.nqueens.app; import java.io.File; import org.junit.Test; import org.optaplanner.benchmark.api.PlannerBenchmarkException; import org.optaplanner.benchmark.api.PlannerBenchmarkFactory; import org.optaplanner.benchmark.config.PlannerBenchmarkConfig; import org.optaplanner.examples.common.app.PlannerBenchmarkTest; public class BrokenNQueensBenchmarkTest extends PlannerBenchmarkTest { @Override protected String createBenchmarkConfigResource() { return "org/optaplanner/examples/nqueens/benchmark/nqueensBenchmarkConfig.xml"; } @Override protected PlannerBenchmarkFactory buildPlannerBenchmarkFactory(File unsolvedDataFile) { PlannerBenchmarkFactory benchmarkFactory = super.buildPlannerBenchmarkFactory(unsolvedDataFile); PlannerBenchmarkConfig benchmarkConfig = benchmarkFactory.getPlannerBenchmarkConfig(); benchmarkConfig.setWarmUpSecondsSpentLimit(0L); benchmarkConfig.getInheritedSolverBenchmarkConfig().getSolverConfig().getTerminationConfig() .setStepCountLimit(-100); // Intentionally crash the solver return benchmarkFactory; } // ************************************************************************ // Tests // ************************************************************************ @Test(timeout = 100000, expected = PlannerBenchmarkException.class) public void benchmarkBroken8queens() { runBenchmarkTest(new 
File("data/nqueens/unsolved/8queens.xml")); } }
gsheldon/optaplanner
optaplanner-examples/src/test/java/org/optaplanner/examples/nqueens/app/BrokenNQueensBenchmarkTest.java
Java
apache-2.0
2,156
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.monitor.util.celltypes; import java.io.Serializable; import java.util.Comparator; public abstract class CellType<T> implements Comparator<T>, Serializable { private static final long serialVersionUID = 1L; private boolean sortable = true; abstract public String alignment(); abstract public String format(Object obj); public final void setSortable(boolean sortable) { this.sortable = sortable; } public final boolean isSortable() { return sortable; } }
adamjshook/accumulo
server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/CellType.java
Java
apache-2.0
1,314
package io.cattle.platform.process.dao.impl; import static io.cattle.platform.core.model.tables.AccountTable.*; import io.cattle.platform.core.model.Account; import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao; import io.cattle.platform.process.dao.AccountDao; public class AccountDaoImpl extends AbstractJooqDao implements AccountDao { @Override public Account findByUuid(String uuid) { return create() .selectFrom(ACCOUNT) .where(ACCOUNT.UUID.eq(uuid)) .fetchOne(); } }
cloudnautique/cloud-cattle
code/iaas/logic/src/main/java/io/cattle/platform/process/dao/impl/AccountDaoImpl.java
Java
apache-2.0
551
package net.ros.client.render; import com.google.common.collect.ImmutableList; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.block.model.BakedQuad; import net.minecraft.client.renderer.block.model.IBakedModel; import net.minecraft.client.renderer.block.model.ItemCameraTransforms; import net.minecraft.client.renderer.block.model.ItemOverrideList; import net.minecraft.client.renderer.texture.TextureAtlasSprite; import net.minecraft.entity.EntityLivingBase; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumFacing; import net.minecraft.world.World; import net.ros.client.render.model.ModelCacheManager; import net.ros.client.render.model.obj.PipeOBJStates; import net.ros.client.render.model.obj.ROSOBJState; import net.ros.common.block.BlockPipeBase; import org.apache.commons.lang3.tuple.Pair; import javax.annotation.Nonnull; import javax.vecmath.Matrix4f; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; public class ModelPipeInventory implements IBakedModel { private final Map<ROSOBJState, CompositeBakedModel> CACHE = new HashMap<>(); private final BlockPipeBase pipeBlock; public ModelPipeInventory(BlockPipeBase pipeBlock) { this.pipeBlock = pipeBlock; } @Nonnull @Override public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand) { return Collections.emptyList(); } private CompositeBakedModel getModel(ROSOBJState pipeState) { if (CACHE.containsKey(pipeState)) return CACHE.get(pipeState); else { CompositeBakedModel model = new CompositeBakedModel(ModelCacheManager.getPipeQuads(pipeBlock, pipeState), Minecraft.getMinecraft().getBlockRendererDispatcher() .getModelForState(pipeBlock.getDefaultState())); CACHE.put(pipeState, model); return model; } } @Nonnull @Override public ItemOverrideList getOverrides() { return itemHandler; } @Override public boolean isAmbientOcclusion() { return false; } @Override public 
boolean isGui3d() { return true; } @Override public boolean isBuiltInRenderer() { return false; } @Nonnull @Override public TextureAtlasSprite getParticleTexture() { return Minecraft.getMinecraft().getTextureMapBlocks().getAtlasSprite("minecraft:blocks/dirt"); } @Nonnull @Override public ItemCameraTransforms getItemCameraTransforms() { return ItemCameraTransforms.DEFAULT; } private static class CompositeBakedModel implements IBakedModel { private IBakedModel pipeModel; private final List<BakedQuad> genQuads; CompositeBakedModel(List<BakedQuad> pipeQuads, IBakedModel pipeModel) { this.pipeModel = pipeModel; ImmutableList.Builder<BakedQuad> genBuilder = ImmutableList.builder(); genBuilder.addAll(pipeQuads); genQuads = genBuilder.build(); } @Nonnull @Override public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand) { return face == null ? genQuads : Collections.emptyList(); } @Override public boolean isAmbientOcclusion() { return pipeModel.isAmbientOcclusion(); } @Override public boolean isGui3d() { return pipeModel.isGui3d(); } @Override public boolean isBuiltInRenderer() { return pipeModel.isBuiltInRenderer(); } @Nonnull @Override public TextureAtlasSprite getParticleTexture() { return pipeModel.getParticleTexture(); } @Nonnull @Override public ItemOverrideList getOverrides() { return ItemOverrideList.NONE; } @Override public Pair<? extends IBakedModel, Matrix4f> handlePerspective(ItemCameraTransforms.TransformType cameraTransformType) { return Pair.of(this, pipeModel.handlePerspective(cameraTransformType).getRight()); } } private final ItemOverrideList itemHandler = new ItemOverrideList(ImmutableList.of()) { @Nonnull @Override public IBakedModel handleItemState(@Nonnull IBakedModel model, ItemStack stack, World world, EntityLivingBase entity) { return ModelPipeInventory.this.getModel(PipeOBJStates.getVisibilityState( pipeBlock.getPipeType().getSize(), EnumFacing.WEST, EnumFacing.EAST)); } }; }
mantal/Qbar
content/logistic/src/main/java/net/ros/client/render/ModelPipeInventory.java
Java
apache-2.0
4,971
import React, { Component, Fragment } from 'react'; import { navigate } from '@reach/router'; import PropTypes from 'prop-types'; import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap'; import { siteRoot, gettext, orgID } from '../../utils/constants'; import { seafileAPI } from '../../utils/seafile-api'; import { Utils } from '../../utils/utils'; import toaster from '../../components/toast'; import OrgGroupInfo from '../../models/org-group'; import MainPanelTopbar from './main-panel-topbar'; class Search extends React.Component { constructor(props) { super(props); this.state = { value: '' }; } handleInputChange = (e) => { this.setState({ value: e.target.value }); } handleKeyPress = (e) => { if (e.key == 'Enter') { e.preventDefault(); this.handleSubmit(); } } handleSubmit = () => { const value = this.state.value.trim(); if (!value) { return false; } this.props.submit(value); } render() { return ( <div className="input-icon"> <i className="d-flex input-icon-addon fas fa-search"></i> <input type="text" className="form-control search-input h-6 mr-1" style={{width: '15rem'}} placeholder={this.props.placeholder} value={this.state.value} onChange={this.handleInputChange} onKeyPress={this.handleKeyPress} autoComplete="off" /> </div> ); } } class OrgGroups extends Component { constructor(props) { super(props); this.state = { page: 1, pageNext: false, orgGroups: [], isItemFreezed: false }; } componentDidMount() { let page = this.state.page; this.initData(page); } initData = (page) => { seafileAPI.orgAdminListOrgGroups(orgID, page).then(res => { let orgGroups = res.data.groups.map(item => { return new OrgGroupInfo(item); }); this.setState({ orgGroups: orgGroups, pageNext: res.data.page_next, page: res.data.page, }); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } onChangePageNum = (e, num) => { e.preventDefault(); let page = this.state.page; if (num == 1) { page = page + 1; } else { page = page - 1; } 
this.initData(page); } onFreezedItem = () => { this.setState({isItemFreezed: true}); } onUnfreezedItem = () => { this.setState({isItemFreezed: false}); } deleteGroupItem = (group) => { seafileAPI.orgAdminDeleteOrgGroup(orgID, group.id).then(res => { this.setState({ orgGroups: this.state.orgGroups.filter(item => item.id != group.id) }); let msg = gettext('Successfully deleted {name}'); msg = msg.replace('{name}', group.groupName); toaster.success(msg); }).catch(error => { let errMessage = Utils.getErrorMsg(error); toaster.danger(errMessage); }); } searchItems = (keyword) => { navigate(`${siteRoot}org/groupadmin/search-groups/?query=${encodeURIComponent(keyword)}`); } getSearch = () => { return <Search placeholder={gettext('Search groups by name')} submit={this.searchItems} />; } render() { let groups = this.state.orgGroups; return ( <Fragment> <MainPanelTopbar search={this.getSearch()}/> <div className="main-panel-center flex-row"> <div className="cur-view-container"> <div className="cur-view-path"> <h3 className="sf-heading">{gettext('All Groups')}</h3> </div> <div className="cur-view-content"> <table> <thead> <tr> <th width="30%">{gettext('Name')}</th> <th width="35%">{gettext('Creator')}</th> <th width="23%">{gettext('Created At')}</th> <th width="12%" className="text-center">{gettext('Operations')}</th> </tr> </thead> <tbody> {groups.map(item => { return ( <GroupItem key={item.id} group={item} isItemFreezed={this.state.isItemFreezed} onFreezedItem={this.onFreezedItem} onUnfreezedItem={this.onUnfreezedItem} deleteGroupItem={this.deleteGroupItem} /> ); })} </tbody> </table> <div className="paginator"> {this.state.page != 1 && <a href="#" onClick={(e) => this.onChangePageNum(e, -1)}>{gettext('Previous')}</a>} {(this.state.page != 1 && this.state.pageNext) && <span> | </span>} {this.state.pageNext && <a href="#" onClick={(e) => this.onChangePageNum(e, 1)}>{gettext('Next')}</a>} </div> </div> </div> </div> </Fragment> ); } } const GroupItemPropTypes = { group: 
PropTypes.object.isRequired, isItemFreezed: PropTypes.bool.isRequired, onFreezedItem: PropTypes.func.isRequired, onUnfreezedItem: PropTypes.func.isRequired, deleteGroupItem: PropTypes.func.isRequired, }; class GroupItem extends React.Component { constructor(props) { super(props); this.state = { highlight: false, showMenu: false, isItemMenuShow: false }; } onMouseEnter = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: true, highlight: true, }); } } onMouseLeave = () => { if (!this.props.isItemFreezed) { this.setState({ showMenu: false, highlight: false }); } } onDropdownToggleClick = (e) => { e.preventDefault(); this.toggleOperationMenu(e); } toggleOperationMenu = (e) => { e.stopPropagation(); this.setState( {isItemMenuShow: !this.state.isItemMenuShow }, () => { if (this.state.isItemMenuShow) { this.props.onFreezedItem(); } else { this.setState({ highlight: false, showMenu: false, }); this.props.onUnfreezedItem(); } } ); } toggleDelete = () => { this.props.deleteGroupItem(this.props.group); } renderGroupHref = (group) => { let groupInfoHref; if (group.creatorName == 'system admin') { groupInfoHref = siteRoot + 'org/departmentadmin/groups/' + group.id + '/'; } else { groupInfoHref = siteRoot + 'org/groupadmin/' + group.id + '/'; } return groupInfoHref; } renderGroupCreator = (group) => { let userInfoHref = siteRoot + 'org/useradmin/info/' + group.creatorEmail + '/'; if (group.creatorName == 'system admin') { return ( <td> -- </td> ); } else { return( <td> <a href={userInfoHref} className="font-weight-normal">{group.creatorName}</a> </td> ); } } render() { let { group } = this.props; let isOperationMenuShow = (group.creatorName != 'system admin') && this.state.showMenu; return ( <tr className={this.state.highlight ? 
'tr-highlight' : ''} onMouseEnter={this.onMouseEnter} onMouseLeave={this.onMouseLeave}> <td> <a href={this.renderGroupHref(group)} className="font-weight-normal">{group.groupName}</a> </td> {this.renderGroupCreator(group)} <td>{group.ctime}</td> <td className="text-center cursor-pointer"> {isOperationMenuShow && <Dropdown isOpen={this.state.isItemMenuShow} toggle={this.toggleOperationMenu}> <DropdownToggle tag="a" className="attr-action-icon fas fa-ellipsis-v" title={gettext('More Operations')} data-toggle="dropdown" aria-expanded={this.state.isItemMenuShow} onClick={this.onDropdownToggleClick} /> <DropdownMenu> <DropdownItem onClick={this.toggleDelete}>{gettext('Delete')}</DropdownItem> </DropdownMenu> </Dropdown> } </td> </tr> ); } } GroupItem.propTypes = GroupItemPropTypes; export default OrgGroups;
miurahr/seahub
frontend/src/pages/org-admin/org-groups.js
JavaScript
apache-2.0
8,470
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package io.github.jass2125.locadora.jpa; import javax.persistence.EntityManager; import javax.persistence.Persistence; /** * * @author Anderson Souza * @email jair_anderson_bs@hotmail.com * @since 2015, Feb 9, 2016 */ public class EntityManagerJPA { private static EntityManager em; private EntityManagerJPA() { } public static EntityManager getEntityManager(){ if(em == null) { em = Persistence.createEntityManagerFactory("default").createEntityManager(); } return em; } }
ifpb-disciplinas-2015-2/locadora-jpa-web
src/main/java/io/github/jass2125/locadora/jpa/EntityManagerJPA.java
Java
apache-2.0
740
import {boolean, number, object, text, withKnobs} from '@storybook/addon-knobs'; import { BentoAccordion, BentoAccordionContent, BentoAccordionHeader, BentoAccordionSection, } from '#bento/components/bento-accordion/1.0/component'; import {BentoVideo} from '#bento/components/bento-video/1.0/component'; import * as Preact from '#preact'; import '#bento/components/bento-video/1.0/component.jss'; export default { title: 'Video', component: BentoVideo, decorators: [withKnobs], }; const VideoTagPlayer = ({i}) => { const group = `Player ${i + 1}`; const width = text('width', '640px', group); const height = text('height', '360px', group); const ariaLabel = text('aria-label', 'Video Player', group); const autoplay = boolean('autoplay', true, group); const controls = boolean('controls', true, group); const mediasession = boolean('mediasession', true, group); const noaudio = boolean('noaudio', false, group); const loop = boolean('loop', false, group); const poster = text( 'poster', 'https://amp.dev/static/inline-examples/images/kitten-playing.png', group ); const artist = text('artist', '', group); const album = text('album', '', group); const artwork = text('artwork', '', group); const title = text('title', '', group); const sources = object( 'sources', [ { src: 'https://amp.dev/static/inline-examples/videos/kitten-playing.webm', type: 'video/webm', }, { src: 'https://amp.dev/static/inline-examples/videos/kitten-playing.mp4', type: 'video/mp4', }, ], group ); return ( <BentoVideo component="video" aria-label={ariaLabel} autoplay={autoplay} controls={controls} mediasession={mediasession} noaudio={noaudio} loop={loop} poster={poster} artist={artist} album={album} artwork={artwork} title={title} style={{width, height}} sources={sources.map((props) => ( <source {...props}></source> ))} /> ); }; const Spacer = ({height}) => { return ( <div style={{ height, background: `linear-gradient(to bottom, #bbb, #bbb 10%, #fff 10%, #fff)`, backgroundSize: '100% 10px', }} ></div> ); }; 
export const Default = () => { const amount = number('Amount', 1, {}, 'Page'); const spacerHeight = text('Space', '80vh', 'Page'); const spaceAbove = boolean('Space above', false, 'Page'); const spaceBelow = boolean('Space below', false, 'Page'); const players = []; for (let i = 0; i < amount; i++) { players.push(<VideoTagPlayer key={i} i={i} />); if (i < amount - 1) { players.push(<Spacer height={spacerHeight} />); } } return ( <> {spaceAbove && <Spacer height={spacerHeight} />} {players} {spaceBelow && <Spacer height={spacerHeight} />} </> ); }; export const InsideAccordion = () => { const width = text('width', '320px'); const height = text('height', '180px'); return ( <BentoAccordion expandSingleSection> <BentoAccordionSection key={1} expanded> <BentoAccordionHeader> <h2>Controls</h2> </BentoAccordionHeader> <BentoAccordionContent> <BentoVideo component="video" controls={true} loop={true} style={{width, height}} src="https://amp.dev/static/inline-examples/videos/kitten-playing.mp4" poster="https://amp.dev/static/inline-examples/images/kitten-playing.png" /> </BentoAccordionContent> </BentoAccordionSection> <BentoAccordionSection key={2}> <BentoAccordionHeader> <h2>Autoplay</h2> </BentoAccordionHeader> <BentoAccordionContent> <BentoVideo component="video" autoplay={true} loop={true} style={{width, height}} src="https://amp.dev/static/inline-examples/videos/kitten-playing.mp4" poster="https://amp.dev/static/inline-examples/images/kitten-playing.png" sources={[ <source type="video/mp4" src="https://amp.dev/static/inline-examples/videos/kitten-playing.mp4" />, ]} /> </BentoAccordionContent> </BentoAccordionSection> </BentoAccordion> ); };
ampproject/amphtml
extensions/amp-video/1.0/storybook/Basic.js
JavaScript
apache-2.0
4,420
/******************************************************************************* * Copyright 2012 Apigee Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.usergrid.persistence.query.tree; import org.antlr.runtime.Token; import org.usergrid.persistence.exceptions.PersistenceException; /** * @author tnine * */ public class ContainsOperand extends Operand { /** * @param property * @param literal */ public ContainsOperand(Token t) { super(t); } /* (non-Javadoc) * @see org.usergrid.persistence.query.tree.Operand#visit(org.usergrid.persistence.query.tree.QueryVisitor) */ @Override public void visit(QueryVisitor visitor) throws PersistenceException { visitor.visit(this); } public void setProperty(String name){ setChild(0, new Property(name)); } public void setValue(String value){ setChild(1, new StringLiteral(value)); } public Property getProperty(){ return (Property) this.children.get(0); } public StringLiteral getString(){ return (StringLiteral) this.children.get(1); } }
futur/usergrid-stack
core/src/main/java/org/usergrid/persistence/query/tree/ContainsOperand.java
Java
apache-2.0
1,697
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.api.subtree; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.api.ldap.model.subtree.SubtreeSpecification; import org.apache.directory.server.core.api.event.Evaluator; import org.apache.directory.server.core.api.event.ExpressionEvaluator; /** * An evaluator used to determine if an entry is included in the collection * represented by a subtree specification. * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public class SubtreeEvaluator { /** A refinement filter evaluator */ private final Evaluator evaluator; /** * Creates a subtreeSpecification evaluatior which can be used to determine * if an entry is included within the collection of a subtree. * * @param schemaManager The server schemaManager */ public SubtreeEvaluator( SchemaManager schemaManager ) { evaluator = new ExpressionEvaluator( schemaManager ); } /** * Determines if an entry is selected by a subtree specification. 
* * @param subtree the subtree specification * @param apDn the distinguished name of the administrative point containing the subentry * @param entryDn the distinguished name of the candidate entry * @param entry The entry to evaluate * @return true if the entry is selected by the specification, false if it is not * @throws LdapException if errors are encountered while evaluating selection */ public boolean evaluate( SubtreeSpecification subtree, Dn apDn, Dn entryDn, Entry entry ) throws LdapException { /* ===================================================================== * NOTE: Regarding the overall approach, we try to narrow down the * possibilities by slowly pruning relative names off of the entryDn. * For example we check first if the entry is a descendant of the AP. * If so we use the relative name thereafter to calculate if it is * a descendant of the base. This means shorter names to compare and * less work to do while we continue to deduce inclusion by the subtree * specification. * ===================================================================== */ // First construct the subtree base, which is the concatenation of the // AP Dn and the subentry base Dn subentryBaseDn = apDn; subentryBaseDn = subentryBaseDn.add( subtree.getBase() ); if ( !entryDn.isDescendantOf( subentryBaseDn ) ) { // The entry Dn is not part of the subtree specification, get out return false; } /* * Evaluate based on minimum and maximum chop values. Here we simply * need to compare the distances respectively with the size of the * baseRelativeRdn. For the max distance entries with a baseRelativeRdn * size greater than the max distance are rejected. For the min distance * entries with a baseRelativeRdn size less than the minimum distance * are rejected. 
*/ int entryRelativeDnSize = entryDn.size() - subentryBaseDn.size(); if ( ( subtree.getMaxBaseDistance() != SubtreeSpecification.UNBOUNDED_MAX ) && ( entryRelativeDnSize > subtree.getMaxBaseDistance() ) ) { return false; } if ( ( subtree.getMinBaseDistance() > 0 ) && ( entryRelativeDnSize < subtree.getMinBaseDistance() ) ) { return false; } /* * For specific exclusions we must iterate through the set and check * if the baseRelativeRdn is a descendant of the exclusion. The * isDescendant() function will return true if the compared names * are equal so for chopAfter exclusions we must check for equality * as well and reject if the relative names are equal. */ // Now, get the entry's relative part if ( !subtree.getChopBeforeExclusions().isEmpty() || !subtree.getChopAfterExclusions().isEmpty() ) { Dn entryRelativeDn = entryDn.getDescendantOf( apDn ).getDescendantOf( subtree.getBase() ); for ( Dn chopBeforeDn : subtree.getChopBeforeExclusions() ) { if ( entryRelativeDn.isDescendantOf( chopBeforeDn ) ) { return false; } } for ( Dn chopAfterDn : subtree.getChopAfterExclusions() ) { if ( entryRelativeDn.isDescendantOf( chopAfterDn ) && !chopAfterDn.equals( entryRelativeDn ) ) { return false; } } } /* * The last remaining step is to check and see if the refinement filter * selects the entry candidate based on objectClass attribute values. * To do this we invoke the refinement evaluator members evaluate() method. */ if ( subtree.getRefinement() != null ) { return evaluator.evaluate( subtree.getRefinement(), entryDn, entry ); } /* * If nothing has rejected the candidate entry and there is no refinement * filter then the entry is included in the collection represented by the * subtree specification so we return true. */ return true; } }
apache/directory-server
core-api/src/main/java/org/apache/directory/server/core/api/subtree/SubtreeEvaluator.java
Java
apache-2.0
6,471
namespace SmartyStreets { public class RequestEntityTooLargeException : SmartyException { public RequestEntityTooLargeException() { } public RequestEntityTooLargeException(string message) : base(message) { } } }
smartystreets/smartystreets-csharp-sdk
src/sdk/Exceptions/RequestEntityTooLargeException.cs
C#
apache-2.0
233
package io.cattle.platform.configitem.server.model.impl; import java.io.IOException; import io.cattle.platform.configitem.server.model.RefreshableConfigItem; import io.cattle.platform.configitem.server.resource.ResourceRoot; import io.cattle.platform.configitem.version.ConfigItemStatusManager; public abstract class AbstractResourceRootConfigItem extends AbstractConfigItem implements RefreshableConfigItem { ResourceRoot resourceRoot; public AbstractResourceRootConfigItem(String name, ConfigItemStatusManager versionManager, ResourceRoot resourceRoot) { super(name, versionManager); this.resourceRoot = resourceRoot; } @Override public String getSourceRevision() { return resourceRoot.getSourceRevision(); } @Override public void refresh() throws IOException { resourceRoot.scan(); } public ResourceRoot getResourceRoot() { return resourceRoot; } }
alena1108/cattle
code/iaas/config-item/server/src/main/java/io/cattle/platform/configitem/server/model/impl/AbstractResourceRootConfigItem.java
Java
apache-2.0
945
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.util.json; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.Map; /** * JsonArray is a common non-thread safe data format for a collection of data. * The contents of a JsonArray are only validated as JSON values on * serialization. * * @see Jsoner * @since 2.0.0 */ public class JsonArray extends ArrayList<Object> implements Jsonable { /** * The serialization version this class is compatible with. This value * doesn't need to be incremented if and only if the only changes to occur * were updating comments, updating javadocs, adding new fields to the * class, changing the fields from static to non-static, or changing the * fields from transient to non transient. All other changes require this * number be incremented. */ private static final long serialVersionUID = 1L; /** Instantiates an empty JsonArray. */ public JsonArray() { } /** * Instantiate a new JsonArray using ArrayList's constructor of the same * type. * * @param collection represents the elements to produce the JsonArray with. 
*/ public JsonArray(final Collection<?> collection) { super(collection); } /** * A convenience method that assumes every element of the JsonArray is * castable to T before adding it to a collection of Ts. * * @param <T> represents the type that all of the elements of the JsonArray * should be cast to and the type the collection will contain. * @param destination represents where all of the elements of the JsonArray * are added to after being cast to the generic type provided. * @throws ClassCastException if the unchecked cast of an element to T * fails. */ @SuppressWarnings("unchecked") public <T> void asCollection(final Collection<T> destination) { for (final Object o : this) { destination.add((T)o); } } /** * A convenience method that assumes there is a BigDecimal, Number, or * String at the given index. If a Number or String is there it is used to * construct a new BigDecimal. * * @param index representing where the value is expected to be at. * @return the value stored at the key or the default provided if the key * doesn't exist. * @throws ClassCastException if there was a value but didn't match the * assumed return types. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal. * @see BigDecimal * @see Number#doubleValue() */ public BigDecimal getBigDecimal(final int index) { Object returnable = this.get(index); if (returnable instanceof BigDecimal) { /* Success there was a BigDecimal. */ } else if (returnable instanceof Number) { /* A number can be used to construct a BigDecimal. */ returnable = new BigDecimal(returnable.toString()); } else if (returnable instanceof String) { /* A number can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return (BigDecimal)returnable; } /** * A convenience method that assumes there is a Boolean or String value at * the given index. 
* * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a boolean. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. */ public Boolean getBoolean(final int index) { Object returnable = this.get(index); if (returnable instanceof String) { returnable = Boolean.valueOf((String)returnable); } return (Boolean)returnable; } /** * A convenience method that assumes there is a Number or String value at * the given index. * * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a byte. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal or if the Number represents the double or float * Infinity or NaN. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Number */ public Byte getByte(final int index) { Object returnable = this.get(index); if (returnable == null) { return null; } if (returnable instanceof String) { /* A String can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return ((Number)returnable).byteValue(); } /** * A convenience method that assumes there is a Collection value at the * given index. * * @param <T> the kind of collection to expect at the index. Note unless * manually added, collection values will be a JsonArray. * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a Collection. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. 
* @see Collection */ @SuppressWarnings("unchecked") public <T extends Collection<?>> T getCollection(final int index) { /* * The unchecked warning is suppressed because there is no way of * guaranteeing at compile time the cast will work. */ return (T)this.get(index); } /** * A convenience method that assumes there is a Number or String value at * the given index. * * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a double. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal or if the Number represents the double or float * Infinity or NaN. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Number */ public Double getDouble(final int index) { Object returnable = this.get(index); if (returnable == null) { return null; } if (returnable instanceof String) { /* A String can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return ((Number)returnable).doubleValue(); } /** * A convenience method that assumes there is a String value at the given * index representing a fully qualified name in dot notation of an enum. * * @param index representing where the value is expected to be at. * @param <T> the Enum type the value at the index is expected to belong to. * @return the enum based on the string found at the index, or null if the * value at the index was null. * @throws ClassNotFoundException if the element was a String but the * declaring enum type couldn't be determined with it. * @throws ClassCastException if the element at the index was not a String * or if the fully qualified enum name is of the wrong type. * @throws IllegalArgumentException if an enum type was dynamically * determined but it doesn't define an enum with the dynamically * determined name. 
* @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Enum#valueOf(Class, String) */ @SuppressWarnings("unchecked") public <T extends Enum<T>> T getEnum(final int index) throws ClassNotFoundException { /* * Supressing the unchecked warning because the returnType is * dynamically identified and could lead to a ClassCastException when * returnType is cast to Class<T>, which is expected by the method's * contract. */ T returnable; final String element; final String[] splitValues; final int numberOfValues; final StringBuilder returnTypeName; final StringBuilder enumName; final Class<T> returnType; /* Make sure the element at the index is a String. */ element = this.getString(index); if (element == null) { return null; } /* Get the package, class, and enum names. */ splitValues = element.split("\\."); numberOfValues = splitValues.length; returnTypeName = new StringBuilder(); enumName = new StringBuilder(); for (int i = 0; i < numberOfValues; i++) { if (i == (numberOfValues - 1)) { /* * If it is the last split value then it should be the name of * the Enum since dots are not allowed in enum names. */ enumName.append(splitValues[i]); } else if (i == (numberOfValues - 2)) { /* * If it is the penultimate split value then it should be the * end of the package/enum type and not need a dot appended to * it. */ returnTypeName.append(splitValues[i]); } else { /* * Must be part of the package/enum type and will need a dot * appended to it since they got removed in the split. */ returnTypeName.append(splitValues[i]); returnTypeName.append("."); } } /* Use the package/class and enum names to get the Enum<T>. */ returnType = (Class<T>)Class.forName(returnTypeName.toString()); returnable = Enum.valueOf(returnType, enumName.toString()); return returnable; } /** * A convenience method that assumes there is a Number or String value at * the given index. * * @param index represents where the value is expected to be at. 
* @return the value at the index provided cast to a float. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal or if the Number represents the double or float * Infinity or NaN. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Number */ public Float getFloat(final int index) { Object returnable = this.get(index); if (returnable == null) { return null; } if (returnable instanceof String) { /* A String can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return ((Number)returnable).floatValue(); } /** * A convenience method that assumes there is a Number or String value at * the given index. * * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a int. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal or if the Number represents the double or float * Infinity or NaN. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Number */ public Integer getInteger(final int index) { Object returnable = this.get(index); if (returnable == null) { return null; } if (returnable instanceof String) { /* A String can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return ((Number)returnable).intValue(); } /** * A convenience method that assumes there is a Number or String value at * the given index. * * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a long. * @throws ClassCastException if there was a value but didn't match the * assumed return type. 
* @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal or if the Number represents the double or float * Infinity or NaN. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Number */ public Long getLong(final int index) { Object returnable = this.get(index); if (returnable == null) { return null; } if (returnable instanceof String) { /* A String can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return ((Number)returnable).longValue(); } /** * A convenience method that assumes there is a Map value at the given * index. * * @param <T> the kind of map to expect at the index. Note unless manually * added, Map values will be a JsonObject. * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a Map. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. * @see Map */ @SuppressWarnings("unchecked") public <T extends Map<?, ?>> T getMap(final int index) { /* * The unchecked warning is suppressed because there is no way of * guaranteeing at compile time the cast will work. */ return (T)this.get(index); } /** * A convenience method that assumes there is a Number or String value at * the given index. * * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a short. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws NumberFormatException if a String isn't a valid representation of * a BigDecimal or if the Number represents the double or float * Infinity or NaN. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. 
* @see Number */ public Short getShort(final int index) { Object returnable = this.get(index); if (returnable == null) { return null; } if (returnable instanceof String) { /* A String can be used to construct a BigDecimal. */ returnable = new BigDecimal((String)returnable); } return ((Number)returnable).shortValue(); } /** * A convenience method that assumes there is a Boolean, Number, or String * value at the given index. * * @param index represents where the value is expected to be at. * @return the value at the index provided cast to a String. * @throws ClassCastException if there was a value but didn't match the * assumed return type. * @throws IndexOutOfBoundsException if the index is outside of the range of * element indexes in the JsonArray. */ public String getString(final int index) { Object returnable = this.get(index); if (returnable instanceof Boolean) { returnable = returnable.toString(); } else if (returnable instanceof Number) { returnable = returnable.toString(); } return (String)returnable; } /* * (non-Javadoc) * @see org.apache.camel.util.json.Jsonable#asJsonString() */ @Override public String toJson() { final StringWriter writable = new StringWriter(); try { this.toJson(writable); } catch (final IOException caught) { /* See java.io.StringWriter. */ } return writable.toString(); } /* * (non-Javadoc) * @see org.apache.camel.util.json.Jsonable#toJsonString(java.io.Writer) */ @Override public void toJson(final Writer writable) throws IOException { boolean isFirstElement = true; final Iterator<Object> elements = this.iterator(); writable.write('['); while (elements.hasNext()) { if (isFirstElement) { isFirstElement = false; } else { writable.write(','); } writable.write(Jsoner.serialize(elements.next())); } writable.write(']'); } }
objectiser/camel
tooling/camel-util-json/src/main/java/org/apache/camel/util/json/JsonArray.java
Java
apache-2.0
19,042
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portlet.notice.util; import javax.portlet.PortletRequest; import javax.servlet.http.HttpServletRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.stereotype.Component; @Component("usernameFinder") public final class UsernameFinder { @Value("${UsernameFinder.unauthenticatedUsername}") private String unauthenticatedUsername = "guest"; private Logger logger = LoggerFactory.getLogger(getClass()); /** * @deprecated Prefer interactions that are not based on the Portlet API */ @Deprecated public String findUsername(PortletRequest req) { return req.getRemoteUser() != null ? 
req.getRemoteUser() : unauthenticatedUsername; } /** * @since 4.0 */ public String findUsername(HttpServletRequest request) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); logger.trace("Processing the following Authentication object: {}", authentication); final String rslt = (String) authentication.getPrincipal(); logger.debug("Found username '{}' based on the contents of the SecurityContextHolder", rslt); // Identification based on Spring Security is required to access Servlet-based APIs if (rslt == null) { throw new SecurityException("User not identified"); } return rslt; } /** * @deprecated Prefer interactions that are not based on the Portlet API */ @Deprecated public boolean isAuthenticated(PortletRequest req) { return !findUsername(req).equalsIgnoreCase(unauthenticatedUsername); } public boolean isAuthenticated(HttpServletRequest request) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); logger.trace("Processing the following Authentication object: {}", authentication); return authentication != null && authentication.isAuthenticated(); } }
Jasig/NotificationPortlet
notification-portlet-webapp/src/main/java/org/jasig/portlet/notice/util/UsernameFinder.java
Java
apache-2.0
3,039
import { registerBidder } from '../src/adapters/bidderFactory.js'; import { BANNER, NATIVE, VIDEO } from '../src/mediaTypes.js'; import * as utils from '../src/utils.js'; import { config } from '../src/config.js'; const BIDDER_CODE = 'gothamads'; const ACCOUNTID_MACROS = '[account_id]'; const URL_ENDPOINT = `https://us-e-node1.gothamads.com/bid?pass=${ACCOUNTID_MACROS}&integration=prebidjs`; const NATIVE_ASSET_IDS = { 0: 'title', 2: 'icon', 3: 'image', 5: 'sponsoredBy', 4: 'body', 1: 'cta' }; const NATIVE_PARAMS = { title: { id: 0, name: 'title' }, icon: { id: 2, type: 1, name: 'img' }, image: { id: 3, type: 3, name: 'img' }, sponsoredBy: { id: 5, name: 'data', type: 1 }, body: { id: 4, name: 'data', type: 2 }, cta: { id: 1, type: 12, name: 'data' } }; const NATIVE_VERSION = '1.2'; export const spec = { code: BIDDER_CODE, supportedMediaTypes: [BANNER, VIDEO, NATIVE], /** * Determines whether or not the given bid request is valid. * * @param {object} bid The bid to validate. * @return boolean True if this is a valid bid, and false otherwise. */ isBidRequestValid: (bid) => { return Boolean(bid.params.accountId) && Boolean(bid.params.placementId) }, /** * Make a server request from the list of BidRequests. * * @param {BidRequest[]} validBidRequests A non-empty list of valid bid requests that should be sent to the Server. * @return ServerRequest Info describing the request to the server. 
*/ buildRequests: (validBidRequests, bidderRequest) => { if (validBidRequests && validBidRequests.length === 0) return [] let accuontId = validBidRequests[0].params.accountId; const endpointURL = URL_ENDPOINT.replace(ACCOUNTID_MACROS, accuontId); let winTop = window; let location; try { location = new URL(bidderRequest.refererInfo.referer) winTop = window.top; } catch (e) { location = winTop.location; utils.logMessage(e); }; let bids = []; for (let bidRequest of validBidRequests) { let impObject = prepareImpObject(bidRequest); let data = { id: bidRequest.bidId, test: config.getConfig('debug') ? 1 : 0, cur: ['USD'], device: { w: winTop.screen.width, h: winTop.screen.height, language: (navigator && navigator.language) ? navigator.language.indexOf('-') != -1 ? navigator.language.split('-')[0] : navigator.language : '', }, site: { page: location.pathname, host: location.host }, source: { tid: bidRequest.transactionId }, regs: { coppa: config.getConfig('coppa') === true ? 1 : 0, ext: {} }, tmax: bidRequest.timeout, imp: [impObject], }; if (bidRequest.gdprConsent && bidRequest.gdprConsent.gdprApplies) { utils.deepSetValue(data, 'regs.ext.gdpr', bidRequest.gdprConsent.gdprApplies ? 1 : 0); utils.deepSetValue(data, 'user.ext.consent', bidRequest.gdprConsent.consentString); } if (bidRequest.uspConsent !== undefined) { utils.deepSetValue(data, 'regs.ext.us_privacy', bidRequest.uspConsent); } bids.push(data) } return { method: 'POST', url: endpointURL, data: bids }; }, /** * Unpack the response from the server into a list of bids. * * @param {*} serverResponse A successful response from the server. * @return {Bid[]} An array of bids which were nested inside the server. */ interpretResponse: (serverResponse) => { if (!serverResponse || !serverResponse.body) return [] let GothamAdsResponse = serverResponse.body; let bids = []; for (let response of GothamAdsResponse) { let mediaType = response.seatbid[0].bid[0].ext && response.seatbid[0].bid[0].ext.mediaType ? 
response.seatbid[0].bid[0].ext.mediaType : BANNER; let bid = { requestId: response.id, cpm: response.seatbid[0].bid[0].price, width: response.seatbid[0].bid[0].w, height: response.seatbid[0].bid[0].h, ttl: response.ttl || 1200, currency: response.cur || 'USD', netRevenue: true, creativeId: response.seatbid[0].bid[0].crid, dealId: response.seatbid[0].bid[0].dealid, mediaType: mediaType }; bid.meta = {}; if (response.seatbid[0].bid[0].adomain && response.seatbid[0].bid[0].adomain.length > 0) { bid.meta.advertiserDomains = response.seatbid[0].bid[0].adomain; } switch (mediaType) { case VIDEO: bid.vastXml = response.seatbid[0].bid[0].adm; bid.vastUrl = response.seatbid[0].bid[0].ext.vastUrl; break; case NATIVE: bid.native = parseNative(response.seatbid[0].bid[0].adm); break; default: bid.ad = response.seatbid[0].bid[0].adm; } bids.push(bid); } return bids; }, }; /** * Determine type of request * * @param bidRequest * @param type * @returns {boolean} */ const checkRequestType = (bidRequest, type) => { return (typeof utils.deepAccess(bidRequest, `mediaTypes.${type}`) !== 'undefined'); } const parseNative = admObject => { const { assets, link, imptrackers, jstracker } = admObject.native; const result = { clickUrl: link.url, clickTrackers: link.clicktrackers || undefined, impressionTrackers: imptrackers || undefined, javascriptTrackers: jstracker ? 
[jstracker] : undefined }; assets.forEach(asset => { const kind = NATIVE_ASSET_IDS[asset.id]; const content = kind && asset[NATIVE_PARAMS[kind].name]; if (content) { result[kind] = content.text || content.value || { url: content.url, width: content.w, height: content.h }; } }); return result; } const prepareImpObject = (bidRequest) => { let impObject = { id: bidRequest.transactionId, secure: 1, ext: { placementId: bidRequest.params.placementId } }; if (checkRequestType(bidRequest, BANNER)) { impObject.banner = addBannerParameters(bidRequest); } if (checkRequestType(bidRequest, VIDEO)) { impObject.video = addVideoParameters(bidRequest); } if (checkRequestType(bidRequest, NATIVE)) { impObject.native = { ver: NATIVE_VERSION, request: addNativeParameters(bidRequest) }; } return impObject }; const addNativeParameters = bidRequest => { let impObject = { id: bidRequest.transactionId, ver: NATIVE_VERSION, }; const assets = utils._map(bidRequest.mediaTypes.native, (bidParams, key) => { const props = NATIVE_PARAMS[key]; const asset = { required: bidParams.required & 1, }; if (props) { asset.id = props.id; let wmin, hmin; let aRatios = bidParams.aspect_ratios; if (aRatios && aRatios[0]) { aRatios = aRatios[0]; wmin = aRatios.min_width || 0; hmin = aRatios.ratio_height * wmin / aRatios.ratio_width | 0; } if (bidParams.sizes) { const sizes = flatten(bidParams.sizes); wmin = sizes[0]; hmin = sizes[1]; } asset[props.name] = {} if (bidParams.len) asset[props.name]['len'] = bidParams.len; if (props.type) asset[props.name]['type'] = props.type; if (wmin) asset[props.name]['wmin'] = wmin; if (hmin) asset[props.name]['hmin'] = hmin; return asset; } }).filter(Boolean); impObject.assets = assets; return impObject } const addBannerParameters = (bidRequest) => { let bannerObject = {}; const size = parseSizes(bidRequest, 'banner'); bannerObject.w = size[0]; bannerObject.h = size[1]; return bannerObject; }; const parseSizes = (bid, mediaType) => { let mediaTypes = bid.mediaTypes; if 
(mediaType === 'video') { let size = []; if (mediaTypes.video && mediaTypes.video.w && mediaTypes.video.h) { size = [ mediaTypes.video.w, mediaTypes.video.h ]; } else if (Array.isArray(utils.deepAccess(bid, 'mediaTypes.video.playerSize')) && bid.mediaTypes.video.playerSize.length === 1) { size = bid.mediaTypes.video.playerSize[0]; } else if (Array.isArray(bid.sizes) && bid.sizes.length > 0 && Array.isArray(bid.sizes[0]) && bid.sizes[0].length > 1) { size = bid.sizes[0]; } return size; } let sizes = []; if (Array.isArray(mediaTypes.banner.sizes)) { sizes = mediaTypes.banner.sizes[0]; } else if (Array.isArray(bid.sizes) && bid.sizes.length > 0) { sizes = bid.sizes } else { utils.logWarn('no sizes are setup or found'); } return sizes } const addVideoParameters = (bidRequest) => { let videoObj = {}; let supportParamsList = ['mimes', 'minduration', 'maxduration', 'protocols', 'startdelay', 'placement', 'skip', 'skipafter', 'minbitrate', 'maxbitrate', 'delivery', 'playbackmethod', 'api', 'linearity'] for (let param of supportParamsList) { if (bidRequest.mediaTypes.video[param] !== undefined) { videoObj[param] = bidRequest.mediaTypes.video[param]; } } const size = parseSizes(bidRequest, 'video'); videoObj.w = size[0]; videoObj.h = size[1]; return videoObj; } const flatten = arr => { return [].concat(...arr); } registerBidder(spec);
tchibirev/Prebid.js
modules/gothamadsBidAdapter.js
JavaScript
apache-2.0
9,383
<?php /** * @category SchumacherFM * @package SchumacherFM_FastIndexer * @copyright Copyright (c) http://www.schumacher.fm * @license see LICENSE.md file * @author Cyrill at Schumacher dot fm @SchumacherFM */ class SchumacherFM_FastIndexer_Model_Lock_Session extends SchumacherFM_FastIndexer_Model_Lock_Abstract implements SchumacherFM_FastIndexer_Model_Lock_LockInterface { const SESS_PREFIX = 'fastindexer_'; /** * @var Mage_Core_Model_Resource_Session */ protected $_session = null; /** * @return Mage_Core_Model_Resource_Session */ public function getSession() { if (null !== $this->_session) { return $this->_session; } $this->_session = Mage::getResourceSingleton('core/session'); return $this->_session; } /** * Lock process without blocking. * This method allow protect multiple process running and fast lock validation. * */ public function lock() { $this->getSession()->write(self::SESS_PREFIX . $this->getIndexerCode(), microtime(true)); } /** * Lock and block process. * If new instance of the process will try validate locking state * script will wait until process will be unlocked * */ public function lockAndBlock() { $this->lock(); } /** * Unlock process * * @return Mage_Index_Model_Process */ public function unlock() { $this->getSession()->destroy(self::SESS_PREFIX . $this->getIndexerCode()); } /** * Check if process is locked * * @return bool */ public function isLocked() { $startTime = (double)$this->getSession()->read(self::SESS_PREFIX . $this->getIndexerCode()); if ($startTime < 0.0001) { return false; } return $this->_isLockedByTtl($startTime); } }
mssyogi/Magento-FastIndexer
src/app/code/community/SchumacherFM/FastIndexer/Model/Lock/Session.php
PHP
apache-2.0
1,917
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.input; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.FluentIterable; import com.google.common.collect.Iterators; import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.AbstractInputSource; import org.apache.druid.data.input.InputFileAttribute; import org.apache.druid.data.input.InputFormat; import org.apache.druid.data.input.InputRowSchema; import org.apache.druid.data.input.InputSourceReader; import org.apache.druid.data.input.InputSplit; import org.apache.druid.data.input.MaxSizeSplitHintSpec; import org.apache.druid.data.input.SegmentsSplitHintSpec; import org.apache.druid.data.input.SplitHintSpec; import org.apache.druid.data.input.impl.InputEntityIteratingReader; import org.apache.druid.data.input.impl.SplittableInputSource; import org.apache.druid.indexing.common.ReingestionTimelineUtils; import 
org.apache.druid.indexing.common.RetryPolicy; import org.apache.druid.indexing.common.RetryPolicyFactory; import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.indexing.firehose.WindowedSegmentId; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.guava.Comparators; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.loading.SegmentLoader; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; import org.apache.druid.timeline.partition.PartitionHolder; import org.apache.druid.utils.Streams; import org.joda.time.Duration; import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.Stream; public class DruidInputSource extends AbstractInputSource implements SplittableInputSource<List<WindowedSegmentId>> { private static final Logger LOG = new Logger(DruidInputSource.class); /** * A Comparator that orders {@link WindowedSegmentId} mainly by segmentId (which is important), and then by intervals * (which is arbitrary, and only here for totality of ordering). 
*/ private static final Comparator<WindowedSegmentId> WINDOWED_SEGMENT_ID_COMPARATOR = Comparator.comparing(WindowedSegmentId::getSegmentId) .thenComparing(windowedSegmentId -> windowedSegmentId.getIntervals().size()) .thenComparing( (WindowedSegmentId a, WindowedSegmentId b) -> { // Same segmentId, same intervals list size. Compare each interval. int cmp = 0; for (int i = 0; i < a.getIntervals().size(); i++) { cmp = Comparators.intervalsByStartThenEnd() .compare(a.getIntervals().get(i), b.getIntervals().get(i)); if (cmp != 0) { return cmp; } } return cmp; } ); private final String dataSource; // Exactly one of interval and segmentIds should be non-null. Typically 'interval' is specified directly // by the user creating this firehose and 'segmentIds' is used for sub-tasks if it is split for parallel // batch ingestion. @Nullable private final Interval interval; @Nullable private final List<WindowedSegmentId> segmentIds; private final DimFilter dimFilter; private final List<String> dimensions; private final List<String> metrics; private final IndexIO indexIO; private final CoordinatorClient coordinatorClient; private final SegmentLoaderFactory segmentLoaderFactory; private final RetryPolicyFactory retryPolicyFactory; @JsonCreator public DruidInputSource( @JsonProperty("dataSource") final String dataSource, @JsonProperty("interval") @Nullable Interval interval, // Specifying "segments" is intended only for when this FirehoseFactory has split itself, // not for direct end user use. 
@JsonProperty("segments") @Nullable List<WindowedSegmentId> segmentIds, @JsonProperty("filter") DimFilter dimFilter, @Nullable @JsonProperty("dimensions") List<String> dimensions, @Nullable @JsonProperty("metrics") List<String> metrics, @JacksonInject IndexIO indexIO, @JacksonInject CoordinatorClient coordinatorClient, @JacksonInject SegmentLoaderFactory segmentLoaderFactory, @JacksonInject RetryPolicyFactory retryPolicyFactory ) { Preconditions.checkNotNull(dataSource, "dataSource"); if ((interval == null && segmentIds == null) || (interval != null && segmentIds != null)) { throw new IAE("Specify exactly one of 'interval' and 'segments'"); } this.dataSource = dataSource; this.interval = interval; this.segmentIds = segmentIds; this.dimFilter = dimFilter; this.dimensions = dimensions; this.metrics = metrics; this.indexIO = Preconditions.checkNotNull(indexIO, "null IndexIO"); this.coordinatorClient = Preconditions.checkNotNull(coordinatorClient, "null CoordinatorClient"); this.segmentLoaderFactory = Preconditions.checkNotNull(segmentLoaderFactory, "null SegmentLoaderFactory"); this.retryPolicyFactory = Preconditions.checkNotNull(retryPolicyFactory, "null RetryPolicyFactory"); } @JsonProperty public String getDataSource() { return dataSource; } @Nullable @JsonProperty public Interval getInterval() { return interval; } @Nullable @JsonProperty("segments") @JsonInclude(Include.NON_NULL) public List<WindowedSegmentId> getSegmentIds() { return segmentIds; } @JsonProperty("filter") public DimFilter getDimFilter() { return dimFilter; } @JsonProperty public List<String> getDimensions() { return dimensions; } @JsonProperty public List<String> getMetrics() { return metrics; } @Override protected InputSourceReader fixedFormatReader(InputRowSchema inputRowSchema, @Nullable File temporaryDirectory) { final SegmentLoader segmentLoader = segmentLoaderFactory.manufacturate(temporaryDirectory); final List<TimelineObjectHolder<String, DataSegment>> timeline = createTimeline(); final 
Iterator<DruidSegmentInputEntity> entityIterator = FluentIterable .from(timeline) .transformAndConcat(holder -> { //noinspection ConstantConditions final PartitionHolder<DataSegment> partitionHolder = holder.getObject(); //noinspection ConstantConditions return FluentIterable .from(partitionHolder) .transform(chunk -> new DruidSegmentInputEntity(segmentLoader, chunk.getObject(), holder.getInterval())); }).iterator(); final List<String> effectiveDimensions = ReingestionTimelineUtils.getDimensionsToReingest( dimensions, inputRowSchema.getDimensionsSpec(), timeline ); List<String> effectiveMetrics; if (metrics == null) { effectiveMetrics = ReingestionTimelineUtils.getUniqueMetrics(timeline); } else { effectiveMetrics = metrics; } final DruidSegmentInputFormat inputFormat = new DruidSegmentInputFormat( indexIO, dimFilter, effectiveDimensions, effectiveMetrics ); return new InputEntityIteratingReader( inputRowSchema, inputFormat, entityIterator, temporaryDirectory ); } private List<TimelineObjectHolder<String, DataSegment>> createTimeline() { if (interval == null) { return getTimelineForSegmentIds(coordinatorClient, dataSource, segmentIds); } else { return getTimelineForInterval(coordinatorClient, retryPolicyFactory, dataSource, interval); } } @Override public Stream<InputSplit<List<WindowedSegmentId>>> createSplits( InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec ) { // segmentIds is supposed to be specified by the supervisor task during the parallel indexing. // If it's not null, segments are already split by the supervisor task and further split won't happen. if (segmentIds == null) { return Streams.sequentialStreamFrom( createSplits( coordinatorClient, retryPolicyFactory, dataSource, interval, splitHintSpec == null ? 
SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec ) ); } else { return Stream.of(new InputSplit<>(segmentIds)); } } @Override public int estimateNumSplits(InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec) { // segmentIds is supposed to be specified by the supervisor task during the parallel indexing. // If it's not null, segments are already split by the supervisor task and further split won't happen. if (segmentIds == null) { return Iterators.size( createSplits( coordinatorClient, retryPolicyFactory, dataSource, interval, splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec ) ); } else { return 1; } } @Override public SplittableInputSource<List<WindowedSegmentId>> withSplit(InputSplit<List<WindowedSegmentId>> split) { return new DruidInputSource( dataSource, null, split.get(), dimFilter, dimensions, metrics, indexIO, coordinatorClient, segmentLoaderFactory, retryPolicyFactory ); } @Override public boolean needsFormat() { return false; } public static Iterator<InputSplit<List<WindowedSegmentId>>> createSplits( CoordinatorClient coordinatorClient, RetryPolicyFactory retryPolicyFactory, String dataSource, Interval interval, SplitHintSpec splitHintSpec ) { final SplitHintSpec convertedSplitHintSpec; if (splitHintSpec instanceof SegmentsSplitHintSpec) { final SegmentsSplitHintSpec segmentsSplitHintSpec = (SegmentsSplitHintSpec) splitHintSpec; convertedSplitHintSpec = new MaxSizeSplitHintSpec( segmentsSplitHintSpec.getMaxInputSegmentBytesPerTask(), segmentsSplitHintSpec.getMaxNumSegments() ); } else { convertedSplitHintSpec = splitHintSpec; } final List<TimelineObjectHolder<String, DataSegment>> timelineSegments = getTimelineForInterval( coordinatorClient, retryPolicyFactory, dataSource, interval ); final Map<WindowedSegmentId, Long> segmentIdToSize = createWindowedSegmentIdFromTimeline(timelineSegments); //noinspection ConstantConditions return Iterators.transform( convertedSplitHintSpec.split( // 
segmentIdToSize is sorted by segment ID; useful for grouping up segments from the same time chunk into // the same input split. segmentIdToSize.keySet().iterator(), segmentId -> new InputFileAttribute( Preconditions.checkNotNull(segmentIdToSize.get(segmentId), "segment size for [%s]", segmentId) ) ), InputSplit::new ); } /** * Returns a map of {@link WindowedSegmentId} to size, sorted by {@link WindowedSegmentId#getSegmentId()}. */ private static SortedMap<WindowedSegmentId, Long> createWindowedSegmentIdFromTimeline( List<TimelineObjectHolder<String, DataSegment>> timelineHolders ) { Map<DataSegment, WindowedSegmentId> windowedSegmentIds = new HashMap<>(); for (TimelineObjectHolder<String, DataSegment> holder : timelineHolders) { for (PartitionChunk<DataSegment> chunk : holder.getObject()) { windowedSegmentIds.computeIfAbsent( chunk.getObject(), segment -> new WindowedSegmentId(segment.getId().toString(), new ArrayList<>()) ).addInterval(holder.getInterval()); } } // It is important to create this map after windowedSegmentIds is completely filled, because WindowedSegmentIds // can be updated while being constructed. (Intervals are added.) SortedMap<WindowedSegmentId, Long> segmentSizeMap = new TreeMap<>(WINDOWED_SEGMENT_ID_COMPARATOR); windowedSegmentIds.forEach((segment, segmentId) -> segmentSizeMap.put(segmentId, segment.getSize())); return segmentSizeMap; } public static List<TimelineObjectHolder<String, DataSegment>> getTimelineForInterval( CoordinatorClient coordinatorClient, RetryPolicyFactory retryPolicyFactory, String dataSource, Interval interval ) { Preconditions.checkNotNull(interval); // This call used to use the TaskActionClient, so for compatibility we use the same retry configuration // as TaskActionClient. 
final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy(); Collection<DataSegment> usedSegments; while (true) { try { usedSegments = coordinatorClient.fetchUsedSegmentsInDataSourceForIntervals( dataSource, Collections.singletonList(interval) ); break; } catch (Throwable e) { LOG.warn(e, "Exception getting database segments"); final Duration delay = retryPolicy.getAndIncrementRetryDelay(); if (delay == null) { throw e; } else { final long sleepTime = jitter(delay.getMillis()); LOG.info("Will try again in [%s].", new Duration(sleepTime).toString()); try { Thread.sleep(sleepTime); } catch (InterruptedException e2) { throw new RuntimeException(e2); } } } } return VersionedIntervalTimeline.forSegments(usedSegments).lookup(interval); } public static List<TimelineObjectHolder<String, DataSegment>> getTimelineForSegmentIds( CoordinatorClient coordinatorClient, String dataSource, List<WindowedSegmentId> segmentIds ) { final SortedMap<Interval, TimelineObjectHolder<String, DataSegment>> timeline = new TreeMap<>( Comparators.intervalsByStartThenEnd() ); for (WindowedSegmentId windowedSegmentId : Preconditions.checkNotNull(segmentIds, "segmentIds")) { final DataSegment segment = coordinatorClient.fetchUsedSegment( dataSource, windowedSegmentId.getSegmentId() ); for (Interval interval : windowedSegmentId.getIntervals()) { final TimelineObjectHolder<String, DataSegment> existingHolder = timeline.get(interval); if (existingHolder != null) { if (!existingHolder.getVersion().equals(segment.getVersion())) { throw new ISE("Timeline segments with the same interval should have the same version: " + "existing version[%s] vs new segment[%s]", existingHolder.getVersion(), segment); } existingHolder.getObject().add(segment.getShardSpec().createChunk(segment)); } else { timeline.put( interval, new TimelineObjectHolder<>( interval, segment.getInterval(), segment.getVersion(), new PartitionHolder<>(segment.getShardSpec().createChunk(segment)) ) ); } } } // Validate that none of the 
given windows overlaps (except for when multiple segments share exactly the // same interval). Interval lastInterval = null; for (Interval interval : timeline.keySet()) { if (lastInterval != null && interval.overlaps(lastInterval)) { throw new IAE( "Distinct intervals in input segments may not overlap: [%s] vs [%s]", lastInterval, interval ); } lastInterval = interval; } return new ArrayList<>(timeline.values()); } private static long jitter(long input) { final double jitter = ThreadLocalRandom.current().nextGaussian() * input / 4.0; long retval = input + (long) jitter; return retval < 0 ? 0 : retval; } }
gianm/druid
indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java
Java
apache-2.0
17,743
// // immer: immutable data structures for C++ // Copyright (C) 2016, 2017, 2018 Juan Pedro Bolivar Puente // // This software is distributed under the Boost Software License, Version 1.0. // See accompanying file LICENSE or copy at http://boost.org/LICENSE_1_0.txt // #include "fuzzer_gc_guard.hpp" #include "fuzzer_input.hpp" #include <immer/heap/gc_heap.hpp> #include <immer/refcount/no_refcount_policy.hpp> #include <immer/set.hpp> #include <immer/algorithm.hpp> #include <array> using gc_memory = immer::memory_policy<immer::heap_policy<immer::gc_heap>, immer::no_refcount_policy, immer::default_lock_policy, immer::gc_transience_policy, false>; struct colliding_hash_t { std::size_t operator()(std::size_t x) const { return x & ~15; } }; extern "C" int LLVMFuzzerTestOneInput(const std::uint8_t* data, std::size_t size) { auto guard = fuzzer_gc_guard{}; constexpr auto var_count = 4; using set_t = immer::set<size_t, colliding_hash_t, std::equal_to<>, gc_memory>; auto vars = std::array<set_t, var_count>{}; auto is_valid_var = [&](auto idx) { return idx >= 0 && idx < var_count; }; return fuzzer_input{data, size}.run([&](auto& in) { enum ops { op_insert, op_erase, op_insert_move, op_erase_move, op_iterate }; auto src = read<char>(in, is_valid_var); auto dst = read<char>(in, is_valid_var); switch (read<char>(in)) { case op_insert: { auto value = read<size_t>(in); vars[dst] = vars[src].insert(value); break; } case op_erase: { auto value = read<size_t>(in); vars[dst] = vars[src].erase(value); break; } case op_insert_move: { auto value = read<size_t>(in); vars[dst] = std::move(vars[src]).insert(value); break; } case op_erase_move: { auto value = read<size_t>(in); vars[dst] = std::move(vars[src]).erase(value); break; } case op_iterate: { auto srcv = vars[src]; immer::for_each(srcv, [&](auto&& v) { vars[dst] = std::move(vars[dst]).insert(v); }); break; } default: break; }; return true; }); }
arangodb/arangodb
3rdParty/immer/v0.7.0/extra/fuzzer/set-gc.cpp
C++
apache-2.0
2,591
package pod

import (
	"fmt"
	"strings"

	lru "github.com/hashicorp/golang-lru"
	"github.com/rancher/norman/api/access"
	"github.com/rancher/norman/types"
	"github.com/rancher/norman/types/values"
	"github.com/rancher/rancher/pkg/controllers/managementagent/workload"
	"github.com/rancher/rancher/pkg/ref"
	schema "github.com/rancher/rancher/pkg/schemas/project.cattle.io/v3"
	"github.com/sirupsen/logrus"
)

var (
	// ownerCache memoizes resolved owners keyed by
	// (subcontext, namespace, kind, name). Negative results are cached too,
	// so repeated lookups of ownerless objects skip the API round trip.
	ownerCache, _ = lru.New(100000)
)

// key identifies an object within a specific API subcontext for cache lookup.
type key struct {
	SubContext string
	Namespace  string
	Kind       string
	Name       string
}

// value is the cached controlling owner (empty strings mean "no workload owner").
type value struct {
	Kind string
	Name string
}

// getOwnerWithKind resolves the controlling owner of the object identified by
// (namespace, ownerKind, name), consulting ownerCache first and otherwise
// fetching the object via the API and reading its ownerReferences. Owners
// whose kind is not a known workload kind are reported as empty strings.
func getOwnerWithKind(apiContext *types.APIContext, namespace, ownerKind, name string) (string, string, error) {
	// Prefer the project subcontext, fall back to the cluster one.
	subContext := apiContext.SubContext["/v3/schemas/project"]
	if subContext == "" {
		subContext = apiContext.SubContext["/v3/schemas/cluster"]
	}
	if subContext == "" {
		logrus.Warnf("failed to find subcontext to lookup replicaSet owner")
		return "", "", nil
	}

	key := key{
		SubContext: subContext,
		Namespace:  namespace,
		Kind:       strings.ToLower(ownerKind),
		Name:       name,
	}

	val, ok := ownerCache.Get(key)
	if ok {
		value, _ := val.(value)
		return value.Kind, value.Name, nil
	}

	data := map[string]interface{}{}
	if err := access.ByID(apiContext, &schema.Version, ownerKind, ref.FromStrings(namespace, name), &data); err != nil {
		return "", "", err
	}

	kind, name := getOwner(data)
	// Only workload kinds are meaningful owners; cache anything else as
	// "no owner" so the lookup is not repeated.
	if !workload.WorkloadKinds[kind] {
		kind = ""
		name = ""
	}

	ownerCache.Add(key, value{
		Kind: kind,
		Name: name,
	})

	return kind, name, nil
}

// getOwner extracts the kind and name of the controlling ownerReference from
// raw object data. Returns empty strings when no reference is marked as the
// controller.
func getOwner(data map[string]interface{}) (string, string) {
	ownerReferences, ok := values.GetSlice(data, "ownerReferences")
	if !ok {
		return "", ""
	}

	for _, ownerReference := range ownerReferences {
		controller, _ := ownerReference["controller"].(bool)
		if !controller {
			continue
		}

		kind, _ := ownerReference["kind"].(string)
		name, _ := ownerReference["name"].(string)
		return kind, name
	}

	return "", ""
}

// SaveOwner pre-populates ownerCache for the object described by kind/name
// using owner data already present in `data`, sparing getOwnerWithKind a
// later API fetch for the same object.
func SaveOwner(apiContext *types.APIContext, kind, name string, data map[string]interface{}) {
	parentKind, parentName := getOwner(data)
	namespace, _ := data["namespaceId"].(string)

	subContext := apiContext.SubContext["/v3/schemas/project"]
	if subContext == "" {
		subContext = apiContext.SubContext["/v3/schemas/cluster"]
	}
	if subContext == "" {
		return
	}

	key := key{
		SubContext: subContext,
		Namespace:  namespace,
		Kind:       strings.ToLower(kind),
		Name:       name,
	}

	ownerCache.Add(key, value{
		Kind: parentKind,
		Name: parentName,
	})
}

// resolveWorkloadID maps a pod's owner to a lowercase workload ID of the form
// "kind:namespace:name". For ReplicaSets and Jobs the chain is followed one
// level further up (presumably to a Deployment/CronJob — the grandparent
// replaces the direct owner when found).
func resolveWorkloadID(apiContext *types.APIContext, data map[string]interface{}) string {
	kind, name := getOwner(data)
	if kind == "" || !workload.WorkloadKinds[kind] {
		return ""
	}

	namespace, _ := data["namespaceId"].(string)

	if ownerKind := strings.ToLower(kind); ownerKind == workload.ReplicaSetType || ownerKind == workload.JobType {
		k, n, err := getOwnerWithKind(apiContext, namespace, ownerKind, name)
		if err != nil {
			return ""
		}
		if k != "" {
			kind, name = k, n
		}
	}

	return strings.ToLower(fmt.Sprintf("%s:%s:%s", kind, namespace, name))
}
rancher/rancher
pkg/api/norman/store/pod/owner.go
GO
apache-2.0
3,140
import java.io.File; import java.io.FilenameFilter; class A { { new java.io.File("aaa").list(new FilenameFilter() { public boolean accept(File dir, String name) { <selection>return false; //To change body of implemented methods use File | Settings | File Templates.</selection> } }); } }
joewalnes/idea-community
java/java-tests/testData/codeInsight/completion/style/AfterNew15-out.java
Java
apache-2.0
329
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.launcher3;

import android.content.ContentValues;
import android.content.Context;

import com.android.launcher3.compat.UserHandleCompat;

import java.util.ArrayList;

/**
 * Represents a folder containing shortcuts or apps.
 *
 * Holds the folder's contents and option flags, and notifies registered
 * {@link FolderListener}s about content/title changes. Mutators here do not
 * touch the database except {@link #setOption}, which persists when given a
 * non-null context.
 */
public class FolderInfo extends ItemInfo {

    public static final int NO_FLAGS = 0x00000000;

    /**
     * The folder is locked in sorted mode
     */
    public static final int FLAG_ITEMS_SORTED = 0x00000001;

    /**
     * It is a work folder
     */
    public static final int FLAG_WORK_FOLDER = 0x00000002;

    /**
     * The multi-page animation has run for this folder
     */
    public static final int FLAG_MULTI_PAGE_ANIMATION = 0x00000004;

    /**
     * Whether this folder has been opened
     */
    public boolean opened;

    // Bitmask of the FLAG_* constants above; read via hasOption, written via setOption.
    public int options;

    /**
     * The apps and shortcuts
     */
    public ArrayList<ShortcutInfo> contents = new ArrayList<ShortcutInfo>();

    // Observers notified in registration order; not thread-safe.
    ArrayList<FolderListener> listeners = new ArrayList<FolderListener>();

    public FolderInfo() {
        itemType = LauncherSettings.Favorites.ITEM_TYPE_FOLDER;
        user = UserHandleCompat.myUserHandle();
    }

    /**
     * Add an app or shortcut. Does not change the DB; notifies listeners
     * (onAdd, then onItemsChanged).
     *
     * @param item
     */
    public void add(ShortcutInfo item, boolean animate) {
        contents.add(item);
        // Indexed loop matches the file's convention for listener fan-out.
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onAdd(item);
        }
        itemsChanged(animate);
    }

    /**
     * Remove an app or shortcut. Does not change the DB.
     *
     * @param item
     */
    public void remove(ShortcutInfo item, boolean animate) {
        contents.remove(item);
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onRemove(item);
        }
        itemsChanged(animate);
    }

    /** Sets the folder title and notifies listeners; does not persist. */
    public void setTitle(CharSequence title) {
        this.title = title;
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onTitleChanged(title);
        }
    }

    @Override
    void onAddToDatabase(Context context, ContentValues values) {
        super.onAddToDatabase(context, values);
        // NOTE(review): title.toString() NPEs if title was never set — callers
        // presumably guarantee a title before DB insertion; confirm.
        values.put(LauncherSettings.Favorites.TITLE, title.toString());
        values.put(LauncherSettings.Favorites.OPTIONS, options);
    }

    public void addListener(FolderListener listener) {
        listeners.add(listener);
    }

    public void removeListener(FolderListener listener) {
        listeners.remove(listener);
    }

    /** Broadcasts onItemsChanged to all listeners. */
    public void itemsChanged(boolean animate) {
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onItemsChanged(animate);
        }
    }

    /** Callback interface for observing folder content/title changes. */
    public interface FolderListener {
        public void onAdd(ShortcutInfo item);
        public void onRemove(ShortcutInfo item);
        public void onTitleChanged(CharSequence title);
        public void onItemsChanged(boolean animate);
    }

    /** Returns true if the given FLAG_* bit is set in {@link #options}. */
    public boolean hasOption(int optionFlag) {
        return (options & optionFlag) != 0;
    }

    /**
     * @param option flag to set or clear
     * @param isEnabled whether to set or clear the flag
     * @param context if not null, save changes to the db.
     */
    public void setOption(int option, boolean isEnabled, Context context) {
        int oldOptions = options;
        if (isEnabled) {
            options |= option;
        } else {
            options &= ~option;
        }
        // Persist only when something actually changed and a context is available.
        if (context != null && oldOptions != options) {
            LauncherModel.updateItemInDatabase(context, this);
        }
    }
}
YAJATapps/FlickLauncher
src/com/android/launcher3/FolderInfo.java
Java
apache-2.0
4,146
/*
 * Copyright 2014 BrightTag, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.brighttag.agathon.dao;

import javax.annotation.Nullable;

import com.google.common.collect.ImmutableSet;

import com.brighttag.agathon.model.CassandraInstance;

/**
 * DAO for Cassandra Instances.
 *
 * Note: the finder methods declare {@link BackingStoreException} while
 * {@link #save} and {@link #delete} do not — implementations are expected to
 * surface write failures unchecked (or not at all).
 *
 * @author codyaray
 * @since 5/12/2012
 */
public interface CassandraInstanceDao {

  /**
   * Returns the set of Cassandra instances in a ring.
   *
   * @param ring name of the Cassandra ring
   * @return set of Cassandra instances in the ring
   * @throws BackingStoreException if there was a problem communicating with the backing store.
   */
  ImmutableSet<CassandraInstance> findAll(String ring) throws BackingStoreException;

  /**
   * Returns the Cassandra instance with the given {@code id} or {@code null} if not found.
   *
   * @param ring name of the Cassandra ring
   * @param id the Cassandra instance ID
   * @return the Cassandra instance or {@code null} if not found
   * @throws BackingStoreException if there was a problem communicating with the backing store.
   */
  @Nullable CassandraInstance findById(String ring, int id) throws BackingStoreException;

  /**
   * Saves the Cassandra {@code instance}.
   *
   * @param ring name of the Cassandra ring
   * @param instance the Cassandra instance
   */
  void save(String ring, CassandraInstance instance);

  /**
   * Deletes the Cassandra {@code instance}.
   *
   * @param ring name of the Cassandra ring
   * @param instance the Cassandra instance
   */
  void delete(String ring, CassandraInstance instance);

}
BrightTag/agathon
agathon-manager/src/main/java/com/brighttag/agathon/dao/CassandraInstanceDao.java
Java
apache-2.0
2,133
/*
 * MainActivity.java
 *
 * Copyright (C) 2013 6 Wunderkinder GmbH.
 *
 * @author Jose L Ugia - @Jl_Ugia
 * @author Antonio Consuegra - @aconsuegra
 * @author Cesar Valiente - @CesarValiente
 * @author Benedikt Lehnert - @blehnert
 * @author Timothy Achumba - @iam_timm
 * @version 1.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.wunderlist.slidinglayersample;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.SharedPreferences;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.KeyEvent;
import android.view.MenuItem;
import android.view.View;
import android.widget.RelativeLayout.LayoutParams;
import android.widget.TextView;

import com.wunderlist.slidinglayer.LayerTransformer;
import com.wunderlist.slidinglayer.SlidingLayer;
import com.wunderlist.slidinglayer.transformer.AlphaTransformer;
import com.wunderlist.slidinglayer.transformer.RotationTransformer;
import com.wunderlist.slidinglayer.transformer.SlideJoyTransformer;

/**
 * Sample activity demonstrating the SlidingLayer widget. Reads the demo's
 * shared preferences on creation and configures the layer's position,
 * transformer, shadow, offset and preview mode accordingly.
 */
public class MainActivity extends Activity {

    private SlidingLayer mSlidingLayer;
    private TextView swipeText;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        bindViews();
        initState();
    }

    @SuppressLint("NewApi")
    @Override
    protected void onResume() {
        super.onResume();
        // getActionBar() requires API 11 (Honeycomb); guarded so older
        // devices skip the up-arrow configuration.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            getActionBar().setDisplayHomeAsUpEnabled(true);
        }
    }

    /**
     * View binding
     */
    private void bindViews() {
        mSlidingLayer = (SlidingLayer) findViewById(R.id.slidingLayer1);
        swipeText = (TextView) findViewById(R.id.swipeText);
    }

    /**
     * Initializes the origin state of the layer
     */
    private void initState() {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);

        setupSlidingLayerPosition(prefs.getString("layer_location", "right"));
        setupSlidingLayerTransform(prefs.getString("layer_transform", "none"));

        setupShadow(prefs.getBoolean("layer_has_shadow", false));
        setupLayerOffset(prefs.getBoolean("layer_has_offset", false));
        setupPreviewMode(prefs.getBoolean("preview_mode_enabled", false));
    }

    /**
     * Anchors the layer to one screen edge ("right", "left", "top", or any
     * other value for bottom) and updates the hint text/rocket drawable to
     * match. Top/bottom positions also switch the layer to full width.
     * NOTE(review): getResources().getDrawable(int) is deprecated since
     * API 22 — presumably kept for the sample's minSdk; confirm before
     * modernizing.
     */
    private void setupSlidingLayerPosition(String layerPosition) {

        LayoutParams rlp = (LayoutParams) mSlidingLayer.getLayoutParams();
        int textResource;
        Drawable d;

        switch (layerPosition) {
        case "right":
            textResource = R.string.swipe_right_label;
            d = getResources().getDrawable(R.drawable.container_rocket_right);

            mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_RIGHT);
            break;

        case "left":
            textResource = R.string.swipe_left_label;
            d = getResources().getDrawable(R.drawable.container_rocket_left);

            mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_LEFT);
            break;

        case "top":
            textResource = R.string.swipe_up_label;
            d = getResources().getDrawable(R.drawable.container_rocket);

            mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_TOP);
            rlp.width = LayoutParams.MATCH_PARENT;
            rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size);
            break;

        default:
            textResource = R.string.swipe_down_label;
            d = getResources().getDrawable(R.drawable.container_rocket);

            mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_BOTTOM);
            rlp.width = LayoutParams.MATCH_PARENT;
            rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size);
        }

        d.setBounds(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight());
        swipeText.setCompoundDrawables(null, d, null, null);
        swipeText.setText(getResources().getString(textResource));
        mSlidingLayer.setLayoutParams(rlp);
    }

    /**
     * Installs the LayerTransformer matching the preference value; any
     * unrecognized value (e.g. "none") leaves the current transformer alone.
     */
    private void setupSlidingLayerTransform(String layerTransform) {

        LayerTransformer transformer;

        switch (layerTransform) {
        case "alpha":
            transformer = new AlphaTransformer();
            break;
        case "rotation":
            transformer = new RotationTransformer();
            break;
        case "slide":
            transformer = new SlideJoyTransformer();
            break;
        default:
            return;
        }
        mSlidingLayer.setLayerTransformer(transformer);
    }

    /** Enables or clears the layer's edge shadow. */
    private void setupShadow(boolean enabled) {
        if (enabled) {
            mSlidingLayer.setShadowSizeRes(R.dimen.shadow_size);
            mSlidingLayer.setShadowDrawable(R.drawable.sidebar_shadow);
        } else {
            mSlidingLayer.setShadowSize(0);
            mSlidingLayer.setShadowDrawable(null);
        }
    }

    /** Applies the closed-state offset distance (0 disables it). */
    private void setupLayerOffset(boolean enabled) {
        int offsetDistance = enabled ? getResources().getDimensionPixelOffset(R.dimen.offset_distance) : 0;
        mSlidingLayer.setOffsetDistance(offsetDistance);
    }

    /** Applies the preview-peek distance (-1 disables preview mode). */
    private void setupPreviewMode(boolean enabled) {
        int previewOffset = enabled ? getResources().getDimensionPixelOffset(R.dimen.preview_offset_distance) : -1;
        mSlidingLayer.setPreviewOffsetDistance(previewOffset);
    }

    /** Click handler (wired from XML) for the open/close demo buttons. */
    public void buttonClicked(View v) {

        switch (v.getId()) {
        case R.id.buttonOpen:
            mSlidingLayer.openLayer(true);
            break;
        case R.id.buttonClose:
            mSlidingLayer.closeLayer(true);
            break;
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {

        switch (keyCode) {
        case KeyEvent.KEYCODE_BACK:
            // Back closes an open layer and consumes the event; otherwise it
            // deliberately falls through to the default handling below.
            if (mSlidingLayer.isOpened()) {
                mSlidingLayer.closeLayer(true);
                return true;
            }

        default:
            return super.onKeyDown(keyCode, event);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Only the home/up item exists; any selection finishes the activity.
        finish();
        return true;
    }
}
yadihaoku/android-sliding-layer-lib
SlidingLayerSample/src/main/java/com/wunderlist/slidinglayersample/MainActivity.java
Java
apache-2.0
6,768
/*
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 * Changes may cause incorrect behavior and will be lost if the code is
 * regenerated.
 */

'use strict';

/**
 * @class
 * Initializes a new instance of the USqlExternalDataSourceList class.
 * @constructor
 * A Data Lake Analytics catalog U-SQL external datasource item list.
 *
 * Behaves as an Array of USqlExternalDataSource items; serialization shape
 * is described by {@link USqlExternalDataSourceList#mapper}.
 */
class USqlExternalDataSourceList extends Array {
  constructor() {
    super();
  }

  /**
   * Defines the metadata of USqlExternalDataSourceList
   *
   * @returns {object} metadata of USqlExternalDataSourceList
   *
   */
  mapper() {
    // Element metadata for entries of the 'value' sequence.
    const elementMapper = {
      required: false,
      serializedName: 'USqlExternalDataSourceElementType',
      type: {
        name: 'Composite',
        className: 'USqlExternalDataSource'
      }
    };

    const modelProperties = {
      nextLink: {
        required: false,
        serializedName: 'nextLink',
        type: {
          name: 'String'
        }
      },
      value: {
        required: false,
        readOnly: true,
        serializedName: '',
        type: {
          name: 'Sequence',
          element: elementMapper
        }
      }
    };

    return {
      required: false,
      serializedName: 'USqlExternalDataSourceList',
      type: {
        name: 'Composite',
        className: 'USqlExternalDataSourceList',
        modelProperties: modelProperties
      }
    };
  }
}

module.exports = USqlExternalDataSourceList;
AuxMon/azure-sdk-for-node
lib/services/dataLake.Analytics/lib/catalog/models/uSqlExternalDataSourceList.js
JavaScript
apache-2.0
1,673
/**
 * @license
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 */
// FOAM view that renders a single log entry as a <log-entry> element:
// a line number followed by the entry contents wrapped in a tag named
// after the entry's mode (log / warn / error), which the CSS colors.
//
// NOTE: in FOAM, template bodies live INSIDE the /* ... */ comments of the
// template functions below — those comments are runtime behavior, not
// documentation, and must not be edited casually.
CLASS({
  name: 'LogEntryView',
  package: 'foam.flow',
  extends: 'foam.flow.Element',

  constants: { ELEMENT_NAME: 'log-entry' },

  properties: [
    {
      name: 'data',
      // type: 'foam.flow.LogEntry'
    }
  ],

  templates: [
    function toInnerHTML() {/*
      <num>{{this.data.id}}</num><{{{this.data.mode}}}>{{this.data.contents}}</{{{this.data.mode}}}>
    */},
    function CSS() {/*
      log-entry {
        display: flex;
      }
      log-entry > num {
        min-width: 35px;
        max-width: 35px;
        display: inline-block;
        text-align: right;
        padding-right: 13px;
        font-weight: bold;
        -webkit-touch-callout: none;
        -webkit-user-select: none;
        -khtml-user-select: none;
        -moz-user-select: none;
        -ms-user-select: none;
        user-select: none;
        background: #E0E0E0;
      }
      log-entry > log, log-entry > warn, log-entry > error {
        padding-left: 4px;
        white-space: pre-wrap;
      }
      log-entry > log { color: #333; }
      log-entry > warn { color: #CC9900; }
      log-entry > error { color: #C00; }
    */}
  ]
});
jlhughes/foam
js/foam/flow/LogEntryView.js
JavaScript
apache-2.0
1,487
(function() {

  /**
   * Controller for the landing page. Publishes the hero headline that the
   * landing template binds to via controller-as syntax.
   */
  function LandingCtrl() {
    this.heroTitle = "Turn the Music Up!";
  }

  angular.module('blocJams').controller('LandingCtrl', LandingCtrl);

}());
ganaraja/bloc-jams-angular
dist/scripts/controllers/LandingCtrl.js
JavaScript
apache-2.0
195
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * ClientInfo.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: #axisVersion# #today#
 */
package org.apache.axis2.databinding;

import org.apache.axiom.om.OMFactory;
import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;

/**
 * ClientInfo bean class.
 *
 * ADB (Axis2 Data Binding) bean holding a client's name and SSN. Supports
 * pull-parser based serialization via {@link #getPullParser} and
 * deserialization via {@link Factory#parse}; the push-style
 * {@code serialize} methods are unimplemented stubs.
 */
public class ClientInfo implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
       name = ClientInfo
       Namespace URI = http://www.wso2.com/types
       Namespace Prefix = ns1
     */

    /** Convenience constructor setting both fields. */
    public ClientInfo(String localName, String localSsn) {
        this.localName = localName;
        this.localSsn = localSsn;
    }

    /** No-arg constructor, used by {@link Factory#parse}. */
    public ClientInfo() {
    }

    /** field for Name */
    protected java.lang.String localName;

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getName() {
        return localName;
    }

    /**
     * Auto generated setter method
     *
     * @param param Name
     */
    public void setName(java.lang.String param) {
        this.localName = param;
    }

    /** field for Ssn */
    protected java.lang.String localSsn;

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getSsn() {
        return localSsn;
    }

    /**
     * Auto generated setter method
     *
     * @param param Ssn
     */
    public void setSsn(java.lang.String param) {
        this.localSsn = param;
    }

    /**
     * databinding method to get an XML representation of this object.
     * Emits {@code name} and {@code ssn} child elements (namespace
     * http://www.wso2.com/types) in that fixed order, with no attributes.
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) {
        // Alternating (QName, value) pairs consumed by the ADB stream reader.
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "name"));
        elementList
                .add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName));

        elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "ssn"));
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSsn));

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl
                (qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Delegates to the 4-arg overload, which is unimplemented — this
     * therefore always throws {@link UnsupportedOperationException}.
     */
    public void serialize(final QName parentQName,
                          final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter)
            throws XMLStreamException, ADBException {
        serialize(parentQName,factory,xmlWriter,false);
    }

    /** Unimplemented push-serialization stub; always throws. */
    public void serialize(final QName parentQName,
                          final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws XMLStreamException, ADBException {
        throw new UnsupportedOperationException("Un implemented method");
    }

    /** Factory class that keeps the parse method */
    public static class Factory {

        /**
         * static method to create the object.
         * Scans forward for {@code name} and {@code ssn} start elements and
         * stops as soon as both have been read; wraps any
         * XMLStreamException in a plain Exception.
         */
        public static ClientInfo parse(javax.xml.stream.XMLStreamReader reader)
                throws java.lang.Exception {
            ClientInfo object = new ClientInfo();
            try {
                int event = reader.getEventType();
                int count = 0;
                int argumentCount = 2;
                boolean done = false;
                //event better be a START_ELEMENT. if not we should go up to the start element here
                while (!reader.isStartElement()) {
                    event = reader.next();
                }

                while (!done) {
                    if (javax.xml.stream.XMLStreamConstants.START_ELEMENT == event) {

                        if ("name".equals(reader.getLocalName())) {
                            String content = reader.getElementText();
                            object.setName(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                            content));
                            count++;
                        }

                        if ("ssn".equals(reader.getLocalName())) {
                            String content = reader.getElementText();
                            object.setSsn(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                            content));
                            count++;
                        }
                    }

                    if (argumentCount == count) {
                        done = true;
                    }

                    if (!done) {
                        event = reader.next();
                    }
                }
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
intalio/axis2
modules/adb/test/org/apache/axis2/databinding/ClientInfo.java
Java
apache-2.0
6,169
<?php require_once '../autoload.php'; use Qiniu\Auth; $accessKey = 'Access_Key'; $secretKey = 'Secret_Key'; $auth = new Auth($accessKey, $secretKey); $bucket = 'Bucket_Name'; $upToken = $auth->uploadToken($bucket); echo $upToken;
976112643/manor
php-sdk-master/examples/upload_token.php
PHP
apache-2.0
234
/******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.datavec.api.transform.transform.time; import lombok.Data; import lombok.EqualsAndHashCode; import org.datavec.api.transform.ColumnType; import org.datavec.api.transform.Transform; import org.datavec.api.transform.metadata.ColumnMetaData; import org.datavec.api.transform.metadata.IntegerMetaData; import org.datavec.api.transform.metadata.StringMetaData; import org.datavec.api.transform.metadata.TimeMetaData; import org.datavec.api.transform.schema.Schema; import org.datavec.api.util.jackson.DateTimeFieldTypeDeserializer; import org.datavec.api.util.jackson.DateTimeFieldTypeSerializer; import org.datavec.api.writable.IntWritable; import org.datavec.api.writable.Text; import org.datavec.api.writable.Writable; import org.joda.time.DateTime; import org.joda.time.DateTimeFieldType; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.nd4j.shade.jackson.annotation.JsonIgnore; import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties; import org.nd4j.shade.jackson.annotation.JsonInclude; import org.nd4j.shade.jackson.annotation.JsonProperty; import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize; import 
org.nd4j.shade.jackson.databind.annotation.JsonSerialize; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.List; /** * Create a number of new columns by deriving their values from a Time column. * Can be used for example to create new columns with the year, month, day, hour, minute, second etc. * * @author Alex Black */ @JsonIgnoreProperties({"inputSchema", "insertAfterIdx", "deriveFromIdx"}) @EqualsAndHashCode(exclude = {"inputSchema", "insertAfterIdx", "deriveFromIdx"}) @Data public class DeriveColumnsFromTimeTransform implements Transform { private final String columnName; private final String insertAfter; private DateTimeZone inputTimeZone; private final List<DerivedColumn> derivedColumns; private Schema inputSchema; private int insertAfterIdx = -1; private int deriveFromIdx = -1; private DeriveColumnsFromTimeTransform(Builder builder) { this.derivedColumns = builder.derivedColumns; this.columnName = builder.columnName; this.insertAfter = builder.insertAfter; } public DeriveColumnsFromTimeTransform(@JsonProperty("columnName") String columnName, @JsonProperty("insertAfter") String insertAfter, @JsonProperty("inputTimeZone") DateTimeZone inputTimeZone, @JsonProperty("derivedColumns") List<DerivedColumn> derivedColumns) { this.columnName = columnName; this.insertAfter = insertAfter; this.inputTimeZone = inputTimeZone; this.derivedColumns = derivedColumns; } @Override public Schema transform(Schema inputSchema) { List<ColumnMetaData> oldMeta = inputSchema.getColumnMetaData(); List<ColumnMetaData> newMeta = new ArrayList<>(oldMeta.size() + derivedColumns.size()); List<String> oldNames = inputSchema.getColumnNames(); for (int i = 0; i < oldMeta.size(); i++) { String current = oldNames.get(i); newMeta.add(oldMeta.get(i)); if (insertAfter.equals(current)) { //Insert the derived columns here for (DerivedColumn d : derivedColumns) { switch 
(d.columnType) { case String: newMeta.add(new StringMetaData(d.columnName)); break; case Integer: newMeta.add(new IntegerMetaData(d.columnName)); //TODO: ranges... if it's a day, we know it must be 1 to 31, etc... break; default: throw new IllegalStateException("Unexpected column type: " + d.columnType); } } } } return inputSchema.newSchema(newMeta); } @Override public void setInputSchema(Schema inputSchema) { insertAfterIdx = inputSchema.getColumnNames().indexOf(insertAfter); if (insertAfterIdx == -1) { throw new IllegalStateException( "Invalid schema/insert after column: input schema does not contain column \"" + insertAfter + "\""); } deriveFromIdx = inputSchema.getColumnNames().indexOf(columnName); if (deriveFromIdx == -1) { throw new IllegalStateException( "Invalid source column: input schema does not contain column \"" + columnName + "\""); } this.inputSchema = inputSchema; if (!(inputSchema.getMetaData(columnName) instanceof TimeMetaData)) throw new IllegalStateException("Invalid state: input column \"" + columnName + "\" is not a time column. Is: " + inputSchema.getMetaData(columnName)); TimeMetaData meta = (TimeMetaData) inputSchema.getMetaData(columnName); inputTimeZone = meta.getTimeZone(); } @Override public Schema getInputSchema() { return inputSchema; } @Override public List<Writable> map(List<Writable> writables) { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() + "). 
Transform = " + toString()); } int i = 0; Writable source = writables.get(deriveFromIdx); List<Writable> list = new ArrayList<>(writables.size() + derivedColumns.size()); for (Writable w : writables) { list.add(w); if (i++ == insertAfterIdx) { for (DerivedColumn d : derivedColumns) { switch (d.columnType) { case String: list.add(new Text(d.dateTimeFormatter.print(source.toLong()))); break; case Integer: DateTime dt = new DateTime(source.toLong(), inputTimeZone); list.add(new IntWritable(dt.get(d.fieldType))); break; default: throw new IllegalStateException("Unexpected column type: " + d.columnType); } } } } return list; } @Override public List<List<Writable>> mapSequence(List<List<Writable>> sequence) { List<List<Writable>> out = new ArrayList<>(sequence.size()); for (List<Writable> step : sequence) { out.add(map(step)); } return out; } /** * Transform an object * in to another object * * @param input the record to transform * @return the transformed writable */ @Override public Object map(Object input) { List<Object> ret = new ArrayList<>(); Long l = (Long) input; for (DerivedColumn d : derivedColumns) { switch (d.columnType) { case String: ret.add(d.dateTimeFormatter.print(l)); break; case Integer: DateTime dt = new DateTime(l, inputTimeZone); ret.add(dt.get(d.fieldType)); break; default: throw new IllegalStateException("Unexpected column type: " + d.columnType); } } return ret; } /** * Transform a sequence * * @param sequence */ @Override public Object mapSequence(Object sequence) { List<Long> longs = (List<Long>) sequence; List<List<Object>> ret = new ArrayList<>(); for (Long l : longs) ret.add((List<Object>) map(l)); return ret; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("DeriveColumnsFromTimeTransform(timeColumn=\"").append(columnName).append("\",insertAfter=\"") .append(insertAfter).append("\",derivedColumns=("); boolean first = true; for (DerivedColumn d : derivedColumns) { if (!first) sb.append(","); 
sb.append(d); first = false; } sb.append("))"); return sb.toString(); } /** * The output column name * after the operation has been applied * * @return the output column name */ @Override public String outputColumnName() { return outputColumnNames()[0]; } /** * The output column names * This will often be the same as the input * * @return the output column names */ @Override public String[] outputColumnNames() { String[] ret = new String[derivedColumns.size()]; for (int i = 0; i < ret.length; i++) ret[i] = derivedColumns.get(i).columnName; return ret; } /** * Returns column names * this op is meant to run on * * @return */ @Override public String[] columnNames() { return new String[] {columnName()}; } /** * Returns a singular column name * this op is meant to run on * * @return */ @Override public String columnName() { return columnName; } public static class Builder { private final String columnName; private String insertAfter; private final List<DerivedColumn> derivedColumns = new ArrayList<>(); /** * @param timeColumnName The name of the time column from which to derive the new values */ public Builder(String timeColumnName) { this.columnName = timeColumnName; this.insertAfter = timeColumnName; } /** * Where should the new columns be inserted? 
* By default, they will be inserted after the source column * * @param columnName Name of the column to insert the derived columns after */ public Builder insertAfter(String columnName) { this.insertAfter = columnName; return this; } /** * Add a String column (for example, human readable format), derived from the time * * @param columnName Name of the new/derived column * @param format Joda time format, as per <a href="http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html">http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html</a> * @param timeZone Timezone to use for formatting */ public Builder addStringDerivedColumn(String columnName, String format, DateTimeZone timeZone) { derivedColumns.add(new DerivedColumn(columnName, ColumnType.String, format, timeZone, null)); return this; } /** * Add an integer derived column - for example, the hour of day, etc. Uses timezone from the time column metadata * * @param columnName Name of the column * @param type Type of field (for example, DateTimeFieldType.hourOfDay() etc) */ public Builder addIntegerDerivedColumn(String columnName, DateTimeFieldType type) { derivedColumns.add(new DerivedColumn(columnName, ColumnType.Integer, null, null, type)); return this; } /** * Create the transform instance */ public DeriveColumnsFromTimeTransform build() { return new DeriveColumnsFromTimeTransform(this); } } @JsonInclude(JsonInclude.Include.NON_NULL) @EqualsAndHashCode(exclude = "dateTimeFormatter") @Data @JsonIgnoreProperties({"dateTimeFormatter"}) public static class DerivedColumn implements Serializable { private final String columnName; private final ColumnType columnType; private final String format; private final DateTimeZone dateTimeZone; @JsonSerialize(using = DateTimeFieldTypeSerializer.class) @JsonDeserialize(using = DateTimeFieldTypeDeserializer.class) private final DateTimeFieldType fieldType; private transient DateTimeFormatter dateTimeFormatter; // public 
DerivedColumn(String columnName, ColumnType columnType, String format, DateTimeZone dateTimeZone, DateTimeFieldType fieldType) { public DerivedColumn(@JsonProperty("columnName") String columnName, @JsonProperty("columnType") ColumnType columnType, @JsonProperty("format") String format, @JsonProperty("dateTimeZone") DateTimeZone dateTimeZone, @JsonProperty("fieldType") DateTimeFieldType fieldType) { this.columnName = columnName; this.columnType = columnType; this.format = format; this.dateTimeZone = dateTimeZone; this.fieldType = fieldType; if (format != null) dateTimeFormatter = DateTimeFormat.forPattern(this.format).withZone(dateTimeZone); } @Override public String toString() { return "(name=" + columnName + ",type=" + columnType + ",derived=" + (format != null ? format : fieldType) + ")"; } //Custom serialization methods, because Joda Time doesn't allow DateTimeFormatter objects to be serialized :( private void writeObject(ObjectOutputStream out) throws IOException { out.defaultWriteObject(); } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); if (format != null) dateTimeFormatter = DateTimeFormat.forPattern(format).withZone(dateTimeZone); } } }
deeplearning4j/deeplearning4j
datavec/datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java
Java
apache-2.0
15,108
package com.humbinal.ssm.test; public class User { private long user_Id; private String user_name; private int user_age; public User() { } public long getUser_Id() { return user_Id; } public void setUser_Id(long user_Id) { this.user_Id = user_Id; } public String getUser_name() { return user_name; } public void setUser_name(String user_name) { this.user_name = user_name; } public int getUser_age() { return user_age; } public void setUser_age(int user_age) { this.user_age = user_age; } }
Humbinal/java-items
hum-web/hum-ssm/src/test/java/com/humbinal/ssm/test/User.java
Java
apache-2.0
616
/** Copyright (c) 2013 The Chromium Authors. All rights reserved. Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. **/ require("../base/extension_registry.js"); require("./event.js"); require("./object_snapshot.js"); require("../base/range.js"); require("../base/sorted_array_utils.js"); 'use strict'; /** * @fileoverview Provides the ObjectSnapshot and ObjectHistory classes. */ global.tr.exportTo('tr.model', function() { var ObjectSnapshot = tr.model.ObjectSnapshot; /** * An object with a specific id, whose state has been snapshotted several * times. * * @constructor */ function ObjectInstance( parent, id, category, name, creationTs, opt_baseTypeName) { tr.model.Event.call(this); this.parent = parent; this.id = id; this.category = category; this.baseTypeName = opt_baseTypeName ? opt_baseTypeName : name; this.name = name; this.creationTs = creationTs; this.creationTsWasExplicit = false; this.deletionTs = Number.MAX_VALUE; this.deletionTsWasExplicit = false; this.colorId = 0; this.bounds = new tr.b.Range(); this.snapshots = []; this.hasImplicitSnapshots = false; } ObjectInstance.prototype = { __proto__: tr.model.Event.prototype, get typeName() { return this.name; }, addBoundsToRange: function(range) { range.addRange(this.bounds); }, addSnapshot: function(ts, args, opt_name, opt_baseTypeName) { if (ts < this.creationTs) throw new Error('Snapshots must be >= instance.creationTs'); if (ts >= this.deletionTs) throw new Error('Snapshots cannot be added after ' + 'an objects deletion timestamp.'); var lastSnapshot; if (this.snapshots.length > 0) { lastSnapshot = this.snapshots[this.snapshots.length - 1]; if (lastSnapshot.ts == ts) throw new Error('Snapshots already exists at this time!'); if (ts < lastSnapshot.ts) { throw new Error( 'Snapshots must be added in increasing timestamp order'); } } // Update baseTypeName if needed. 
if (opt_name && (this.name != opt_name)) { if (!opt_baseTypeName) throw new Error('Must provide base type name for name update'); if (this.baseTypeName != opt_baseTypeName) throw new Error('Cannot update type name: base types dont match'); this.name = opt_name; } var snapshotConstructor = tr.model.ObjectSnapshot.getConstructor( this.category, this.name); var snapshot = new snapshotConstructor(this, ts, args); this.snapshots.push(snapshot); return snapshot; }, wasDeleted: function(ts) { var lastSnapshot; if (this.snapshots.length > 0) { lastSnapshot = this.snapshots[this.snapshots.length - 1]; if (lastSnapshot.ts > ts) throw new Error( 'Instance cannot be deleted at ts=' + ts + '. A snapshot exists that is older.'); } this.deletionTs = ts; this.deletionTsWasExplicit = true; }, /** * See ObjectSnapshot constructor notes on object initialization. */ preInitialize: function() { for (var i = 0; i < this.snapshots.length; i++) this.snapshots[i].preInitialize(); }, /** * See ObjectSnapshot constructor notes on object initialization. */ initialize: function() { for (var i = 0; i < this.snapshots.length; i++) this.snapshots[i].initialize(); }, getSnapshotAt: function(ts) { if (ts < this.creationTs) { if (this.creationTsWasExplicit) throw new Error('ts must be within lifetime of this instance'); return this.snapshots[0]; } if (ts > this.deletionTs) throw new Error('ts must be within lifetime of this instance'); var snapshots = this.snapshots; var i = tr.b.findIndexInSortedIntervals( snapshots, function(snapshot) { return snapshot.ts; }, function(snapshot, i) { if (i == snapshots.length - 1) return snapshots[i].objectInstance.deletionTs; return snapshots[i + 1].ts - snapshots[i].ts; }, ts); if (i < 0) { // Note, this is a little bit sketchy: this lets early ts point at the // first snapshot, even before it is taken. We do this because raster // tasks usually post before their tile snapshots are dumped. 
This may // be a good line of code to re-visit if we start seeing strange and // confusing object references showing up in the traces. return this.snapshots[0]; } if (i >= this.snapshots.length) return this.snapshots[this.snapshots.length - 1]; return this.snapshots[i]; }, updateBounds: function() { this.bounds.reset(); this.bounds.addValue(this.creationTs); if (this.deletionTs != Number.MAX_VALUE) this.bounds.addValue(this.deletionTs); else if (this.snapshots.length > 0) this.bounds.addValue(this.snapshots[this.snapshots.length - 1].ts); }, shiftTimestampsForward: function(amount) { this.creationTs += amount; if (this.deletionTs != Number.MAX_VALUE) this.deletionTs += amount; this.snapshots.forEach(function(snapshot) { snapshot.ts += amount; }); }, get userFriendlyName() { return this.typeName + ' object ' + this.id; } }; tr.model.EventRegistry.register( ObjectInstance, { name: 'objectInstance', pluralName: 'objectInstances', singleViewElementName: 'tr-ui-a-single-object-instance-sub-view', multiViewElementName: 'tr-ui-a-multi-object-sub-view' }); var options = new tr.b.ExtensionRegistryOptions( tr.b.TYPE_BASED_REGISTRY_MODE); options.mandatoryBaseClass = ObjectInstance; options.defaultConstructor = ObjectInstance; tr.b.decorateExtensionRegistry(ObjectInstance, options); return { ObjectInstance: ObjectInstance }; });
googlearchive/node-big-rig
lib/third_party/tracing/model/object_instance.js
JavaScript
apache-2.0
6,147
// Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package graphx import ( "encoding/json" "fmt" "github.com/apache/beam/sdks/go/pkg/beam/core/graph/coder" "github.com/apache/beam/sdks/go/pkg/beam/core/runtime/graphx/v1" "github.com/apache/beam/sdks/go/pkg/beam/core/typex" "github.com/apache/beam/sdks/go/pkg/beam/core/util/protox" pb "github.com/apache/beam/sdks/go/pkg/beam/model/pipeline_v1" "github.com/golang/protobuf/proto" ) const ( // Model constants urnBytesCoder = "beam:coder:bytes:v1" urnVarIntCoder = "beam:coder:varint:v1" urnLengthPrefixCoder = "beam:coder:length_prefix:v1" urnKVCoder = "beam:coder:kv:v1" urnIterableCoder = "beam:coder:iterable:v1" urnWindowedValueCoder = "beam:coder:windowed_value:v1" urnGlobalWindow = "beam:coder:global_window:v1" urnIntervalWindow = "beam:coder:interval_window:v1" // SDK constants urnCustomCoder = "beam:go:coder:custom:v1" urnCoGBKList = "beam:go:coder:cogbklist:v1" // CoGBK representation. Not a coder. ) // MarshalCoders marshals a list of coders into model coders. func MarshalCoders(coders []*coder.Coder) ([]string, map[string]*pb.Coder) { b := NewCoderMarshaller() ids := b.AddMulti(coders) return ids, b.Build() } // UnmarshalCoders unmarshals coders. 
func UnmarshalCoders(ids []string, m map[string]*pb.Coder) ([]*coder.Coder, error) { b := NewCoderUnmarshaller(m) var coders []*coder.Coder for _, id := range ids { c, err := b.Coder(id) if err != nil { return nil, fmt.Errorf("failed to unmarshal coder %v: %v", id, err) } coders = append(coders, c) } return coders, nil } // CoderUnmarshaller is an incremental unmarshaller of model coders. Identical // coders are shared. type CoderUnmarshaller struct { models map[string]*pb.Coder coders map[string]*coder.Coder windowCoders map[string]*coder.WindowCoder } // NewCoderUnmarshaller returns a new CoderUnmarshaller. func NewCoderUnmarshaller(m map[string]*pb.Coder) *CoderUnmarshaller { return &CoderUnmarshaller{ models: m, coders: make(map[string]*coder.Coder), windowCoders: make(map[string]*coder.WindowCoder), } } func (b *CoderUnmarshaller) Coders(ids []string) ([]*coder.Coder, error) { coders := make([]*coder.Coder, len(ids)) for i, id := range ids { c, err := b.Coder(id) if err != nil { return nil, err } coders[i] = c } return coders, nil } // Coder unmarshals a coder with the given id. func (b *CoderUnmarshaller) Coder(id string) (*coder.Coder, error) { if c, exists := b.coders[id]; exists { return c, nil } c, ok := b.models[id] if !ok { return nil, fmt.Errorf("coder with id %v not found", id) } ret, err := b.makeCoder(c) if err != nil { return nil, fmt.Errorf("failed to unmarshal coder %v: %v", id, err) } b.coders[id] = ret return ret, nil } // WindowCoder unmarshals a window coder with the given id. 
func (b *CoderUnmarshaller) WindowCoder(id string) (*coder.WindowCoder, error) { if w, exists := b.windowCoders[id]; exists { return w, nil } c, err := b.peek(id) if err != nil { return nil, err } w := urnToWindowCoder(c.GetSpec().GetSpec().GetUrn()) b.windowCoders[id] = w return w, nil } func urnToWindowCoder(urn string) *coder.WindowCoder { switch urn { case urnGlobalWindow: return coder.NewGlobalWindow() case urnIntervalWindow: return coder.NewIntervalWindow() default: panic(fmt.Sprintf("Unexpected window coder: %v", urn)) } } func (b *CoderUnmarshaller) makeCoder(c *pb.Coder) (*coder.Coder, error) { urn := c.GetSpec().GetSpec().GetUrn() components := c.GetComponentCoderIds() switch urn { case urnBytesCoder: return coder.NewBytes(), nil case urnVarIntCoder: return coder.NewVarInt(), nil case urnKVCoder: if len(components) != 2 { return nil, fmt.Errorf("bad pair: %v", c) } key, err := b.Coder(components[0]) if err != nil { return nil, err } id := components[1] kind := coder.KV root := typex.KVType elm, err := b.peek(id) if err != nil { return nil, err } isGBK := elm.GetSpec().GetSpec().GetUrn() == urnIterableCoder if isGBK { id = elm.GetComponentCoderIds()[0] kind = coder.CoGBK root = typex.CoGBKType // TODO(BEAM-490): If CoGBK with > 1 input, handle as special GBK. We expect // it to be encoded as CoGBK<K,LP<CoGBKList<V,W,..>>>. Remove this handling once // CoGBK has a first-class representation. if ids, ok := b.isCoGBKList(id); ok { // CoGBK<K,V,W,..> values, err := b.Coders(ids) if err != nil { return nil, err } t := typex.New(root, append([]typex.FullType{key.T}, coder.Types(values)...)...) 
return &coder.Coder{Kind: kind, T: t, Components: append([]*coder.Coder{key}, values...)}, nil } } value, err := b.Coder(id) if err != nil { return nil, err } t := typex.New(root, key.T, value.T) return &coder.Coder{Kind: kind, T: t, Components: []*coder.Coder{key, value}}, nil case urnLengthPrefixCoder: if len(components) != 1 { return nil, fmt.Errorf("bad length prefix: %v", c) } elm, err := b.peek(components[0]) if err != nil { return nil, err } if elm.GetSpec().GetSpec().GetUrn() != urnCustomCoder { // TODO(herohde) 11/17/2017: revisit this restriction return nil, fmt.Errorf("expected length prefix of custom coder only: %v", elm) } var ref v1.CustomCoder if err := protox.DecodeBase64(string(elm.GetSpec().GetSpec().GetPayload()), &ref); err != nil { return nil, err } custom, err := decodeCustomCoder(&ref) if err != nil { return nil, err } t := typex.New(custom.Type) return &coder.Coder{Kind: coder.Custom, T: t, Custom: custom}, nil case urnWindowedValueCoder: if len(components) != 2 { return nil, fmt.Errorf("bad windowed value: %v", c) } elm, err := b.Coder(components[0]) if err != nil { return nil, err } w, err := b.WindowCoder(components[1]) if err != nil { return nil, err } t := typex.New(typex.WindowedValueType, elm.T) return &coder.Coder{Kind: coder.WindowedValue, T: t, Components: []*coder.Coder{elm}, Window: w}, nil case streamType: return nil, fmt.Errorf("stream must be pair value: %v", c) case "": // TODO(herohde) 11/27/2017: we still see CoderRefs from Dataflow. Handle that // case here, for now, so that the harness can use this logic. 
payload := c.GetSpec().GetSpec().GetPayload() var ref CoderRef if err := json.Unmarshal(payload, &ref); err != nil { return nil, fmt.Errorf("failed to decode urn-less coder payload \"%v\": %v", string(payload), err) } c, err := DecodeCoderRef(&ref) if err != nil { return nil, fmt.Errorf("failed to translate coder \"%v\": %v", string(payload), err) } return c, nil default: return nil, fmt.Errorf("custom coders must be length prefixed: %v", c) } } func (b *CoderUnmarshaller) peek(id string) (*pb.Coder, error) { c, ok := b.models[id] if !ok { return nil, fmt.Errorf("coder with id %v not found", id) } return c, nil } func (b *CoderUnmarshaller) isCoGBKList(id string) ([]string, bool) { elm, err := b.peek(id) if err != nil { return nil, false } if elm.GetSpec().GetSpec().GetUrn() != urnLengthPrefixCoder { return nil, false } elm2, err := b.peek(elm.GetComponentCoderIds()[0]) if err != nil { return nil, false } if elm2.GetSpec().GetSpec().GetUrn() != urnCoGBKList { return nil, false } return elm2.GetComponentCoderIds(), true } // CoderMarshaller incrementally builds a compact model representation of a set // of coders. Identical coders are shared. type CoderMarshaller struct { coders map[string]*pb.Coder coder2id map[string]string // index of serialized coders to id to deduplicate } // NewCoderMarshaller returns a new CoderMarshaller. func NewCoderMarshaller() *CoderMarshaller { return &CoderMarshaller{ coders: make(map[string]*pb.Coder), coder2id: make(map[string]string), } } // Add adds the given coder to the set and returns its id. Idempotent. 
func (b *CoderMarshaller) Add(c *coder.Coder) string { switch c.Kind { case coder.Custom: ref, err := encodeCustomCoder(c.Custom) if err != nil { panic(fmt.Sprintf("failed to encode custom coder: %v", err)) } data, err := protox.EncodeBase64(ref) if err != nil { panic(fmt.Sprintf("failed to marshal custom coder: %v", err)) } inner := b.internCoder(&pb.Coder{ Spec: &pb.SdkFunctionSpec{ Spec: &pb.FunctionSpec{ Urn: urnCustomCoder, Payload: []byte(data), }, // TODO(BEAM-3204): coders should not have environments. }, }) return b.internBuiltInCoder(urnLengthPrefixCoder, inner) case coder.KV: comp := b.AddMulti(c.Components) return b.internBuiltInCoder(urnKVCoder, comp...) case coder.CoGBK: comp := b.AddMulti(c.Components) value := comp[1] if len(comp) > 2 { // TODO(BEAM-490): don't inject union coder for CoGBK. union := b.internBuiltInCoder(urnCoGBKList, comp[1:]...) value = b.internBuiltInCoder(urnLengthPrefixCoder, union) } stream := b.internBuiltInCoder(urnIterableCoder, value) return b.internBuiltInCoder(urnKVCoder, comp[0], stream) case coder.WindowedValue: comp := b.AddMulti(c.Components) comp = append(comp, b.AddWindowCoder(c.Window)) return b.internBuiltInCoder(urnWindowedValueCoder, comp...) case coder.Bytes: // TODO(herohde) 6/27/2017: add length-prefix and not assume nested by context? return b.internBuiltInCoder(urnBytesCoder) case coder.VarInt: return b.internBuiltInCoder(urnVarIntCoder) default: panic(fmt.Sprintf("Unexpected coder kind: %v", c.Kind)) } } // AddMulti adds the given coders to the set and returns their ids. Idempotent. func (b *CoderMarshaller) AddMulti(list []*coder.Coder) []string { var ids []string for _, c := range list { ids = append(ids, b.Add(c)) } return ids } // AddWindowCoder adds a window coder. 
func (b *CoderMarshaller) AddWindowCoder(w *coder.WindowCoder) string { switch w.Kind { case coder.GlobalWindow: return b.internBuiltInCoder(urnGlobalWindow) case coder.IntervalWindow: return b.internBuiltInCoder(urnIntervalWindow) default: panic(fmt.Sprintf("Unexpected window kind: %v", w.Kind)) } } // Build returns the set of model coders. Note that the map may be larger // than the number of coders added, because component coders are included. func (b *CoderMarshaller) Build() map[string]*pb.Coder { return b.coders } func (b *CoderMarshaller) internBuiltInCoder(urn string, components ...string) string { return b.internCoder(&pb.Coder{ Spec: &pb.SdkFunctionSpec{ Spec: &pb.FunctionSpec{ Urn: urn, }, }, ComponentCoderIds: components, }) } func (b *CoderMarshaller) internCoder(coder *pb.Coder) string { key := proto.MarshalTextString(coder) if id, exists := b.coder2id[key]; exists { return id } id := fmt.Sprintf("c%v", len(b.coder2id)) b.coder2id[key] = id b.coders[id] = coder return id }
tgroh/incubator-beam
sdks/go/pkg/beam/core/runtime/graphx/coder.go
GO
apache-2.0
11,649
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry.vlib.ejb.impl;

import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ejb.CreateException;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.rmi.PortableRemoteObject;
import javax.sql.DataSource;

import org.apache.tapestry.Tapestry;
import org.apache.tapestry.contrib.ejb.XCreateException;
import org.apache.tapestry.contrib.ejb.XEJBException;
import org.apache.tapestry.contrib.ejb.XRemoveException;
import org.apache.tapestry.contrib.jdbc.IStatement;
import org.apache.tapestry.contrib.jdbc.StatementAssembly;
import org.apache.tapestry.vlib.ejb.Book;
import org.apache.tapestry.vlib.ejb.BorrowException;
import org.apache.tapestry.vlib.ejb.IBook;
import org.apache.tapestry.vlib.ejb.IBookHome;
import org.apache.tapestry.vlib.ejb.IPerson;
import org.apache.tapestry.vlib.ejb.IPersonHome;
import org.apache.tapestry.vlib.ejb.IPublisher;
import org.apache.tapestry.vlib.ejb.IPublisherHome;
import org.apache.tapestry.vlib.ejb.LoginException;
import org.apache.tapestry.vlib.ejb.Person;
import org.apache.tapestry.vlib.ejb.Publisher;
import org.apache.tapestry.vlib.ejb.RegistrationException;
import org.apache.tapestry.vlib.ejb.SortColumn;
import org.apache.tapestry.vlib.ejb.SortOrdering;

/**
 *  Implementation of the {@link org.apache.tapestry.vlib.ejb.IOperations}
 *  stateless session bean.
 *
 *  <p>Implements a number of stateless operations for the front end.
 *
 *  @version $Id$
 *  @author Howard Lewis Ship
 *
 **/
public class OperationsBean implements SessionBean
{
    // Session context supplied by the EJB container via setSessionContext().
    private SessionContext _context;

    // The following references are transient: they are re-resolved lazily
    // after activation/creation rather than serialized with the bean.
    private transient Context _environment;
    private transient IBookHome _bookHome;
    private transient IPersonHome _personHome;
    private transient IPublisherHome _publisherHome;

    /**
     *  Data source, retrieved from the ENC property
     *  "jdbc/dataSource".
     *
     **/
    private transient DataSource _dataSource;

    /**
     *  Sets up the bean.  Locates the {@link DataSource} for the bean
     *  as <code>jdbc/dataSource</code> within the ENC; this data source is
     *  later used by {@link #getConnection()}.
     *
     **/
    public void ejbCreate()
    {
        Context initial;

        try
        {
            initial = new InitialContext();
            _environment = (Context) initial.lookup("java:comp/env");
        }
        catch (NamingException e)
        {
            throw new XEJBException("Could not lookup environment.", e);
        }

        try
        {
            _dataSource = (DataSource) _environment.lookup("jdbc/dataSource");
        }
        catch (NamingException e)
        {
            // NOTE(review): stack trace goes to stdout before the wrapped
            // exception is thrown; consider a logger instead.
            e.printStackTrace();

            throw new XEJBException("Could not lookup data source.", e);
        }
    }

    public void ejbRemove()
    {
    }

    /**
     *  Does nothing, not invoked in stateless session beans.
     **/
    public void ejbPassivate()
    {
    }

    public void setSessionContext(SessionContext value)
    {
        _context = value;
    }

    /**
     *  Does nothing, not invoked in stateless session beans.
     *
     **/
    public void ejbActivate()
    {
    }

    /**
     *  Finds the book and borrower (by their primary keys) and updates the book.
     *
     *  <p>The {@link Book} value object is returned.
* **/ public Book borrowBook(Integer bookId, Integer borrowerId) throws FinderException, RemoteException, BorrowException { IBookHome bookHome = getBookHome(); IPersonHome personHome = getPersonHome(); IBook book = bookHome.findByPrimaryKey(bookId); if (!book.getLendable()) throw new BorrowException("Book may not be borrowed."); // Verify that the borrower exists. personHome.findByPrimaryKey(borrowerId); // TBD: Check that borrower has authenticated // findByPrimaryKey() throws an exception if the EJB doesn't exist, // so we're safe. personHome.findByPrimaryKey(book.getOwnerId()); // Here's the real work; just setting the holder of the book // to be the borrower. book.setHolderId(borrowerId); return getBook(bookId); } /** * Adds a new book, verifying that the publisher and holder actually exist. * **/ public Integer addBook(Map attributes) throws CreateException, RemoteException { IBookHome home = getBookHome(); attributes.put("dateAdded", new Timestamp(System.currentTimeMillis())); IBook book = home.create(attributes); return (Integer) book.getPrimaryKey(); } /** * Adds a book, which will be owned and held by the specified owner. * * <p>The publisherName may either be the name of a known publisher, or * a new name. A new {@link IPublisher} will be created as necessary. * * <p>Returns the newly created book, as a {@link Map} of attributes. * **/ public Integer addBook(Map attributes, String publisherName) throws CreateException, RemoteException { IPublisher publisher = null; IPublisherHome publisherHome = getPublisherHome(); // Find or create the publisher. try { publisher = publisherHome.findByName(publisherName); } catch (FinderException e) { // Ignore, means that no publisher with the given name already exists. } if (publisher == null) publisher = publisherHome.create(publisherName); attributes.put("publisherId", publisher.getPrimaryKey()); return addBook(attributes); } /** * Updates a book. * * <p>Returns the updated book. 
* * @param bookId The primary key of the book to update. * **/ public void updateBook(Integer bookId, Map attributes) throws FinderException, RemoteException { IBookHome bookHome = getBookHome(); IBook book = bookHome.findByPrimaryKey(bookId); book.updateEntityAttributes(attributes); } /** * Updates a book, adding a new Publisher at the same time. * * * @param bookPK The primary key of the book to update. * @param attributes attributes to change * @param publisherName The name of the new publisher. * @throws FinderException if the book, holder or publisher can not be located. * @throws CreateException if the {@link IPublisher} can not be created. **/ public void updateBook(Integer bookId, Map attributes, String publisherName) throws CreateException, FinderException, RemoteException { IPublisher publisher = null; IPublisherHome publisherHome = getPublisherHome(); try { publisher = publisherHome.findByName(publisherName); } catch (FinderException e) { // Ignore, means we need to create the Publisher } if (publisher == null) publisher = publisherHome.create(publisherName); // Don't duplicate all that other code! 
attributes.put("publisherId", publisher.getPrimaryKey()); updateBook(bookId, attributes); } public void updatePerson(Integer personId, Map attributes) throws FinderException, RemoteException { IPersonHome home = getPersonHome(); IPerson person = home.findByPrimaryKey(personId); person.updateEntityAttributes(attributes); } public Publisher[] getPublishers() { Connection connection = null; IStatement statement = null; ResultSet set = null; List list = new ArrayList(); try { connection = getConnection(); StatementAssembly assembly = new StatementAssembly(); assembly.newLine("SELECT PUBLISHER_ID, NAME"); assembly.newLine("FROM PUBLISHER"); assembly.newLine("ORDER BY NAME"); statement = assembly.createStatement(connection); set = statement.executeQuery(); while (set.next()) { Integer primaryKey = (Integer) set.getObject(1); String name = set.getString(2); list.add(new Publisher(primaryKey, name)); } } catch (SQLException ex) { ex.printStackTrace(); throw new XEJBException("Could not fetch all Publishers.", ex); } finally { close(connection, statement, set); } // Convert from List to Publisher[] return (Publisher[]) list.toArray(new Publisher[list.size()]); } /** * Fetchs all {@link IPerson} beans in the database and converts them * to {@link Person} objects. * * Returns the {@link Person}s sorted by last name, then first. 
**/ public Person[] getPersons() { Connection connection = null; IStatement statement = null; ResultSet set = null; List list = new ArrayList(); try { connection = getConnection(); StatementAssembly assembly = buildBasePersonQuery(); assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME"); statement = assembly.createStatement(connection); set = statement.executeQuery(); Object[] columns = new Object[Person.N_COLUMNS]; while (set.next()) { list.add(convertRowToPerson(set, columns)); } } catch (SQLException ex) { throw new XEJBException("Could not fetch all Persons.", ex); } finally { close(connection, statement, set); } return (Person[]) list.toArray(new Person[list.size()]); } /** * Gets the {@link Person} for primary key. * * @throws FinderException if the Person does not exist. **/ public Person getPerson(Integer personId) throws FinderException { Connection connection = null; IStatement statement = null; ResultSet set = null; Person result = null; try { connection = getConnection(); StatementAssembly assembly = buildBasePersonQuery(); assembly.newLine("WHERE "); assembly.add("PERSON_ID = "); assembly.addParameter(personId); assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME"); statement = assembly.createStatement(connection); set = statement.executeQuery(); if (!set.next()) throw new FinderException("Person #" + personId + " does not exist."); Object[] columns = new Object[Person.N_COLUMNS]; result = convertRowToPerson(set, columns); } catch (SQLException ex) { throw new XEJBException("Unable to perform database query.", ex); } finally { close(connection, statement, set); } return result; } public Person login(String email, String password) throws RemoteException, LoginException { IPersonHome home = getPersonHome(); IPerson person = null; Person result = null; try { person = home.findByEmail(email); } catch (FinderException ex) { throw new LoginException("Unknown e-mail address.", false); } if (!person.getPassword().equals(password)) throw new LoginException("Invalid 
password.", true); try { result = getPerson((Integer) person.getPrimaryKey()); } catch (FinderException ex) { throw new LoginException("Could not read person.", false); } if (result.isLockedOut()) throw new LoginException("You have been locked out of the Virtual Library.", false); // Set the last access time for any subsequent login. person.setLastAccess(new Timestamp(System.currentTimeMillis())); return result; } public Map getPersonAttributes(Integer personId) throws FinderException, RemoteException { IPersonHome home = getPersonHome(); IPerson person = home.findByPrimaryKey(personId); return person.getEntityAttributes(); } /** * Retrieves a single {@link Book} by its primary key. * * @throws FinderException if the Book does not exist. * **/ public Book getBook(Integer bookId) throws FinderException { Connection connection = null; IStatement statement = null; ResultSet set = null; Book result = null; try { connection = getConnection(); StatementAssembly assembly = buildBaseBookQuery(); assembly.addSep(" AND "); assembly.add("book.BOOK_ID = "); assembly.addParameter(bookId); statement = assembly.createStatement(connection); set = statement.executeQuery(); if (!set.next()) throw new FinderException("Book " + bookId + " does not exist."); Object[] columns = new Object[Book.N_COLUMNS]; result = convertRowToBook(set, columns); } catch (SQLException ex) { throw new XEJBException("Unable to perform database query.", ex); } finally { close(connection, statement, set); } return result; } public Map getBookAttributes(Integer bookId) throws FinderException, RemoteException { IBookHome home = getBookHome(); IBook book = home.findByPrimaryKey(bookId); return book.getEntityAttributes(); } /** * Attempts to register a new user, first checking that the * e-mail and names are unique. Returns the primary key of the * new {@link IPerson}. 
* **/ public Person registerNewUser(String firstName, String lastName, String email, String password) throws RegistrationException, CreateException, RemoteException { IPersonHome home; if (password == null || password.trim().length() == 0) throw new RegistrationException("Must specify a password."); validateUniquePerson(firstName, lastName, email); home = getPersonHome(); Map attributes = new HashMap(); attributes.put("lastName", lastName.trim()); attributes.put("firstName", firstName.trim()); attributes.put("email", email.trim()); attributes.put("password", password.trim()); attributes.put("lastAccess", new Timestamp(System.currentTimeMillis())); IPerson person = home.create(attributes); Integer personId = (Integer) person.getPrimaryKey(); try { return getPerson(personId); } catch (FinderException ex) { throw new XCreateException("Unable to find newly created Person.", ex); } } public Book deleteBook(Integer bookId) throws RemoveException, RemoteException { IBookHome home = getBookHome(); Book result = null; try { result = getBook(bookId); } catch (FinderException ex) { throw new XRemoveException(ex); } home.remove(bookId); return result; } /** * Transfers a number of books to a new owner. * **/ public void transferBooks(Integer newOwnerId, Integer[] bookIds) throws FinderException, RemoteException { if (bookIds == null) throw new RemoteException("Must supply non-null list of books to transfer."); if (newOwnerId == null) throw new RemoteException("Must provide an owner for the books."); // Verify that the new owner exists. IPersonHome personHome = getPersonHome(); personHome.findByPrimaryKey(newOwnerId); // Direct SQL would be more efficient, but this'll probably do. 
IBookHome home = getBookHome(); for (int i = 0; i < bookIds.length; i++) { IBook book = home.findByPrimaryKey(bookIds[i]); book.setOwnerId(newOwnerId); } } public void updatePublishers(Publisher[] updated, Integer[] deleted) throws FinderException, RemoveException, RemoteException { IPublisherHome home = getPublisherHome(); if (updated != null) { for (int i = 0; i < updated.length; i++) { IPublisher publisher = home.findByPrimaryKey(updated[i].getId()); publisher.setName(updated[i].getName()); } } if (deleted != null) { for (int i = 0; i < deleted.length; i++) { home.remove(deleted[i]); } } } public void updatePersons( Person[] updated, Integer[] resetPassword, String newPassword, Integer[] deleted, Integer adminId) throws FinderException, RemoveException, RemoteException { IPersonHome home = getPersonHome(); int count = Tapestry.size(updated); for (int i = 0; i < count; i++) { Person u = updated[i]; IPerson person = home.findByPrimaryKey(u.getId()); person.setAdmin(u.isAdmin()); person.setLockedOut(u.isLockedOut()); } count = Tapestry.size(resetPassword); for (int i = 0; i < count; i++) { IPerson person = home.findByPrimaryKey(resetPassword[i]); person.setPassword(newPassword); } count = Tapestry.size(deleted); if (count > 0) { returnBooksFromDeletedPersons(deleted); moveBooksFromDeletedPersons(deleted, adminId); } for (int i = 0; i < count; i++) home.remove(deleted[i]); } /** * Invoked to update all books owned by people about to be deleted, to * reassign the books holder back to the owner. * **/ private void returnBooksFromDeletedPersons(Integer deletedPersonIds[]) throws RemoveException { StatementAssembly assembly = new StatementAssembly(); assembly.add("UPDATE BOOK"); assembly.newLine("SET HOLDER_ID = OWNER_ID"); assembly.newLine("WHERE HOLDER_ID IN ("); assembly.addParameterList(deletedPersonIds, ", "); assembly.add(")"); executeUpdate(assembly); } /** * Invoked to execute a bulk update that moves books to the new admin. 
* **/ private void moveBooksFromDeletedPersons(Integer deletedPersonIds[], Integer adminId) throws RemoveException { StatementAssembly assembly = new StatementAssembly(); assembly.add("UPDATE BOOK"); assembly.newLine("SET OWNER_ID = "); assembly.addParameter(adminId); assembly.newLine("WHERE OWNER_ID IN ("); assembly.addParameterList(deletedPersonIds, ", "); assembly.add(")"); executeUpdate(assembly); } private void executeUpdate(StatementAssembly assembly) throws XRemoveException { Connection connection = null; IStatement statement = null; try { connection = getConnection(); statement = assembly.createStatement(connection); statement.executeUpdate(); statement.close(); statement = null; connection.close(); connection = null; } catch (SQLException ex) { throw new XRemoveException( "Unable to execute " + assembly + ": " + ex.getMessage(), ex); } finally { close(connection, statement, null); } } /** * Translates the next row from the result set into a {@link Book}. * * <p>This works with queries generated by {@link #buildBaseBookQuery()}. 
* **/ protected Book convertRowToBook(ResultSet set, Object[] columns) throws SQLException { int column = 1; columns[Book.ID_COLUMN] = set.getObject(column++); columns[Book.TITLE_COLUMN] = set.getString(column++); columns[Book.DESCRIPTION_COLUMN] = set.getString(column++); columns[Book.ISBN_COLUMN] = set.getString(column++); columns[Book.OWNER_ID_COLUMN] = set.getObject(column++); columns[Book.OWNER_NAME_COLUMN] = buildName(set.getString(column++), set.getString(column++)); columns[Book.HOLDER_ID_COLUMN] = set.getObject(column++); columns[Book.HOLDER_NAME_COLUMN] = buildName(set.getString(column++), set.getString(column++)); columns[Book.PUBLISHER_ID_COLUMN] = set.getObject(column++); columns[Book.PUBLISHER_NAME_COLUMN] = set.getString(column++); columns[Book.AUTHOR_COLUMN] = set.getString(column++); columns[Book.HIDDEN_COLUMN] = getBoolean(set, column++); columns[Book.LENDABLE_COLUMN] = getBoolean(set, column++); columns[Book.DATE_ADDED_COLUMN] = set.getTimestamp(column++); return new Book(columns); } private String buildName(String firstName, String lastName) { if (firstName == null) return lastName; return firstName + " " + lastName; } /** * All queries must use this exact set of select columns, so that * {@link #convertRow(ResultSet, Object[])} can build * the correct {@link Book} from each row. 
* **/ private static final String[] BOOK_SELECT_COLUMNS = { "book.BOOK_ID", "book.TITLE", "book.DESCRIPTION", "book.ISBN", "owner.PERSON_ID", "owner.FIRST_NAME", "owner.LAST_NAME", "holder.PERSON_ID", "holder.FIRST_NAME", "holder.LAST_NAME", "publisher.PUBLISHER_ID", "publisher.NAME", "book.AUTHOR", "book.HIDDEN", "book.LENDABLE", "book.DATE_ADDED" }; private static final String[] BOOK_ALIAS_COLUMNS = { "BOOK book", "PERSON owner", "PERSON holder", "PUBLISHER publisher" }; private static final String[] BOOK_JOINS = { "book.OWNER_ID = owner.PERSON_ID", "book.HOLDER_ID = holder.PERSON_ID", "book.PUBLISHER_ID = publisher.PUBLISHER_ID" }; private static final Map BOOK_SORT_ASCENDING = new HashMap(); private static final Map BOOK_SORT_DESCENDING = new HashMap(); static { BOOK_SORT_ASCENDING.put(SortColumn.TITLE, "book.TITLE"); BOOK_SORT_ASCENDING.put(SortColumn.HOLDER, "holder.LAST_NAME, holder.FIRST_NAME"); BOOK_SORT_ASCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME, owner.LAST_NAME"); BOOK_SORT_ASCENDING.put(SortColumn.PUBLISHER, "publisher.NAME"); BOOK_SORT_ASCENDING.put(SortColumn.AUTHOR, "book.AUTHOR"); BOOK_SORT_DESCENDING.put(SortColumn.TITLE, "book.TITLE DESC"); BOOK_SORT_DESCENDING.put( SortColumn.HOLDER, "holder.LAST_NAME DESC, holder.FIRST_NAME DESC"); BOOK_SORT_DESCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME DESC, owner.LAST_NAME DESC"); BOOK_SORT_DESCENDING.put(SortColumn.PUBLISHER, "publisher.NAME DESC"); BOOK_SORT_DESCENDING.put(SortColumn.AUTHOR, "book.AUTHOR DESC"); } protected StatementAssembly buildBaseBookQuery() { StatementAssembly result = new StatementAssembly(); result.newLine("SELECT "); result.addList(BOOK_SELECT_COLUMNS, ", "); result.newLine("FROM "); result.addList(BOOK_ALIAS_COLUMNS, ", "); result.newLine("WHERE "); result.addList(BOOK_JOINS, " AND "); return result; } /** * Adds a sort ordering clause to the statement. If ordering is null, * orders by book title. 
* * @param assembly to update * @param ordering defines the column to sort on, and the order (ascending or descending) * @since 3.0 * * **/ protected void addSortOrdering(StatementAssembly assembly, SortOrdering ordering) { if (ordering == null) { assembly.newLine("ORDER BY book.TITLE"); return; } Map sorts = ordering.isDescending() ? BOOK_SORT_DESCENDING : BOOK_SORT_ASCENDING; String term = (String) sorts.get(ordering.getColumn()); assembly.newLine("ORDER BY "); assembly.add(term); } protected void addSubstringSearch(StatementAssembly assembly, String column, String value) { if (value == null) return; String trimmed = value.trim(); if (trimmed.length() == 0) return; // Here's the McKoi dependency: LOWER() is a database-specific // SQL function. assembly.addSep(" AND LOWER("); assembly.add(column); assembly.add(") LIKE"); assembly.addParameter("%" + trimmed.toLowerCase() + "%"); } /** * Closes the resultSet (if not null), then the statement (if not null), * then the Connection (if not null). Exceptions are written to System.out. 
     *
     **/
    protected void close(Connection connection, IStatement statement, ResultSet resultSet)
    {
        // Close in reverse order of acquisition; each failure is logged and
        // swallowed so the remaining resources still get closed.
        if (resultSet != null)
        {
            try
            {
                resultSet.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing result set.");
                ex.printStackTrace();
            }
        }

        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing statement.");
                ex.printStackTrace();
            }
        }

        if (connection != null)
        {
            try
            {
                connection.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing connection.");
                ex.printStackTrace();
            }
        }
    }

    // Lazily resolves and caches the Person home interface from the ENC.
    private IPersonHome getPersonHome()
    {
        if (_personHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Person");
                _personHome = (IPersonHome) PortableRemoteObject.narrow(raw, IPersonHome.class);
            }
            catch (NamingException ex)
            {
                throw new XEJBException("Could not lookup Person home interface.", ex);
            }
        }

        return _personHome;
    }

    // Lazily resolves and caches the Publisher home interface from the ENC.
    private IPublisherHome getPublisherHome()
    {
        if (_publisherHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Publisher");
                _publisherHome = (IPublisherHome) PortableRemoteObject.narrow(raw, IPublisherHome.class);
            }
            catch (NamingException e)
            {
                throw new XEJBException("Could not lookup Publisher home interface.", e);
            }
        }

        return _publisherHome;
    }

    // Lazily resolves and caches the Book home interface from the ENC.
    private IBookHome getBookHome()
    {
        if (_bookHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Book");
                _bookHome = (IBookHome) PortableRemoteObject.narrow(raw, IBookHome.class);
            }
            catch (NamingException e)
            {
                throw new XEJBException("Could not lookup Book home interface.", e);
            }
        }

        return _bookHome;
    }

    /**
     *  Gets a new connection from the data source.
     *
     **/
    protected Connection getConnection()
    {
        try
        {
            return _dataSource.getConnection();
        }
        catch (SQLException e)
        {
            throw new XEJBException("Unable to get database connection from pool.", e);
        }
    }

    // Base SELECT over PERSON; column order must match convertRowToPerson().
    protected StatementAssembly buildBasePersonQuery()
    {
        StatementAssembly result;

        result = new StatementAssembly();
        result.newLine("SELECT PERSON_ID, FIRST_NAME, LAST_NAME, EMAIL, ");
        result.newLine(" LOCKED_OUT, ADMIN, LAST_ACCESS");
        result.newLine("FROM PERSON");

        return result;
    }

    /**
     *  Translates the next row from the result set into a {@link Person}.
     *
     *  <p>This works with queries generated by {@link #buildBasePersonQuery()}.
     *
     **/
    protected Person convertRowToPerson(ResultSet set, Object[] columns) throws SQLException
    {
        // Column order here mirrors buildBasePersonQuery() exactly.
        int column = 1;

        columns[Person.ID_COLUMN] = set.getObject(column++);
        columns[Person.FIRST_NAME_COLUMN] = set.getString(column++);
        columns[Person.LAST_NAME_COLUMN] = set.getString(column++);
        columns[Person.EMAIL_COLUMN] = set.getString(column++);
        columns[Person.LOCKED_OUT_COLUMN] = getBoolean(set, column++);
        columns[Person.ADMIN_COLUMN] = getBoolean(set, column++);
        columns[Person.LAST_ACCESS_COLUMN] = set.getTimestamp(column++);

        return new Person(columns);
    }

    // Converts a SQL boolean column into a shared Boolean constant.
    private Boolean getBoolean(ResultSet set, int index) throws SQLException
    {
        return set.getBoolean(index) ? Boolean.TRUE : Boolean.FALSE;
    }

    // Rejects registration when the e-mail or the first/last name pair is
    // already in use (case-insensitive comparison via LOWER()).
    private void validateUniquePerson(String firstName, String lastName, String email)
        throws RegistrationException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;

        String trimmedEmail = email.trim().toLowerCase();
        String trimmedLastName = lastName.trim().toLowerCase();
        String trimmedFirstName = firstName.trim().toLowerCase();

        try
        {
            connection = getConnection();

            StatementAssembly assembly = new StatementAssembly();
            assembly.newLine("SELECT PERSON_ID");
            assembly.newLine("FROM PERSON");
            assembly.newLine("WHERE ");
            assembly.add("LOWER(EMAIL) = ");
            assembly.addParameter(trimmedEmail);

            statement = assembly.createStatement(connection);
            set = statement.executeQuery();

            if (set.next())
                throw new RegistrationException("Email address is already in use by another user.");

            // Release the first statement/result set before reusing the
            // variables for the second query (connection stays open).
            close(null, statement, set);

            assembly = new StatementAssembly();
            assembly.newLine("SELECT PERSON_ID");
            assembly.newLine("FROM PERSON");
            assembly.newLine("WHERE ");
            assembly.add("LOWER(FIRST_NAME) = ");
            assembly.addParameter(trimmedFirstName);
            assembly.addSep(" AND ");
            assembly.add("LOWER(LAST_NAME) = ");
            assembly.addParameter(trimmedLastName);

            statement = assembly.createStatement(connection);
            set = statement.executeQuery();

            if (set.next())
                throw new RegistrationException("Name provided is already in use by another user.");
        }
        catch (SQLException e)
        {
            throw new RegistrationException("Could not access database: " + e.getMessage(), e);
        }
        finally
        {
            // NOTE(review): statement/set may already have been closed by the
            // intermediate close(null, ...) above; close() tolerates this by
            // logging and swallowing any SQLException.
            close(connection, statement, set);
        }
    }

    // Returns a borrowed book to its owner by resetting the holder id.
    public Book returnBook(Integer bookId) throws RemoteException, FinderException
    {
        IBookHome bookHome = getBookHome();

        IBook book = bookHome.findByPrimaryKey(bookId);

        Integer ownerPK = book.getOwnerId();

        book.setHolderId(ownerPK);

        return getBook(bookId);
    }
}
apache/tapestry3
tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/impl/OperationsBean.java
Java
apache-2.0
33,019
/* * Copyright © 2014 - 2018 Leipzig University (Database Research Group) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Contains implementations graph pattern matching on a single input graph. */ package org.gradoop.flink.model.impl.operators.matching.transactional.function;
niklasteichmann/gradoop
gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/transactional/function/package-info.java
Java
apache-2.0
802
package org.sakaiproject.scorm.ui.player.behaviors; import org.adl.api.ecmascript.SCORM13APIInterface; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.scorm.model.api.ScoBean; import org.sakaiproject.scorm.model.api.SessionBean; import org.sakaiproject.scorm.navigation.INavigable; import org.sakaiproject.scorm.navigation.INavigationEvent; import org.sakaiproject.scorm.service.api.ScormApplicationService; import org.sakaiproject.scorm.service.api.ScormSequencingService; public abstract class SCORM13API implements SCORM13APIInterface { private static Log log = LogFactory.getLog(SCORM13API.class); // String value of FALSE for JavaScript returns. protected static final String STRING_FALSE = "false"; // String value of TRUE for JavaScript returns. protected static final String STRING_TRUE = "true"; public abstract SessionBean getSessionBean(); public abstract ScormApplicationService getApplicationService(); public abstract ScormSequencingService getSequencingService(); public abstract ScoBean getScoBean(); public abstract INavigable getAgent(); public abstract Object getTarget(); // Implementation of SCORM13APIInterface public String Commit(String parameter) { // TODO: Disable UI controls -- or throttle them on server -- don't mess with js // Assume failure String result = STRING_FALSE; if (null == getSessionBean()) { log.error("Null run state!"); } if (getApplicationService().commit(parameter, getSessionBean(), getScoBean())) result = STRING_TRUE; // TODO: Enable UI controls return result; } public String GetDiagnostic(String errorCode) { return getApplicationService().getDiagnostic(errorCode, getSessionBean()); } public String GetErrorString(String errorCode) { return getApplicationService().getErrorString(errorCode, getSessionBean()); } public String GetLastError() { return getApplicationService().getLastError(getSessionBean()); } public String GetValue(String parameter) { return 
getApplicationService().getValue(parameter, getSessionBean(), getScoBean()); } public String Initialize(String parameter) { // Assume failure String result = STRING_FALSE; if (getApplicationService().initialize(parameter, getSessionBean(), getScoBean())) result = STRING_TRUE; return result; } public String SetValue(String dataModelElement, String value) { // Assume failure String result = STRING_FALSE; if (getApplicationService().setValue(dataModelElement, value, getSessionBean(), getScoBean())) { result = STRING_TRUE; } return result; } public String Terminate(String parameter) { // Assume failure String result = STRING_FALSE; if (null == getSessionBean()) { log.error("Null run state!"); return result; } INavigationEvent navigationEvent = getApplicationService().newNavigationEvent(); boolean isSuccessful = getApplicationService().terminate(parameter, navigationEvent, getSessionBean(), getScoBean()); if (isSuccessful) { result = STRING_TRUE; if (navigationEvent.isChoiceEvent()) { getSequencingService().navigate(navigationEvent.getChoiceEvent(), getSessionBean(), getAgent(), getTarget()); } else { getSequencingService().navigate(navigationEvent.getEvent(), getSessionBean(), getAgent(), getTarget()); } } return result; } }
marktriggs/nyu-sakai-10.4
scorm/scorm-tool/src/java/org/sakaiproject/scorm/ui/player/behaviors/SCORM13API.java
Java
apache-2.0
3,425
/*
 * Copyright DbMaintain.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.dbmaintain.script.parser.impl;

import org.dbmaintain.script.parser.ScriptParser;
import org.dbmaintain.script.parser.parsingstate.ParsingState;
import org.dbmaintain.util.DbMaintainException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Properties;

/**
 * A class for parsing statements out of sql scripts.
 * <p/>
 * All statements should be separated with a semicolon (;). The last statement will be
 * added even if it does not end with a semicolon. The semicolons will not be included in the returned statements.
 * <p/>
 * This parser also takes quoted literals, double quoted text and in-line (--comment) and block (/ * comment * /)
 * into account when parsing the statements.
 *
 * @author Tim Ducheyne
 * @author Filip Neven
 * @author Stefan Bangels
 */
public class DefaultScriptParser implements ScriptParser {

    /**
     * The reader for the script content stream (buffered in the constructor).
     */
    protected Reader scriptReader;

    /**
     * Whether backslash escaping is enabled
     */
    protected boolean backSlashEscapingEnabled;

    /**
     * Parameters that must be replaced in the script. Null if there are no such parameters
     */
    protected Properties scriptParameters;

    /**
     * The starting state
     */
    protected ParsingState initialParsingState;

    /**
     * True if the script has ended
     */
    protected boolean endOfScriptReached = false;

    /**
     * The current parsed character and the one-character lookahead; null means end of stream.
     */
    protected Character currentChar, nextChar;


    /**
     * Constructor for DefaultScriptParser.
     *
     * @param scriptReader             the reader that will provide the script content, not null
     * @param initialParsingState      the inial state when starting to parse a script, not null
     * @param backSlashEscapingEnabled true if backslash escaping is enabled
     * @param scriptParameters         parameters that must be replaced in the script. null if there are no such parameters.
     */
    public DefaultScriptParser(Reader scriptReader, ParsingState initialParsingState, boolean backSlashEscapingEnabled, Properties scriptParameters) {
        this.backSlashEscapingEnabled = backSlashEscapingEnabled;
        this.initialParsingState = initialParsingState;
        this.scriptParameters = scriptParameters;
        // BUGFIX/cleanup: the field was previously assigned the raw reader and
        // then immediately overwritten; only the buffered wrapper is kept.
        this.scriptReader = new BufferedReader(scriptReader);
    }


    /**
     * Parses the next statement out of the given script stream.
     *
     * @return the statements, null if no more statements
     */
    public String getNextStatement() {
        try {
            return getNextStatementImpl();
        } catch (IOException e) {
            throw new DbMaintainException("Unable to parse next statement from script.", e);
        }
    }


    /**
     * Actual implementation of getNextStatement.
     *
     * @return the statements, null if no more statements
     * @throws IOException if a problem occurs reading the script from the file system
     */
    protected String getNextStatementImpl() throws IOException {
        StatementBuilder statementBuilder = createStatementBuilder();

        // Make sure that we read currentChar when we start reading a new script. If not null, currentChar was already
        // set to the first character of the next statement when we read the previous statement.
        if (currentChar == null) {
            currentChar = readNextCharacter();
        }
        while (!endOfScriptReached) {
            if (currentChar == null) {
                // A null current character is fed to the builder one last time
                // below; the builder is expected to treat it as end-of-script.
                endOfScriptReached = true;
            }
            nextChar = readNextCharacter();
            statementBuilder.addCharacter(currentChar, nextChar);
            currentChar = nextChar;

            if (statementBuilder.isComplete()) {
                if (statementBuilder.hasExecutableContent()) {
                    return statementBuilder.buildStatement();
                }
                // Complete but empty (e.g. only comments/whitespace): start over.
                statementBuilder = createStatementBuilder();
            }
        }
        if (!statementBuilder.isComplete() && statementBuilder.hasExecutableContent()) {
            throw new DbMaintainException("Last statement in script was not ended correctly.");
        }
        return null;
    }

    /**
     * Reads one character from the script stream.
     *
     * @return the next character, null at end of stream
     * @throws IOException if the underlying reader fails
     */
    protected Character readNextCharacter() throws IOException {
        int charAsInt = scriptReader.read();
        return charAsInt == -1 ? null : (char) charAsInt;
    }


    /**
     * Factory method for the statement builder.
     *
     * @return The statement builder, not null
     */
    protected StatementBuilder createStatementBuilder() {
        return new StatementBuilder(initialParsingState, scriptParameters);
    }
}
fcamblor/dbmaintain-maven-plugin
dbmaintain/src/main/java/org/dbmaintain/script/parser/impl/DefaultScriptParser.java
Java
apache-2.0
5,339
import logging
import re
import socket

from mopidy.config import validators
from mopidy.internal import log, path


def decode(value):
    # Turn escaped sequences (e.g. the two characters '\' 'n') back into the
    # real control characters, decoding surrogate-escaped bytes first.
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")

    for char in ("\\", "\n", "\t"):
        value = value.replace(
            char.encode(encoding="unicode-escape").decode(), char
        )

    return value


def encode(value):
    # Inverse of decode(): replace real control characters with their escaped
    # textual representation so the value can be stored in a config file.
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")

    for char in ("\\", "\n", "\t"):
        value = value.replace(
            char, char.encode(encoding="unicode-escape").decode()
        )

    return value


class DeprecatedValue:
    pass


class ConfigValue:
    """Represents a config key's value and how to handle it.

    Normally you will only be interacting with sub-classes for config values
    that encode either deserialization behavior and/or validation.

    Each config value should be used for the following actions:

    1. Deserializing from a raw string and validating, raising ValueError on
       failure.
    2. Serializing a value back to a string that can be stored in a config.
    3. Formatting a value to a printable form (useful for masking secrets).

    :class:`None` values should not be deserialized, serialized or formatted,
    the code interacting with the config should simply skip None config
    values.
    """

    def deserialize(self, value):
        """Cast raw string to appropriate type."""
        return decode(value)

    def serialize(self, value, display=False):
        """Convert value back to string for saving."""
        if value is None:
            return ""
        return str(value)


class Deprecated(ConfigValue):
    """Deprecated value.

    Used for ignoring old config values that are no longer in use, but should
    not cause the config parser to crash.
    """

    def deserialize(self, value):
        return DeprecatedValue()

    def serialize(self, value, display=False):
        return DeprecatedValue()


class String(ConfigValue):
    """String value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.
    """

    def __init__(self, optional=False, choices=None):
        self._required = not optional
        self._choices = choices

    def deserialize(self, value):
        value = decode(value).strip()
        validators.validate_required(value, self._required)
        if not value:
            return None
        validators.validate_choice(value, self._choices)
        return value

    def serialize(self, value, display=False):
        if value is None:
            return ""
        return encode(value)


class Secret(String):
    """Secret string value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.

    Should be used for passwords, auth tokens etc. Will mask value when being
    displayed.
    """

    def __init__(self, optional=False, choices=None):
        self._required = not optional
        self._choices = None  # Choices doesn't make sense for secrets

    def serialize(self, value, display=False):
        if value is not None and display:
            return "********"
        return super().serialize(value, display)


class Integer(ConfigValue):
    """Integer value."""

    def __init__(
        self, minimum=None, maximum=None, choices=None, optional=False
    ):
        self._required = not optional
        self._minimum = minimum
        self._maximum = maximum
        self._choices = choices

    def deserialize(self, value):
        value = decode(value)
        validators.validate_required(value, self._required)
        if not value:
            return None
        value = int(value)
        validators.validate_choice(value, self._choices)
        validators.validate_minimum(value, self._minimum)
        validators.validate_maximum(value, self._maximum)
        return value


class Boolean(ConfigValue):
    """Boolean value.

    Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as
    :class:`True`.

    Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as
    :class:`False`.
    """

    true_values = ("1", "yes", "true", "on")
    false_values = ("0", "no", "false", "off")

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        value = decode(value)
        validators.validate_required(value, self._required)
        if not value:
            return None
        if value.lower() in self.true_values:
            return True
        elif value.lower() in self.false_values:
            return False
        raise ValueError(f"invalid value for boolean: {value!r}")

    def serialize(self, value, display=False):
        if value is True:
            return "true"
        elif value in (False, None):
            return "false"
        else:
            raise ValueError(f"{value!r} is not a boolean")


class List(ConfigValue):
    """List value.

    Supports elements split by commas or newlines. Newlines take presedence
    and empty list items will be filtered out.
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        value = decode(value)
        # Newline-separated lists win over comma-separated ones so values
        # containing commas can still be expressed one-per-line.
        if "\n" in value:
            values = re.split(r"\s*\n\s*", value)
        else:
            values = re.split(r"\s*,\s*", value)
        values = tuple(v.strip() for v in values if v.strip())
        validators.validate_required(values, self._required)
        return tuple(values)

    def serialize(self, value, display=False):
        if not value:
            return ""
        return "\n " + "\n ".join(encode(v) for v in value if v)


class LogColor(ConfigValue):
    def deserialize(self, value):
        value = decode(value)
        validators.validate_choice(value.lower(), log.COLORS)
        return value.lower()

    def serialize(self, value, display=False):
        if value.lower() in log.COLORS:
            return encode(value.lower())
        return ""


class LogLevel(ConfigValue):
    """Log level value.

    Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``,
    ``trace``, or ``all``, with any casing.
    """

    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
        "trace": log.TRACE_LOG_LEVEL,
        "all": logging.NOTSET,
    }

    def deserialize(self, value):
        value = decode(value)
        validators.validate_choice(value.lower(), self.levels.keys())
        return self.levels.get(value.lower())

    def serialize(self, value, display=False):
        lookup = {v: k for k, v in self.levels.items()}
        if value in lookup:
            return encode(lookup[value])
        return ""


class Hostname(ConfigValue):
    """Network hostname value."""

    def __init__(self, optional=False):
        self._required = not optional

    # NOTE(review): unlike the other ConfigValue subclasses, deserialize here
    # takes an extra `display` parameter — looks unused; confirm with callers
    # before removing it.
    def deserialize(self, value, display=False):
        value = decode(value).strip()
        validators.validate_required(value, self._required)
        if not value:
            return None
        socket_path = path.get_unix_socket_path(value)
        if socket_path is not None:
            # Unix socket "hostnames" are validated/expanded as file system
            # paths rather than resolved via DNS.
            path_str = Path(not self._required).deserialize(socket_path)
            return f"unix:{path_str}"
        try:
            socket.getaddrinfo(value, None)
        except OSError:
            raise ValueError("must be a resolveable hostname or valid IP")
        return value


class Port(Integer):
    """Network port value.

    Expects integer in the range 0-65535, zero tells the kernel to simply
    allocate a port for us.
    """

    def __init__(self, choices=None, optional=False):
        super().__init__(
            minimum=0, maximum=2 ** 16 - 1, choices=choices, optional=optional
        )


class _ExpandedPath(str):
    # str subclass whose string value is the expanded path while remembering
    # the original, unexpanded form for round-tripping in serialize().
    def __new__(cls, original, expanded):
        return super().__new__(cls, expanded)

    def __init__(self, original, expanded):
        self.original = original


class Path(ConfigValue):
    """File system path.

    The following expansions of the path will be done:

    - ``~`` to the current user's home directory
    - ``$XDG_CACHE_DIR`` according to the XDG spec
    - ``$XDG_CONFIG_DIR`` according to the XDG spec
    - ``$XDG_DATA_DIR`` according to the XDG spec
    - ``$XDG_MUSIC_DIR`` according to the XDG spec
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        value = decode(value).strip()
        expanded = path.expand_path(value)
        validators.validate_required(value, self._required)
        validators.validate_required(expanded, self._required)
        if not value or expanded is None:
            return None
        return _ExpandedPath(value, expanded)

    def serialize(self, value, display=False):
        if isinstance(value, _ExpandedPath):
            # Store the user's original spelling, not the expanded path.
            value = value.original
        if isinstance(value, bytes):
            value = value.decode(errors="surrogateescape")
        return value
kingosticks/mopidy
mopidy/config/types.py
Python
apache-2.0
9,146
//----------------------------------------------------------------------- // <copyright file="NUnitAssertions.cs" company="Akka.NET Project"> // Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> // Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net> // </copyright> //----------------------------------------------------------------------- using System; using NUnit.Framework; namespace Akka.TestKit.NUnit { /// <summary> /// Assertions for NUnit /// </summary> public class NUnitAssertions : ITestKitAssertions { public void Fail(string format = "", params object[] args) { Assert.Fail(format, args); } public void AssertTrue(bool condition, string format = "", params object[] args) { Assert.IsTrue(condition, format, args); } public void AssertFalse(bool condition, string format = "", params object[] args) { Assert.IsFalse(condition, format, args); } public void AssertEqual<T>(T expected, T actual, string format = "", params object[] args) { Assert.AreEqual(expected, actual, format, args); } public void AssertEqual<T>(T expected, T actual, Func<T, T, bool> comparer, string format = "", params object[] args) { if (!comparer(expected, actual)) throw new AssertionException(string.Format("Assert.AreEqual failed. Expected [{0}]. Actual [{1}]. {2}", FormatValue(expected), FormatValue(actual), string.Format(format, args))); } private static string FormatValue<T>(T expected) { return ReferenceEquals(expected, null) ? "null" : expected.ToString(); } } }
skotzko/akka.net
src/contrib/testkits/Akka.TestKit.NUnit/NUnitAssertions.cs
C#
apache-2.0
1,786
/** * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2017 the original author or authors. */ package org.assertj.core.util.diff; import java.util.List; /** * Initially copied from https://code.google.com/p/java-diff-utils/. * <p> * Describes the delete-delta between original and revised texts. * * @author <a href="dm.naumenko@gmail.com">Dmitry Naumenko</a> * @param <T> The type of the compared elements in the 'lines'. */ public class DeleteDelta<T> extends Delta<T> { /** * Creates a change delta with the two given chunks. * * @param original * The original chunk. Must not be {@code null}. * @param revised * The original chunk. Must not be {@code null}. */ public DeleteDelta(Chunk<T> original, Chunk<T> revised) { super(original, revised); } /** * {@inheritDoc} */ @Override public void applyTo(List<T> target) throws IllegalStateException { verify(target); int position = getOriginal().getPosition(); int size = getOriginal().size(); for (int i = 0; i < size; i++) { target.remove(position); } } @Override public TYPE getType() { return Delta.TYPE.DELETE; } @Override public void verify(List<T> target) throws IllegalStateException { getOriginal().verify(target); } }
ChrisCanCompute/assertj-core
src/main/java/org/assertj/core/util/diff/DeleteDelta.java
Java
apache-2.0
1,820
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.constructionheuristic.greedyFit.decider; public enum ConstructionHeuristicPickEarlyType { NEVER, FIRST_LAST_STEP_SCORE_EQUAL_OR_IMPROVING; }
psiroky/optaplanner
optaplanner-core/src/main/java/org/optaplanner/core/impl/constructionheuristic/greedyFit/decider/ConstructionHeuristicPickEarlyType.java
Java
apache-2.0
779
#
# Author:: Adam Jacob (<adam@chef.io>)
# Author:: Tyler Cloke (<tyler@chef.io>)
# Copyright:: Copyright 2008-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require "spec_helper"

describe Chef::Resource::RemoteFile do
  let(:resource) { Chef::Resource::RemoteFile.new("fakey_fakerton") }

  describe "name_property" do
    it "the path property is the name_property" do
      expect(resource.path).to eql("fakey_fakerton")
    end
  end

  describe "Actions" do
    it "sets the default action as :create" do
      expect(resource.action).to eql([:create])
    end

    it "supports :create, :create_if_missing, :delete, :touch actions" do
      expect { resource.action :create }.not_to raise_error
      expect { resource.action :create_if_missing }.not_to raise_error
      expect { resource.action :delete }.not_to raise_error
      expect { resource.action :touch }.not_to raise_error
    end
  end

  describe "initialize" do
    it "is a subclass of Chef::Resource::File" do
      expect(resource).to be_a_kind_of(Chef::Resource::File)
    end
  end

  it "says its provider is RemoteFile when the source is an absolute URI" do
    resource.source("http://www.google.com/robots.txt")
    expect(resource.provider_for_action(:create)).to be_kind_of(Chef::Provider::RemoteFile)
  end

  it "says its provider is RemoteFile when the source is a network share" do
    resource.source("\\\\fakey\\fakerton\\fake.txt")
    expect(resource.provider_for_action(:create)).to be_kind_of(Chef::Provider::RemoteFile)
  end

  describe "source" do
    it "does not have a default value for 'source'" do
      expect(resource.source).to eql([])
    end

    it "accepts a URI for the remote file source" do
      resource.source "http://opscode.com/"
      expect(resource.source).to eql([ "http://opscode.com/" ])
    end

    it "accepts a windows network share source" do
      resource.source "\\\\fakey\\fakerton\\fake.txt"
      expect(resource.source).to eql([ "\\\\fakey\\fakerton\\fake.txt" ])
    end

    it "accepts file URIs with spaces" do
      resource.source("file:///C:/foo bar")
      expect(resource.source).to eql(["file:///C:/foo bar"])
    end

    it "accepts a delayed evalutator (string) for the remote file source" do
      resource.source Chef::DelayedEvaluator.new { "http://opscode.com/" }
      expect(resource.source).to eql([ "http://opscode.com/" ])
    end

    it "accepts an array of URIs for the remote file source" do
      resource.source([ "http://opscode.com/", "http://puppetlabs.com/" ])
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    it "accepts a delated evaluator (array) for the remote file source" do
      resource.source Chef::DelayedEvaluator.new { [ "http://opscode.com/", "http://puppetlabs.com/" ] }
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    it "accepts an multiple URIs as arguments for the remote file source" do
      resource.source("http://opscode.com/", "http://puppetlabs.com/")
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    it "only accept a single argument if a delayed evalutor is used" do
      expect do
        resource.source("http://opscode.com/", Chef::DelayedEvaluator.new { "http://opscode.com/" })
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "only accept a single array item if a delayed evalutor is used" do
      expect do
        resource.source(["http://opscode.com/", Chef::DelayedEvaluator.new { "http://opscode.com/" }])
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "does not accept a non-URI as the source" do
      expect { resource.source("not-a-uri") }.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "does not accept a non-URI as the source when read from a delayed evaluator" do
      expect do
        resource.source(Chef::DelayedEvaluator.new { "not-a-uri" })
        resource.source
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "raises an exception when source is an empty array" do
      expect { resource.source([]) }.to raise_error(ArgumentError)
    end
  end

  describe "checksum" do
    it "accepts a string for the checksum object" do
      resource.checksum "asdf"
      expect(resource.checksum).to eql("asdf")
    end

    it "defaults to nil" do
      expect(resource.checksum).to eq(nil)
    end
  end

  describe "ftp_active_mode" do
    it "accepts a boolean for the ftp_active_mode object" do
      resource.ftp_active_mode true
      expect(resource.ftp_active_mode).to be_truthy
    end

    it "defaults to false" do
      expect(resource.ftp_active_mode).to be_falsey
    end
  end

  describe "conditional get options" do
    it "defaults to using etags and last modified" do
      expect(resource.use_etags).to be_truthy
      expect(resource.use_last_modified).to be_truthy
    end

    it "enable or disables etag and last modified options as a group" do
      resource.use_conditional_get(false)
      expect(resource.use_etags).to be_falsey
      expect(resource.use_last_modified).to be_falsey

      resource.use_conditional_get(true)
      expect(resource.use_etags).to be_truthy
      expect(resource.use_last_modified).to be_truthy
    end

    it "disables etags indivdually" do
      resource.use_etags(false)
      expect(resource.use_etags).to be_falsey
      expect(resource.use_last_modified).to be_truthy
    end

    it "disables last modified individually" do
      resource.use_last_modified(false)
      expect(resource.use_last_modified).to be_falsey
      expect(resource.use_etags).to be_truthy
    end
  end

  describe "when it has group, mode, owner, source, and checksum" do
    before do
      if Chef::Platform.windows?
        resource.path("C:/temp/origin/file.txt")
        resource.rights(:read, "Everyone")
        resource.deny_rights(:full_control, "Clumsy_Sam")
      else
        resource.path("/this/path/")
        resource.group("pokemon")
        resource.mode("0664")
        resource.owner("root")
      end
      resource.source("https://www.google.com/images/srpr/logo3w.png")
      resource.checksum("1" * 26)
    end

    it "describes its state" do
      state = resource.state_for_resource_reporter
      if Chef::Platform.windows?
        # Removed a stray debug `puts state` that leaked into test output here.
        expect(state[:rights]).to eq([{ :permissions => :read, :principals => "Everyone" }])
        expect(state[:deny_rights]).to eq([{ :permissions => :full_control, :principals => "Clumsy_Sam" }])
      else
        expect(state[:group]).to eq("pokemon")
        expect(state[:mode]).to eq("0664")
        expect(state[:owner]).to eq("root")
        expect(state[:checksum]).to eq("1" * 26)
      end
    end

    it "returns the path as its identity" do
      if Chef::Platform.windows?
        expect(resource.identity).to eq("C:/temp/origin/file.txt")
      else
        expect(resource.identity).to eq("/this/path/")
      end
    end
  end
end
Ppjet6/chef
spec/unit/resource/remote_file_spec.rb
Ruby
apache-2.0
7,638
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.flink.translation.wrappers.streaming.io; import com.google.common.annotations.VisibleForTesting; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import org.apache.beam.runners.flink.metrics.FlinkMetricContainer; import org.apache.beam.runners.flink.metrics.ReaderInvocationUtil; import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.SerializableCoder; import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.beam.sdk.values.ValueWithRecordId; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.StoppableFunction; import 
org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.OperatorStateStore; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.state.CheckpointListener; import org.apache.flink.runtime.state.DefaultOperatorStateBackend; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction; import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source. */ public class UnboundedSourceWrapper< OutputT, CheckpointMarkT extends UnboundedSource.CheckpointMark> extends RichParallelSourceFunction<WindowedValue<ValueWithRecordId<OutputT>>> implements ProcessingTimeCallback, StoppableFunction, CheckpointListener, CheckpointedFunction { private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceWrapper.class); private final String stepName; /** * Keep the options so that we can initialize the localReaders. */ private final SerializedPipelineOptions serializedOptions; /** * For snapshot and restore. */ private final KvCoder< ? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> checkpointCoder; /** * The split sources. We split them in the constructor to ensure that all parallel * sources are consistent about the split sources. */ private final List<? extends UnboundedSource<OutputT, CheckpointMarkT>> splitSources; /** * The local split sources. 
Assigned at runtime when the wrapper is executed in parallel. */ private transient List<UnboundedSource<OutputT, CheckpointMarkT>> localSplitSources; /** * The local split readers. Assigned at runtime when the wrapper is executed in parallel. * Make it a field so that we can access it in {@link #onProcessingTime(long)} for * emitting watermarks. */ private transient List<UnboundedSource.UnboundedReader<OutputT>> localReaders; /** * Flag to indicate whether the source is running. * Initialize here and not in run() to prevent races where we cancel a job before run() is * ever called or run() is called after cancel(). */ private volatile boolean isRunning = true; /** * Make it a field so that we can access it in {@link #onProcessingTime(long)} for registering new * triggers. */ private transient StreamingRuntimeContext runtimeContext; /** * Make it a field so that we can access it in {@link #onProcessingTime(long)} for emitting * watermarks. */ private transient SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> context; /** * Pending checkpoints which have not been acknowledged yet. */ private transient LinkedHashMap<Long, List<CheckpointMarkT>> pendingCheckpoints; /** * Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}. */ private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32; private transient ListState<KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> stateForCheckpoint; /** * false if checkpointCoder is null or no restore state by starting first. 
*/ private transient boolean isRestored = false; @SuppressWarnings("unchecked") public UnboundedSourceWrapper( String stepName, PipelineOptions pipelineOptions, UnboundedSource<OutputT, CheckpointMarkT> source, int parallelism) throws Exception { this.stepName = stepName; this.serializedOptions = new SerializedPipelineOptions(pipelineOptions); if (source.requiresDeduping()) { LOG.warn("Source {} requires deduping but Flink runner doesn't support this yet.", source); } Coder<CheckpointMarkT> checkpointMarkCoder = source.getCheckpointMarkCoder(); if (checkpointMarkCoder == null) { LOG.info("No CheckpointMarkCoder specified for this source. Won't create snapshots."); checkpointCoder = null; } else { Coder<? extends UnboundedSource<OutputT, CheckpointMarkT>> sourceCoder = (Coder) SerializableCoder.of(new TypeDescriptor<UnboundedSource>() { }); checkpointCoder = KvCoder.of(sourceCoder, checkpointMarkCoder); } // get the splits early. we assume that the generated splits are stable, // this is necessary so that the mapping of state to source is correct // when restoring splitSources = source.split(parallelism, pipelineOptions); } /** * Initialize and restore state before starting execution of the source. */ @Override public void open(Configuration parameters) throws Exception { runtimeContext = (StreamingRuntimeContext) getRuntimeContext(); // figure out which split sources we're responsible for int subtaskIndex = runtimeContext.getIndexOfThisSubtask(); int numSubtasks = runtimeContext.getNumberOfParallelSubtasks(); localSplitSources = new ArrayList<>(); localReaders = new ArrayList<>(); pendingCheckpoints = new LinkedHashMap<>(); if (isRestored) { // restore the splitSources from the checkpoint to ensure consistent ordering for (KV<? 
extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> restored: stateForCheckpoint.get()) { localSplitSources.add(restored.getKey()); localReaders.add(restored.getKey().createReader( serializedOptions.getPipelineOptions(), restored.getValue())); } } else { // initialize localReaders and localSources from scratch for (int i = 0; i < splitSources.size(); i++) { if (i % numSubtasks == subtaskIndex) { UnboundedSource<OutputT, CheckpointMarkT> source = splitSources.get(i); UnboundedSource.UnboundedReader<OutputT> reader = source.createReader(serializedOptions.getPipelineOptions(), null); localSplitSources.add(source); localReaders.add(reader); } } } LOG.info("Unbounded Flink Source {}/{} is reading from sources: {}", subtaskIndex, numSubtasks, localSplitSources); } @Override public void run(SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx) throws Exception { context = ctx; FlinkMetricContainer metricContainer = new FlinkMetricContainer(getRuntimeContext()); ReaderInvocationUtil<OutputT, UnboundedSource.UnboundedReader<OutputT>> readerInvoker = new ReaderInvocationUtil<>( stepName, serializedOptions.getPipelineOptions(), metricContainer); if (localReaders.size() == 0) { // do nothing, but still look busy ... 
// also, output a Long.MAX_VALUE watermark since we know that we're not // going to emit anything // we can't return here since Flink requires that all operators stay up, // otherwise checkpointing would not work correctly anymore ctx.emitWatermark(new Watermark(Long.MAX_VALUE)); // wait until this is canceled final Object waitLock = new Object(); while (isRunning) { try { // Flink will interrupt us at some point //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (waitLock) { // don't wait indefinitely, in case something goes horribly wrong waitLock.wait(1000); } } catch (InterruptedException e) { if (!isRunning) { // restore the interrupted state, and fall through the loop Thread.currentThread().interrupt(); } } } } else if (localReaders.size() == 1) { // the easy case, we just read from one reader UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(0); boolean dataAvailable = readerInvoker.invokeStart(reader); if (dataAvailable) { emitElement(ctx, reader); } setNextWatermarkTimer(this.runtimeContext); while (isRunning) { dataAvailable = readerInvoker.invokeAdvance(reader); if (dataAvailable) { emitElement(ctx, reader); } else { Thread.sleep(50); } } } else { // a bit more complicated, we are responsible for several localReaders // loop through them and sleep if none of them had any data int numReaders = localReaders.size(); int currentReader = 0; // start each reader and emit data if immediately available for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) { boolean dataAvailable = readerInvoker.invokeStart(reader); if (dataAvailable) { emitElement(ctx, reader); } } // a flag telling us whether any of the localReaders had data // if no reader had data, sleep for bit boolean hadData = false; while (isRunning) { UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(currentReader); boolean dataAvailable = readerInvoker.invokeAdvance(reader); if (dataAvailable) { emitElement(ctx, reader); hadData = 
true; } currentReader = (currentReader + 1) % numReaders; if (currentReader == 0 && !hadData) { Thread.sleep(50); } else if (currentReader == 0) { hadData = false; } } } } /** * Emit the current element from the given Reader. The reader is guaranteed to have data. */ private void emitElement( SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx, UnboundedSource.UnboundedReader<OutputT> reader) { // make sure that reader state update and element emission are atomic // with respect to snapshots synchronized (ctx.getCheckpointLock()) { OutputT item = reader.getCurrent(); byte[] recordId = reader.getCurrentRecordId(); Instant timestamp = reader.getCurrentTimestamp(); WindowedValue<ValueWithRecordId<OutputT>> windowedValue = WindowedValue.of(new ValueWithRecordId<>(item, recordId), timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING); ctx.collectWithTimestamp(windowedValue, timestamp.getMillis()); } } @Override public void close() throws Exception { super.close(); if (localReaders != null) { for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) { reader.close(); } } } @Override public void cancel() { isRunning = false; } @Override public void stop() { isRunning = false; } // ------------------------------------------------------------------------ // Checkpoint and restore // ------------------------------------------------------------------------ @Override public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception { if (!isRunning) { LOG.debug("snapshotState() called on closed source"); } else { if (checkpointCoder == null) { // no checkpoint coder available in this source return; } stateForCheckpoint.clear(); long checkpointId = functionSnapshotContext.getCheckpointId(); // we checkpoint the sources along with the CheckpointMarkT to ensure // than we have a correct mapping of checkpoints to sources when // restoring List<CheckpointMarkT> checkpointMarks = new ArrayList<>(localSplitSources.size()); for (int i = 0; 
i < localSplitSources.size(); i++) { UnboundedSource<OutputT, CheckpointMarkT> source = localSplitSources.get(i); UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(i); @SuppressWarnings("unchecked") CheckpointMarkT mark = (CheckpointMarkT) reader.getCheckpointMark(); checkpointMarks.add(mark); KV<UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> kv = KV.of(source, mark); stateForCheckpoint.add(kv); } // cleanup old pending checkpoints and add new checkpoint int diff = pendingCheckpoints.size() - MAX_NUMBER_PENDING_CHECKPOINTS; if (diff >= 0) { for (Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); diff >= 0; diff--) { iterator.next(); iterator.remove(); } } pendingCheckpoints.put(checkpointId, checkpointMarks); } } @Override public void initializeState(FunctionInitializationContext context) throws Exception { if (checkpointCoder == null) { // no checkpoint coder available in this source return; } OperatorStateStore stateStore = context.getOperatorStateStore(); CoderTypeInformation< KV<? 
extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder); stateForCheckpoint = stateStore.getOperatorState( new ListStateDescriptor<>(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME, typeInformation.createSerializer(new ExecutionConfig()))); if (context.isRestored()) { isRestored = true; LOG.info("Having restore state in the UnbounedSourceWrapper."); } else { LOG.info("No restore state for UnbounedSourceWrapper."); } } @Override public void onProcessingTime(long timestamp) throws Exception { if (this.isRunning) { synchronized (context.getCheckpointLock()) { // find minimum watermark over all localReaders long watermarkMillis = Long.MAX_VALUE; for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) { Instant watermark = reader.getWatermark(); if (watermark != null) { watermarkMillis = Math.min(watermark.getMillis(), watermarkMillis); } } context.emitWatermark(new Watermark(watermarkMillis)); } setNextWatermarkTimer(this.runtimeContext); } } private void setNextWatermarkTimer(StreamingRuntimeContext runtime) { if (this.isRunning) { long watermarkInterval = runtime.getExecutionConfig().getAutoWatermarkInterval(); long timeToNextWatermark = getTimeToNextWatermark(watermarkInterval); runtime.getProcessingTimeService().registerTimer(timeToNextWatermark, this); } } private long getTimeToNextWatermark(long watermarkInterval) { return System.currentTimeMillis() + watermarkInterval; } /** * Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getSplitSources() { return splitSources; } /** * Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? 
extends UnboundedSource<OutputT, CheckpointMarkT>> getLocalSplitSources() { return localSplitSources; } @Override public void notifyCheckpointComplete(long checkpointId) throws Exception { List<CheckpointMarkT> checkpointMarks = pendingCheckpoints.get(checkpointId); if (checkpointMarks != null) { // remove old checkpoints including the current one Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); long currentId; do { currentId = iterator.next(); iterator.remove(); } while (currentId != checkpointId); // confirm all marks for (CheckpointMarkT mark : checkpointMarks) { mark.finalizeCheckpoint(); } } } }
dhalperi/beam
runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java
Java
apache-2.0
18,014
/* * Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; angular.module('ui.widgets') .directive('wtNvd3LineChart', function ($filter) { return { restrict: 'A', replace: true, templateUrl: 'template/widgets/nvd3LineChart/nvd3LineChart.html', scope: { data: '=data', showLegend: '@', showTimeRange: '=?', timeAxisFormat: '=?' }, controller: function ($scope) { var filter = $filter('date'); var numberFilter = $filter('number'); $scope.xAxisTickFormatFunction = function () { return function (d) { return filter(d, $scope.timeAxisFormat); }; }; $scope.yAxisTickFormatFunction = function () { return function (d) { if (d > 999) { var value; var scale; if (d < 999999) { value = Math.round(d/1000); scale = 'k'; } else { value = Math.round(d/1000000); scale = 'm'; } return numberFilter(value) + scale; } else { return numberFilter(d); } }; }; $scope.xFunction = function () { return function (d) { return d.timestamp; }; }; $scope.yFunction = function () { return function (d) { return d.value; }; }; }, link: function postLink(scope, element, attrs) { if (!_.has(attrs, 'showTimeRange')) { scope.showTimeRange = true; } scope.timeAxisFormat = scope.timeAxisFormat || 'HH:mm'; scope.$watch('data', function (data) { if (data && data[0] && data[0].values && (data[0].values.length > 1)) { var timeseries = _.sortBy(data[0].values, function (item) { return item.timestamp; }); var start = timeseries[0].timestamp; var end = 
timeseries[timeseries.length - 1].timestamp; scope.start = start; scope.end = end; } }); } }; });
DataTorrent/malhar-angular-widgets
src/widgets/nvd3LineChart/nvd3LineChart.js
JavaScript
apache-2.0
2,754
/* Copyright 2018 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ try { Object.defineProperty(Array.prototype, "peek", { value: function () { return (this.length > 0 ? this[this.length - 1] : undefined); } }); } catch (e) { } try { Object.defineProperty(String.prototype, "replaceAll", { value: function replaceAll(oldVal, newVal) { return (this.split(oldVal).join(newVal)); } }); } catch (e) { } var RSMB = 1381190978; var memoryLocation = { 0x1: 'Other', 0x2: 'Unknown', 0x3: 'System Board', 0x4: 'ISA', 0x5: 'EISA', 0x6: 'PCI', 0x7: 'MCA', 0x8: 'PCMCIA', 0x9: 'Proprietary', 0xA: 'NuBus', 0xA0: 'PC-98/C20', 0xA1: 'PC-98/C24', 0xA2: 'PC-98/E', 0xA3: 'PC-98/LB' }; var wakeReason = ['Reserved', 'Other', 'Unknown', 'APM Timer', 'Modem Ring', 'LAN', 'Power Switch', 'PCI', 'AC Power']; // Fill the left with zeros until the string is of a given length function zeroLeftPad(str, len) { if ((len == null) && (typeof (len) != 'number')) { return null; } if (str == null) str = ''; // If null, this is to generate zero leftpad string var zlp = ''; for (var i = 0; i < len - str.length; i++) { zlp += '0'; } return zlp + str; } function SMBiosTables() { this._ObjectID = 'SMBiosTable'; if (process.platform == 'win32') { this._marshal = require('_GenericMarshal'); this._native = this._marshal.CreateNativeProxy("Kernel32.dll"); this._native.CreateMethod('EnumSystemFirmwareTables'); this._native.CreateMethod('GetSystemFirmwareTable'); } if (process.platform == 'linux') { this._canonicalizeData = function 
_canonicalizeData(data) { var lines = data.toString().split('Header and Data:\x0A'); var MemoryStream = require('MemoryStream'); var ms = new MemoryStream(); for (var i = 1; i < lines.length; ++i) { var tokens = lines[i].split('Strings:\x0A'); var header = tokens[0].split('\x0A\x0A')[0].replaceAll('\x0A', '').trim().replaceAll(' ', '').replaceAll('\x09', ''); ms.write(Buffer.from(header, 'hex')); if (tokens.length > 1) { var strings = tokens[1].split('\x0A\x0A')[0].split('\x0A'); var stringsFinal = []; for (var strx in strings) { var tmp = strings[strx].trim().replaceAll(' ', '').replaceAll('\x09', ''); if (!(tmp[0] == '"')) { stringsFinal.push(tmp); } } ms.write(Buffer.from(stringsFinal.join(''), 'hex')); ms.write(Buffer.from('00', 'hex')); } else { ms.write(Buffer.from('0000', 'hex')); } } var retVal = ms.buffer; retVal.ms = ms; return (retVal); }; } this._parse = function _parse(SMData) { var ret = {}; var pbyte; var i = 0 var SMData; var structcount = 0; while (SMData && i < SMData.length) { var SMtype = SMData[i]; var SMlength = SMData[i + 1]; if (!ret[SMtype]) { ret[SMtype] = []; } ret[SMtype].push(SMData.slice(i + 4, i + SMlength)); if (process.platform == 'win32') { ret[SMtype].peek()._ext = pbyte; } i += SMlength; ret[SMtype].peek()._strings = []; while (SMData[i] != 0 && i <= SMData.length) { var strstart = i; // Start of String, find end of string while (SMData[i++] != 0 && i <= SMData.length); try { ret[SMtype].peek()._strings.push(SMData.slice(strstart, i).toString().trim()); } catch (ee) { } } i += (ret[SMtype].peek()._strings.length == 0) ? 
2 : 1; ++structcount; //console.log('End of Table[' + SMtype + ']: ' + i); } //console.log('Struct Count = ' + structcount); return (ret); }; this.get = function get(callback) { if (process.platform == 'win32') { var size = this._native.GetSystemFirmwareTable(RSMB, 0, 0, 0).Val; //console.log('Table Size: ' + size); var PtrSize = this._marshal.CreatePointer()._size; var buffer = this._marshal.CreateVariable(size); var written = this._native.GetSystemFirmwareTable(RSMB, 0, buffer, size).Val; //console.log('Written Size: ' + written); var rawBuffer = buffer.toBuffer(); var length = buffer.Deref(4, 4).toBuffer().readUInt32LE(0); pbyte = buffer.Deref(8, length); SMData = pbyte.toBuffer(); if (callback) { callback.apply(this, [this._parse(SMData)]); return; } else { return (this._parse(SMData)); } } if (process.platform == 'linux') { var MemoryStream = require('MemoryStream'); this.child = require('child_process').execFile('/usr/sbin/dmidecode', ['dmidecode', '-u']); this.child.SMBiosTable = this; this.child.ms = new MemoryStream(); this.child.ms.callback = callback; this.child.ms.child = this.child; this.child.stdout.on('data', function (buffer) { this.parent.ms.write(buffer); }); this.child.on('exit', function () { this.ms.end(); }); this.child.ms.on('end', function () { //console.log('read ' + this.buffer.length + ' bytes'); if (this.buffer.length < 300) { //console.log('Not enough permission to read SMBiosTable'); if (this.callback) { this.callback.apply(this.child.SMBiosTable, []); } } else { var SMData = this.child.SMBiosTable._canonicalizeData(this.buffer); var j = this.child.SMBiosTable._parse(SMData); if (this.callback) { this.callback.apply(this.child.SMBiosTable, [j]); } } }); return; } if (callback) { callback.apply(this, [null]); return; } else { return (null); } }; this.parse = function parse(data) { var r = {}; try { r.processorInfo = this.processorInfo(data); } catch(e) { } try { r.memoryInfo = this.memoryInfo(data); } catch(e) { } try { r.systemInfo = 
this.systemInfo(data); } catch(e) { } try { r.systemSlots = this.systemInfo(data); } catch(e) { } try { r.amtInfo = this.amtInfo(data); } catch(e) { } try { if (JSON.stringify(r).length > 65535) { r = {}; } } catch(ee) {} return r; } this.processorInfo = function processorInfo(data) { if (!data) { throw ('no data'); } var ret = []; var ptype = ['ERROR', 'Other', 'Unknown', 'CPU', 'ALU', 'DSP', 'GPU']; var statusString = ['Unknown', 'Enabled', 'Disabled by user', 'Disabled by BIOS', 'Idle', 'Reserved', 'Reserved', 'Other']; var cpuid = 0; while (data[4] && data[4].length > 0) { var p = data[4].pop(); var populated = p[20] & 0x40; var status = p[20] & 0x07 if (populated) { var j = { _ObjectID: 'SMBiosTables.processorInfo' }; j.Processor = ptype[p[1]]; j.MaxSpeed = p.readUInt16LE(16) + ' Mhz'; if (p[31]) { j.Cores = p[31]; } if (p[33]) { j.Threads = p[33]; } j.Populated = 1; j.Status = statusString[status]; j.Socket = p._strings[p[0] - 1]; j.Manufacturer = p._strings[p[3] - 1]; j.Version = p._strings[p[12] - 1]; ret.push(j); } } return (ret); }; this.memoryInfo = function memoryInfo(data) { if (!data) { throw ('no data'); } var retVal = { _ObjectID: 'SMBiosTables.memoryInfo' }; if (data[16]) { var m = data[16].peek(); retVal.location = memoryLocation[m[0]]; if ((retVal.maxCapacityKb = m.readUInt32LE(3)) == 0x80000000) { retVal.maxCapacityKb = 'A really big number'; } } return (retVal); }; this.systemInfo = function systemInfo(data) { if (!data) { throw ('no data'); } var retVal = { _ObjectID: 'SMBiosTables.systemInfo' }; if (data[1]) { var si = data[1].peek(); var uuid = si.slice(4, 20); retVal.uuid = [zeroLeftPad(uuid.readUInt32LE(0).toString(16), 8), zeroLeftPad(uuid.readUInt16LE(4).toString(16), 4), zeroLeftPad(uuid.readUInt16LE(6).toString(16), 4), zeroLeftPad(uuid.readUInt16BE(8).toString(16), 4), zeroLeftPad(uuid.slice(10).toString('hex').toLowerCase(), 12)].join('-'); retVal.wakeReason = wakeReason[si[20]]; } return (retVal); }; this.systemSlots = function 
systemSlots(data) { if (!data) { throw ('no data'); } var retVal = []; if (data[9]) { while (data[9].length > 0) { var ss = data[9].pop(); retVal.push({ name: ss._strings[ss[0] - 1] }); } } return (retVal); }; this.amtInfo = function amtInfo(data) { if (!data) { throw ('no data'); } var retVal = { AMT: false }; if (data[130] && data[130].peek().slice(0, 4).toString() == '$AMT') { var amt = data[130].peek(); retVal.AMT = amt[4] ? true : false; if (retVal.AMT) { retVal.enabled = amt[5] ? true : false; retVal.storageRedirection = amt[6] ? true : false; retVal.serialOverLan = amt[7] ? true : false; retVal.kvm = amt[14] ? true : false; if (data[131].peek() && data[131].peek().slice(52, 56).toString() == 'vPro') { var settings = data[131].peek(); if (settings[0] & 0x04) { retVal.TXT = (settings[0] & 0x08) ? true : false; } if (settings[0] & 0x10) { retVal.VMX = (settings[0] & 0x20) ? true : false; } retVal.MEBX = settings.readUInt16LE(4).toString() + '.' + settings.readUInt16LE(6).toString() + '.' + settings.readUInt16LE(8).toString() + '.' + settings.readUInt16LE(10).toString(); var mecap = settings.slice(20, 32); retVal.ManagementEngine = mecap.readUInt16LE(6).toString() + '.' + mecap.readUInt16LE(4).toString() + '.' + mecap.readUInt16LE(10).toString() + '.' 
+ mecap.readUInt16LE(8).toString(); //var lan = settings.slice(36, 48); //console.log(lan.toString('hex')); //retVal.LAN = (lan.readUInt16LE(10) & 0x03).toString() + '/' + ((lan.readUInt16LE(10) & 0xF8) >> 3).toString(); //console.log(lan.readUInt16LE(3)); //retVal.WLAN = (lan.readUInt16LE(3) & 0x07).toString() + '/' + ((lan.readUInt16LE(3) & 0xF8) >> 3).toString() + '/' + (lan.readUInt16LE(3) >> 8).toString(); } } } return (retVal); }; this.smTableTypes = { 0: 'BIOS information', 1: 'System information', 2: 'Baseboard (or Module) information', 4: 'Processor information', 5: 'memory controller information', 6: 'Memory module information', 7: 'Cache information', 8: 'Port connector information', 9: 'System slots', 10: 'On board devices information', 11: 'OEM strings', 12: 'System configuration options', 13: 'BIOS language information', 14: 'Group associations', 15: 'System event log', 16: 'Physical memory array', 17: 'Memory device', 18: '32bit memory error information', 19: 'Memory array mapped address', 20: 'Memory device mapped address', 21: 'Built-in pointing device', 22: 'Portable battery', 23: 'System reset', 24: 'Hardware security', 25: 'System power controls', 26: 'Voltage probe', 27: 'Cooling device', 28: 'Temperature probe', 29: 'Electrical current probe', 30: 'Out-of-band remote access', 31: 'Boot integrity services (BIS) entry point', 32: 'System boot information', 33: '64bit memory error information', 34: 'Management device', 35: 'Management device component', 36: 'Management device threshold data', 37: 'Memory channel', 38: 'IPMI device information', 39: 'System power supply', 40: 'Additional information', 41: 'Onboard devices extended information', 42: 'Management controller host interface', 126: 'Inactive', 127: 'End-of-table' } } module.exports = new SMBiosTables();
Ylianst/MeshCentral
agents/modules_meshcore/smbios.js
JavaScript
apache-2.0
13,897
/* * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.mss.internal.mime; import org.junit.Assert; import org.junit.Test; /** * Test the functionality of MimeMapper */ public class MimeMapperTest { @Test public void testMimeMappingForKnownExtension() throws MimeMappingException { String mimeType = MimeMapper.getMimeType("png"); Assert.assertEquals("image/png", mimeType); } @Test(expected = MimeMappingException.class) public void testMimeMappingForUnknownExtension() throws MimeMappingException { MimeMapper.getMimeType("unknownext"); } }
susinda/product-mss
carbon-mss/components/org.wso2.carbon.mss/src/test/java/org/wso2/carbon/mss/internal/mime/MimeMapperTest.java
Java
apache-2.0
1,232
/** * vue app * Created by HC on 2016/7/19. */ var header = Vue.extend({ template: '#header' }); // 全局注册组件 Vue.component('my-header', header); var footer = Vue.extend({ template: '#footer' }); // 全局注册组件 Vue.component('my-footer', footer); var index = Vue.extend({ template: '#index' }); var App = Vue.extend({}); var router = new VueRouter(); router.map({ '/': { component: index }, '/bar': { component: footer } }); // Now we can start the app! // The router will create an instance of App and mount to // the element matching the selector #app. router.start(App, '#app');
jlkm2010/blog
src/main/resources/static/front/app.js
JavaScript
apache-2.0
653
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.ComponentModel.DataAnnotations; using System.Globalization; using System.Linq; using System.Reflection; using System.Text; using System.Threading.Tasks; using System.Web.UI.WebControls; using System.Xml; namespace OpenRiaServices.DomainServices.Server { /// <summary> /// Represents a domain operation method within a DomainService /// </summary> public abstract class DomainOperationEntry { private DomainOperation _operation; private ReadOnlyCollection<DomainOperationParameter> _effectiveParameters; private bool _hasOutCountParameter; private string _methodName; private Attribute _operationAttribute; private AttributeCollection _attributes; private Type _associatedType; private Type _actualReturnType; private Type _returnType; private Type _domainServiceType; private bool? _requiresValidation; private bool? _requiresAuthorization; private Func<object, object> _unwrapTaskResultFunc; /// <summary> /// Initializes a new instance of the DomainOperationEntry class /// </summary> /// <param name="domainServiceType">The <see cref="DomainService"/> Type this operation is a member of.</param> /// <param name="name">The name of the operation</param> /// <param name="operation">The <see cref="DomainOperation"/></param> /// <param name="returnType">The return Type of the operation</param> /// <param name="parameters">The parameter definitions for the operation</param> /// <param name="attributes">The method level attributes for the operation</param> protected DomainOperationEntry(Type domainServiceType, string name, DomainOperation operation, Type returnType, IEnumerable<DomainOperationParameter> parameters, AttributeCollection attributes) { if (string.IsNullOrEmpty(name)) { throw new ArgumentNullException("name"); } if (returnType == null) { throw new ArgumentNullException("returnType"); } if (parameters == null) { throw new 
ArgumentNullException("parameters"); } if (attributes == null) { throw new ArgumentNullException("attributes"); } if (domainServiceType == null) { throw new ArgumentNullException("domainServiceType"); } if (operation == DomainOperation.None) { throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Resource.InvalidDomainOperationEntryType, Enum.GetName(typeof(DomainOperation), operation))); } bool isTaskType = TypeUtility.IsTaskType(returnType); this._methodName = isTaskType ? RemoveAsyncFromName(name) : name; this._actualReturnType = returnType; this._returnType = isTaskType ? TypeUtility.GetTaskReturnType(returnType) : returnType; this._attributes = attributes; this._operation = operation; this._domainServiceType = domainServiceType; List<DomainOperationParameter> effectiveParameters = parameters.ToList(); int paramCount = effectiveParameters.Count; if (paramCount > 0) { DomainOperationParameter lastParameter = effectiveParameters[paramCount - 1]; if (lastParameter.IsOut && lastParameter.ParameterType.HasElementType && lastParameter.ParameterType.GetElementType() == typeof(int)) { this._hasOutCountParameter = true; effectiveParameters = effectiveParameters.Take(paramCount - 1).ToList(); } } this._effectiveParameters = effectiveParameters.AsReadOnly(); } /// <summary> /// Removes any trailing "Async" from the specific name. /// </summary> /// <param name="name">A name.</param> /// <returns>name, but without "Async" at the end</returns> private static string RemoveAsyncFromName(string name) { const string async = "Async"; if (name.EndsWith(async) && name.Length > async.Length) return name.Substring(0, name.Length - async.Length); else return name; } /// <summary> /// Gets a string value indicating the logical operation type /// corresponding to the current <see cref="Operation"/> value. 
/// </summary> /// <value> /// The value returned by this property is used in <see cref="System.ComponentModel.DataAnnotations.AuthorizationContext.OperationType"/> /// to describe the category of operation being authorized. /// <para>This helper property exists to avoid the overhead of <see cref="Enum.GetName"/> and /// to map"Custom" into "Update". These strings are not localized because they are meant /// to be used in authorization rules that work independent of culture. /// </para> /// </value> internal string OperationType { get { switch (this.Operation) { case DomainOperation.Query: return "Query"; case DomainOperation.Insert: return "Insert"; case DomainOperation.Update: case DomainOperation.Custom: return "Update"; case DomainOperation.Delete: return "Delete"; case DomainOperation.Invoke: return "Invoke"; default: System.Diagnostics.Debug.Fail("Unknown DomainOperation type"); return "Unknown"; } } } /// <summary> /// Gets the <see cref="DomainService"/> Type this operation is a member of. /// </summary> public Type DomainServiceType { get { return this._domainServiceType; } } /// <summary> /// Gets the name of the operation /// </summary> public string Name { get { return this._methodName; } } /// <summary> /// Gets the attribute that contains metadata about the operation. /// </summary> public Attribute OperationAttribute { get { this.InitializeOperationAttribute(); return this._operationAttribute; } } /// <summary> /// Gets a value indicating whether this operation requires validation. /// </summary> internal bool RequiresValidation { get { if (!this._requiresValidation.HasValue) { // Determine whether this operation requires validation. 
this._requiresValidation = this._attributes[typeof(ValidationAttribute)] != null; if (!this._requiresValidation.Value) { this._requiresValidation = this.Parameters.Any(p => p.Attributes[typeof(ValidationAttribute)] != null); } if (!this._requiresValidation.Value) { this._requiresValidation = this.Parameters.Any(p => { // Complex parameters need to be validated if validation occurs on the // type itself. if (TypeUtility.IsSupportedComplexType(p.ParameterType)) { Type complexType = TypeUtility.GetElementType(p.ParameterType); MetaType metaType = MetaType.GetMetaType(complexType); return metaType.RequiresValidation; } return false; }); } } return this._requiresValidation.Value; } } /// <summary> /// Gets a value indicating whether this operation requires authorization. /// </summary> internal bool RequiresAuthorization { get { if (!this._requiresAuthorization.HasValue) { // Determine whether this operation requires authorization. AuthorizationAttributes may appear on // the DomainService type as well as the DomainOperationEntry method. this._requiresAuthorization = this._attributes[typeof(AuthorizationAttribute)] != null; if (!this._requiresAuthorization.Value) { this._requiresAuthorization = DomainServiceDescription.GetDescription(this._domainServiceType).Attributes[typeof(AuthorizationAttribute)] != null; } } return this._requiresAuthorization.Value; } } /// <summary> /// Based on the operation type specified, create the default corresponding attribute /// if it hasn't been specified explicitly, and add it to the attributes collection. 
/// </summary> private void InitializeOperationAttribute() { if (this._operationAttribute != null) { return; } bool attributeCreated = false; switch (this._operation) { case DomainOperation.Query: this._operationAttribute = this._attributes[typeof(QueryAttribute)]; if (this._operationAttribute == null) { QueryAttribute qa = new QueryAttribute(); // singleton returning query methods aren't composable qa.IsComposable = TypeUtility.FindIEnumerable(this.ReturnType) != null; this._operationAttribute = qa; attributeCreated = true; } break; case DomainOperation.Insert: this._operationAttribute = this._attributes[typeof(InsertAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new InsertAttribute(); attributeCreated = true; } break; case DomainOperation.Update: this._operationAttribute = this._attributes[typeof(UpdateAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new UpdateAttribute(); attributeCreated = true; } break; case DomainOperation.Delete: this._operationAttribute = this._attributes[typeof(DeleteAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new DeleteAttribute(); attributeCreated = true; } break; case DomainOperation.Invoke: this._operationAttribute = this._attributes[typeof(InvokeAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new InvokeAttribute(); attributeCreated = true; } break; case DomainOperation.Custom: this._operationAttribute = this._attributes[typeof(EntityActionAttribute)]; if (this._operationAttribute == null) { this._operationAttribute = new EntityActionAttribute(); attributeCreated = true; } break; default: break; } if (attributeCreated) { if (this._attributes == null) { this._attributes = new AttributeCollection(this._operationAttribute); } else { this._attributes = AttributeCollection.FromExisting(this._attributes, this._operationAttribute); } } } /// <summary> /// Gets the attributes for the operation /// </summary> 
public AttributeCollection Attributes { get { this.InitializeOperationAttribute(); return this._attributes; } internal set { this._attributes = value; // need to reset computed flags that are based // on operation attributes so they will be recomputed this._requiresValidation = null; this._requiresAuthorization = null; } } /// <summary> /// Gets the return Type of the operation /// </summary> public Type ReturnType { get { return this._returnType; } } /// <summary> /// Gets a value indicating whether the actual return type is a Task or Task{T}. /// </summary> public bool IsTaskAsync { get { return TypeUtility.IsTaskType(this._actualReturnType); } } /// <summary> /// Gets the parameters of the operation /// </summary> public ReadOnlyCollection<DomainOperationParameter> Parameters { get { return this._effectiveParameters; } } /// <summary> /// Invokes this <see cref="DomainOperationEntry" />. /// </summary> /// <param name="domainService">The <see cref="DomainService"/> instance the operation is being invoked on.</param> /// <param name="parameters">The parameters to pass to the method.</param> /// <returns>The return value of the invoked method.</returns> public abstract object Invoke(DomainService domainService, object[] parameters); /// <summary> /// Gets the type of domain operation implemented by the method. /// </summary> public DomainOperation Operation { get { return this._operation; } internal set { this._operation = value; } } /// <summary> /// Returns the associated Type this DomainOperation operates on. For query methods /// this will be the element type of the return type (or the singleton return Type), /// and for all other methods this will be the Type of the first method parameter. 
/// </summary> public Type AssociatedType { get { if (this._associatedType == null) { if (this.Operation == DomainOperation.Query) { Type entityType = TypeUtility.FindIEnumerable(this.ReturnType); if (entityType != null) { entityType = entityType.GetGenericArguments()[0]; } else { entityType = this.ReturnType; } this._associatedType = entityType; } else { if (this.Parameters.Count > 0) { this._associatedType = this.Parameters[0].ParameterType; } } } return this._associatedType; } } private bool HasOutCountParameter { get { return this._hasOutCountParameter; } } /// <summary> /// Invokes this <see cref="DomainOperationEntry" />. /// </summary> /// <param name="domainService">The <see cref="DomainService"/> instance the operation is being invoked on.</param> /// <param name="parameters">The parameters to pass to the method.</param> /// <param name="totalCount">The total number of rows for the input query without any paging applied to it.</param> /// <returns>The return value of the invoked method.</returns> internal object Invoke(DomainService domainService, object[] parameters, out int totalCount) { if (this.HasOutCountParameter) { object[] parametersWithCount = new object[parameters.Length + 1]; parameters.CopyTo(parametersWithCount, 0); parametersWithCount[parameters.Length] = 0; object result = this.Invoke(domainService, parametersWithCount); totalCount = (int)parametersWithCount[parameters.Length]; return result; } else { totalCount = DomainService.TotalCountUndefined; return this.Invoke(domainService, parameters); } } internal object UnwrapTaskResult(object result) { if (!IsTaskAsync) return result; if (_unwrapTaskResultFunc == null) { if (ReturnType == typeof (void)) _unwrapTaskResultFunc = UnwrapVoidResult; else { _unwrapTaskResultFunc = (Func<object, object>)Delegate.CreateDelegate(typeof(Func<object, object>), typeof(DomainOperationEntry).GetMethod("UnwrapGenericResult", BindingFlags.Static | BindingFlags.NonPublic) .MakeGenericMethod(this.ReturnType)); } } 
return _unwrapTaskResultFunc(result); } private static object UnwrapVoidResult(object result) { if(result == null) throw new InvalidOperationException("Task method returned null"); ((Task) result).Wait(); return null; } private static object UnwrapGenericResult<T>(object result) { if(result == null) throw new InvalidOperationException("Task method returned null"); return ((Task<T>) result).Result; } /// <summary> /// Returns a textual description of the <see cref="DomainOperationEntry"/>. /// </summary> /// <returns>A string representation of the <see cref="DomainOperationEntry"/>.</returns> public override string ToString() { StringBuilder output = new StringBuilder(); output.AppendFormat(CultureInfo.InvariantCulture, "{0} {1}(", this.ReturnType, this.Name); for (int i = 0; i < this.Parameters.Count; i++) { if (i > 0) { output.Append(", "); } output.Append(this.Parameters[i].ToString()); } output.Append(')'); return output.ToString(); } } }
STAH/OpenRiaServices
OpenRiaServices.DomainServices.Server/Framework/Data/DomainOperationEntry.cs
C#
apache-2.0
20,260
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.elasticjob.lite.spring.namespace.job; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.elasticjob.infra.concurrent.BlockUtils; import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap; import org.apache.shardingsphere.elasticjob.lite.internal.schedule.JobRegistry; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.DataflowElasticJob; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.FooSimpleElasticJob; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.test.AbstractZookeeperJUnit4SpringContextTests; import org.apache.shardingsphere.elasticjob.reg.base.CoordinatorRegistryCenter; import org.junit.After; import org.junit.Before; import org.junit.Test; import javax.annotation.Resource; import static org.junit.Assert.assertTrue; @RequiredArgsConstructor public abstract class AbstractOneOffJobSpringIntegrateTest extends AbstractZookeeperJUnit4SpringContextTests { private final String simpleJobName; private final String throughputDataflowJobName; @Resource private CoordinatorRegistryCenter regCenter; @Before @After public void reset() { 
FooSimpleElasticJob.reset(); DataflowElasticJob.reset(); } @After public void tearDown() { JobRegistry.getInstance().shutdown(simpleJobName); JobRegistry.getInstance().shutdown(throughputDataflowJobName); } @Test public void assertSpringJobBean() { assertSimpleElasticJobBean(); assertThroughputDataflowElasticJobBean(); } private void assertSimpleElasticJobBean() { OneOffJobBootstrap bootstrap = applicationContext.getBean(simpleJobName, OneOffJobBootstrap.class); bootstrap.execute(); while (!FooSimpleElasticJob.isCompleted()) { BlockUtils.waitingShortTime(); } assertTrue(FooSimpleElasticJob.isCompleted()); assertTrue(regCenter.isExisted("/" + simpleJobName + "/sharding")); } private void assertThroughputDataflowElasticJobBean() { OneOffJobBootstrap bootstrap = applicationContext.getBean(throughputDataflowJobName, OneOffJobBootstrap.class); bootstrap.execute(); while (!DataflowElasticJob.isCompleted()) { BlockUtils.waitingShortTime(); } assertTrue(DataflowElasticJob.isCompleted()); assertTrue(regCenter.isExisted("/" + throughputDataflowJobName + "/sharding")); } }
dangdangdotcom/elastic-job
elasticjob-lite/elasticjob-lite-spring/elasticjob-lite-spring-namespace/src/test/java/org/apache/shardingsphere/elasticjob/lite/spring/namespace/job/AbstractOneOffJobSpringIntegrateTest.java
Java
apache-2.0
3,370
/*- * #%L * ELK Reasoner Core * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2016 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.semanticweb.elk.reasoner.entailments.impl; import java.util.Collections; import java.util.List; import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom; import org.semanticweb.elk.reasoner.entailments.model.DerivedClassInclusionEntailsObjectPropertyAssertionAxiom; import org.semanticweb.elk.reasoner.entailments.model.Entailment; import org.semanticweb.elk.reasoner.entailments.model.EntailmentInference; import org.semanticweb.elk.reasoner.entailments.model.ObjectPropertyAssertionAxiomEntailment; import org.semanticweb.elk.reasoner.saturation.conclusions.model.SubClassInclusionComposed; public class DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl extends AbstractAxiomEntailmentInference<ElkObjectPropertyAssertionAxiom, ObjectPropertyAssertionAxiomEntailment> implements DerivedClassInclusionEntailsObjectPropertyAssertionAxiom { private final SubClassInclusionComposed reason_; public DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl( final ObjectPropertyAssertionAxiomEntailment conclusion, final SubClassInclusionComposed reason) { super(conclusion); this.reason_ = reason; } @Override public List<? 
extends Entailment> getPremises() { return Collections.emptyList(); } @Override public SubClassInclusionComposed getReason() { return reason_; } @Override public <O> O accept(final EntailmentInference.Visitor<O> visitor) { return visitor.visit(this); } }
liveontologies/elk-reasoner
elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/entailments/impl/DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl.java
Java
apache-2.0
2,181
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud; import org.dasein.cloud.admin.AdminServices; import org.dasein.cloud.ci.CIServices; import org.dasein.cloud.compute.ComputeServices; import org.dasein.cloud.identity.IdentityServices; import org.dasein.cloud.network.NetworkServices; import org.dasein.cloud.platform.PlatformServices; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * Simple base implementation of a cloud provider bootstrap object that defaults all services to <code>null</code>. * @author George Reese * @version 2013.07 added javadoc, fixed annotations on data center services, made it return an NPE * @since unknown */ public abstract class AbstractCloud extends CloudProvider { /** * Constructs a cloud provider instance. */ public AbstractCloud() { } @Override public @Nullable AdminServices getAdminServices() { return null; } @Override public @Nullable ComputeServices getComputeServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? 
null : compute.getComputeServices()); } @Override public @Nonnull ContextRequirements getContextRequirements() { return new ContextRequirements( new ContextRequirements.Field("apiKeys", ContextRequirements.FieldType.KEYPAIR), new ContextRequirements.Field("x509", ContextRequirements.FieldType.KEYPAIR, false) ); } @Override public @Nullable CIServices getCIServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getCIServices()); } @Override public @Nullable IdentityServices getIdentityServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getIdentityServices()); } @Override public @Nullable NetworkServices getNetworkServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getNetworkServices()); } @Override public @Nullable PlatformServices getPlatformServices() { CloudProvider compute = getComputeCloud(); return ( compute == null ? null : compute.getPlatformServices() ); } }
OSS-TheWeatherCompany/dasein-cloud-core
src/main/java/org/dasein/cloud/AbstractCloud.java
Java
apache-2.0
3,068
package migrations import "github.com/BurntSushi/migration" func ReplaceStepLocationWithPlanID(tx migration.LimitedTx) error { _, err := tx.Exec(` ALTER TABLE containers DROP COLUMN step_location; `) if err != nil { return err } _, err = tx.Exec(` ALTER TABLE containers ADD COLUMN plan_id text; `) return err }
homedepot/github-webhook
vendor/github.com/concourse/atc/db/migrations/61_replace_step_location_with_plan_id.go
GO
apache-2.0
333
#-- # Author:: Daniel DeLeo (<dan@chef.io>) # Copyright:: Copyright 2012-2018, Chef Software Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require "chef/node/common_api" require "chef/node/mixin/state_tracking" class Chef class Node # == AttrArray # AttrArray is identical to Array, except that it keeps a reference to the # "root" (Chef::Node::Attribute) object, and will trigger a cache # invalidation on that object when mutated. class AttrArray < Array MUTATOR_METHODS = [ :<<, :[]=, :clear, :collect!, :compact!, :default=, :default_proc=, :delete_at, :delete_if, :fill, :flatten!, :insert, :keep_if, :map!, :merge!, :pop, :push, :update, :reject!, :reverse!, :replace, :select!, :shift, :slice!, :sort!, :sort_by!, :uniq!, :unshift, ] # For all of the methods that may mutate an Array, we override them to # also invalidate the cached merged_attributes on the root # Node::Attribute object. MUTATOR_METHODS.each do |mutator| define_method(mutator) do |*args, &block| ret = super(*args, &block) send_reset_cache ret end end def delete(key, &block) send_reset_cache(__path__, key) super end def initialize(data = []) super(data) map! { |e| convert_value(e) } end # For elements like Fixnums, true, nil... 
def safe_dup(e) e.dup rescue TypeError e end def dup Array.new(map { |e| safe_dup(e) }) end private def convert_value(value) case value when VividMash value when AttrArray value when Hash VividMash.new(value, __root__, __node__, __precedence__) when Array AttrArray.new(value, __root__, __node__, __precedence__) else value end end # needed for __path__ def convert_key(key) key end prepend Chef::Node::Mixin::StateTracking end # == VividMash # VividMash is identical to a Mash, with a few exceptions: # * It has a reference to the root Chef::Node::Attribute to which it # belongs, and will trigger cache invalidation on that object when # mutated. # * It auto-vivifies, that is a reference to a missing element will result # in the creation of a new VividMash for that key. (This only works when # using the element reference method, `[]` -- other methods, such as # #fetch, work as normal). # * attr_accessor style element set and get are supported via method_missing class VividMash < Mash include CommonAPI # Methods that mutate a VividMash. Each of them is overridden so that it # also invalidates the cached merged_attributes on the root Attribute # object. MUTATOR_METHODS = [ :clear, :delete_if, :keep_if, :merge!, :update, :reject!, :replace, :select!, :shift, ] # For all of the mutating methods on Mash, override them so that they # also invalidate the cached `merged_attributes` on the root Attribute # object. def delete(key, &block) send_reset_cache(__path__, key) super end MUTATOR_METHODS.each do |mutator| define_method(mutator) do |*args, &block| send_reset_cache super(*args, &block) end end def initialize(data = {}) super(data) end def [](key) value = super if !key?(key) value = self.class.new({}, __root__) self[key] = value else value end end def []=(key, value) ret = super send_reset_cache(__path__, key) ret # rubocop:disable Lint/Void end alias :attribute? :has_key? def convert_key(key) super end # Mash uses #convert_value to mashify values on input. 
# We override it here to convert hash or array values to VividMash or # AttrArray for consistency and to ensure that the added parts of the # attribute tree will have the correct cache invalidation behavior. def convert_value(value) case value when VividMash value when AttrArray value when Hash VividMash.new(value, __root__, __node__, __precedence__) when Array AttrArray.new(value, __root__, __node__, __precedence__) else value end end def dup Mash.new(self) end prepend Chef::Node::Mixin::StateTracking end end end
juliandunn/chef
lib/chef/node/attribute_collections.rb
Ruby
apache-2.0
5,477
'use strict'; /* global describe, it */ var fs = require('fs'); var expect = require('chai').expect; var bigrig = require('../'); describe('Big Rig', function () { it ('throws if no processes are found', function () { expect(function () { bigrig.analyze(null); }).to.throw('Zero processes (tabs) found.'); }); it ('throws if given invalid input data is given', function () { expect(function () { bigrig.analyze('wobble'); }).to.throw('Invalid trace contents; not JSON'); }); it ('throws if given a trace with extensions and strict mode is enabled', function (done) { fs.readFile('./test/data/load-extensions.json', 'utf8', function (err, data) { if (err) { throw err; } var error = 'Extensions running during capture; ' + 'see http://bit.ly/bigrig-extensions'; expect(function () { bigrig.analyze(data, { strict: true }); }).to.throw(error); done(); }); }); // TODO(paullewis) Add multiprocess test. it ('returns JSON for a file with a single process', function (done) { fs.readFile('./test/data/load.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData).to.be.an('array'); expect(jsonData[0]).to.be.an('object'); done(); }); }); it ('generates valid JSON', function (done) { fs.readFile('./test/data/load.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); jsonData = JSON.parse(JSON.stringify(jsonData)); expect(jsonData).to.be.an('array'); done(); }); }); it ('supports timed ranges', function (done) { fs.readFile('./test/data/animation.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData[0]).to.be.an('object'); expect(jsonData[0].title).to.equal('sideNavAnimation'); expect(jsonData[0].start).to.be.above(0); expect(jsonData[0].end).to.be.within(1179, 1180); done(); }); }); it ('correctly applies RAIL type when time range is specified', function (done) { fs.readFile('./test/data/animation.json', 'utf8', function (err, 
data) { if (err) { throw err; } var jsonData = bigrig.analyze(data, { types: { 'sideNavAnimation': bigrig.ANIMATION } }); expect(jsonData[0].type).to.equal(bigrig.ANIMATION); done(); }); }); it ('correctly infers RAIL Load when time range not specified', function (done) { fs.readFile('./test/data/load.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData[0].type).to.equal(bigrig.LOAD); expect(jsonData[0].title).to.equal('Load'); done(); }); }); it ('correctly infers RAIL Response when time range not specified', function (done) { fs.readFile('./test/data/response.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData[0].type).to.equal(bigrig.RESPONSE); expect(jsonData[0].title).to.equal('sideNavResponse'); done(); }); }); it ('correctly infers RAIL Animation when time range not specified', function (done) { fs.readFile('./test/data/animation.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData[0].type).to.equal(bigrig.ANIMATION); expect(jsonData[0].title).to.equal('sideNavAnimation'); done(); }); }); it ('correctly infers multiple RAIL regions', function (done) { fs.readFile('./test/data/response-animation.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData.length).to.equal(2); expect(jsonData[0].type).to.equal(bigrig.RESPONSE); expect(jsonData[0].title).to.equal('sideNavResponse'); expect(jsonData[1].type).to.equal(bigrig.ANIMATION); expect(jsonData[1].title).to.equal('sideNavAnimation'); done(); }); }); it ('returns the correct fps for animations', function (done) { fs.readFile('./test/data/animation.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect(jsonData[0].fps).to.be.within(59, 61); done(); }); }); it ('returns the correct JS breakdown', function (done) { 
fs.readFile('./test/data/load.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect( jsonData[0].extendedInfo.javaScript['localhost:11080'] ).to.be.within(245, 246); expect( jsonData[0].extendedInfo.javaScript['www.google-analytics.com'] ).to.be.within(59, 60); done(); }); }); it ('correctly captures forced layouts and recalcs', function (done) { fs.readFile('./test/data/forced-recalc-layout.json', 'utf8', function (err, data) { if (err) { throw err; } var jsonData = bigrig.analyze(data); expect( jsonData[0].extendedInfo.forcedRecalcs ).to.equal(1); expect( jsonData[0].extendedInfo.forcedLayouts ).to.equal(1); done(); }); }); });
googlearchive/node-big-rig
test/bigrig_tests.js
JavaScript
apache-2.0
6,062
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module is deprecated. Please use :mod:`airflow.providers.qubole.operators.qubole`.""" import warnings # pylint: disable=unused-import from airflow.providers.qubole.operators.qubole import QuboleOperator # noqa warnings.warn( "This module is deprecated. Please use `airflow.providers.qubole.operators.qubole`.", DeprecationWarning, stacklevel=2, )
nathanielvarona/airflow
airflow/contrib/operators/qubole_operator.py
Python
apache-2.0
1,158
function TorneoGolfWindow(Window) { window1 = Titanium.UI.createWindow({ tabBarHidden : true, backgroundColor : "white", width : '100%', height : '100%', layout : 'vertical' }); table = Ti.UI.createTableView({ width : '90%', height : '100%' }); scrollView_1 = Titanium.UI.createView({ id : "scrollView_1", backgroundImage : '/images/background.png', height : '100%', width : '100%', layout : 'vertical' }); scrollView_1.add(table); imageViewBar = Titanium.UI.createView({ id : "imageViewBar", backgroundColor : Ti.App.Properties.getString('viewcolor'), height : 80, left : 0, top : 0, width : '100%', layout : 'horizontal' }); imageView = Titanium.UI.createImageView({ id : "imageView", image : "/images/icongolf.png", width : 60, height : 60, top : 7, right : 3 }); imageViewBar.add(imageView); labelTitulo = Titanium.UI.createLabel({ id : "labelTitulo", height : 'auto', width : '70%', text : L('golf'), font : { fontSize : '22dp' }, color : 'white', textAlign : Ti.UI.TEXT_ALIGNMENT_CENTER }); imageViewBar.add(labelTitulo); buttonClose = Titanium.UI.createImageView({ id : "buttonClose", image : "/images/close.png", width : 30, height : 30, top : 25 }); imageViewBar.add(buttonClose); window1.add(imageViewBar); window1.add(scrollView_1); function populateTable() { var data = []; var row = Titanium.UI.createTableViewRow({ id : 2, title : 'Horarios', leftImage : '/images/horarios.png', isparent : true, opened : false, hasChild : false, font : { fontSize : '22dp' }, color : 'black' }); data.push(row); var row = Titanium.UI.createTableViewRow({ id : 3, title : 'Mapa', leftImage : '/images/mapa.png', isparent : true, opened : false, hasChild : false, font : { fontSize : '22dp' }, color : 'black' }); data.push(row); table.setData(data); } populateTable(); table.addEventListener('click', function(e) { if (e.rowData.id == 2) { var Window; var mainWindow = require("ui/handheld/golf/HorariosWindow"); new mainWindow(Window).open(); } else if (e.rowData.id == 3) { var Window; var mainWindow 
= require("ui/handheld/mapa/MapaWindow"); new mainWindow(Window).open(); } }); buttonClose.addEventListener('click', function(e) { Ti.Media.vibrate(); var Window; var mainWindow = require("ui/handheld/MainWindow"); new mainWindow(Window).open(); }); window1.addEventListener('android:back', function(e) { Ti.Media.vibrate(); var Window; var mainWindow = require("ui/handheld/MainWindow"); new mainWindow(Window).open(); }); return window1; } module.exports = TorneoGolfWindow;
emobile/expomobile_mobile
Resources/ui/tablet/TorneoGolfWindow.js
JavaScript
apache-2.0
2,863
package eu.atos.sla.dao.jpa; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityNotFoundException; import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import javax.persistence.Query; import javax.persistence.TypedQuery; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Repository; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import eu.atos.sla.dao.ITemplateDAO; import eu.atos.sla.datamodel.ITemplate; import eu.atos.sla.datamodel.bean.Template; @Repository("TemplateRepository") public class TemplateDAOJpa implements ITemplateDAO { private static Logger logger = LoggerFactory.getLogger(TemplateDAOJpa.class); private EntityManager entityManager; @PersistenceContext(unitName = "slarepositoryDB") public void setEntityManager(EntityManager entityManager) { this.entityManager = entityManager; } public EntityManager getEntityManager() { return entityManager; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public Template getById(Long id) { return entityManager.find(Template.class, id); } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public Template getByUuid(String uuid) { try { Query query = entityManager .createNamedQuery(Template.QUERY_FIND_BY_UUID); query.setParameter("uuid", uuid); Template template = null; template = (Template) query.getSingleResult(); return template; } catch (NoResultException e) { logger.debug("No Result found: " + e); return null; } } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public List<ITemplate> search(String providerId, String []serviceIds) { TypedQuery<ITemplate> query = entityManager.createNamedQuery( Template.QUERY_SEARCH, ITemplate.class); query.setParameter("providerId", providerId); 
query.setParameter("serviceIds", (serviceIds!=null)?Arrays.asList(serviceIds):null); query.setParameter("flagServiceIds", (serviceIds!=null)?"flag":null); logger.debug("providerId:{} - serviceIds:{}" , providerId, (serviceIds!=null)?Arrays.asList(serviceIds):null); List<ITemplate> templates = new ArrayList<ITemplate>(); templates = (List<ITemplate>) query.getResultList(); if (templates != null) { logger.debug("Number of templates:" + templates.size()); } else { logger.debug("No Result found."); } return templates; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public List<ITemplate> getByAgreement(String agreement) { TypedQuery<ITemplate> query = entityManager.createNamedQuery( Template.QUERY_FIND_BY_AGREEMENT, ITemplate.class); query.setParameter("agreement", agreement); List<ITemplate> templates = new ArrayList<ITemplate>(); templates = (List<ITemplate>) query.getResultList(); if (templates != null) { logger.debug("Number of templates:" + templates.size()); } else { logger.debug("No Result found."); } return templates; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public List<ITemplate> getAll() { TypedQuery<ITemplate> query = entityManager.createNamedQuery( Template.QUERY_FIND_ALL, ITemplate.class); List<ITemplate> templates = new ArrayList<ITemplate>(); templates = (List<ITemplate>) query.getResultList(); if (templates != null) { logger.debug("Number of templates:" + templates.size()); } else { logger.debug("No Result found."); } return templates; } @Override @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public ITemplate save(ITemplate template) { logger.info("template.getUuid() "+template.getUuid()); entityManager.persist(template); entityManager.flush(); return template; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public boolean update(String uuid, ITemplate template) { Template templateDB = null; try { Query query = 
entityManager.createNamedQuery(Template.QUERY_FIND_BY_UUID); query.setParameter("uuid", uuid); templateDB = (Template)query.getSingleResult(); } catch (NoResultException e) { logger.debug("No Result found: " + e); } if (templateDB!=null){ template.setId(templateDB.getId()); logger.info("template to update with id"+template.getId()); entityManager.merge(template); entityManager.flush(); }else return false; return true; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public boolean delete(ITemplate template) { try { Template templateDeleted = entityManager.getReference(Template.class, template.getId()); entityManager.remove(templateDeleted); entityManager.flush(); return true; } catch (EntityNotFoundException e) { logger.debug("Template[{}] not found", template.getId()); return false; } } }
Atos-FiwareOps/sla-framework
sla-core/sla-repository/src/main/java/eu/atos/sla/dao/jpa/TemplateDAOJpa.java
Java
apache-2.0
5,164
/*-------------------------------------------------------------------------- * linq.js - LINQ for JavaScript * ver 3.0.3-Beta4 (Oct. 9th, 2012) * * created and maintained by neuecc <ils@neue.cc> * licensed under MIT License * http://linqjs.codeplex.com/ *------------------------------------------------------------------------*/ (function (root, undefined) { // ReadOnly Function var Functions = { Identity: function (x) { return x; }, True: function () { return true; }, Blank: function () { } }; // const Type var Types = { Boolean: typeof true, Number: typeof 0, String: typeof "", Object: typeof {}, Undefined: typeof undefined, Function: typeof function () { } }; // private utility methods var Utils = { // Create anonymous function from lambda expression string createLambda: function (expression) { if (expression == null) return Functions.Identity; if (typeof expression == Types.String) { if (expression == "") { return Functions.Identity; } else if (expression.indexOf("=>") == -1) { var regexp = new RegExp("[$]+", "g"); var maxLength = 0; var match; while (match = regexp.exec(expression)) { var paramNumber = match[0].length; if (paramNumber > maxLength) { maxLength = paramNumber; } } var argArray = []; for (var i = 1; i <= maxLength; i++) { var dollar = ""; for (var j = 0; j < i; j++) { dollar += "$"; } argArray.push(dollar); } var args = Array.prototype.join.call(argArray, ","); return new Function(args, "return " + expression); } else { var expr = expression.match(/^[(\s]*([^()]*?)[)\s]*=>(.*)/); return new Function(expr[1], "return " + expr[2]); } } return expression; }, isIEnumerable: function (obj) { if (typeof Enumerator !== Types.Undefined) { try { new Enumerator(obj); // check JScript(IE)'s Enumerator return true; } catch (e) { } } return false; }, // IE8's defineProperty is defined but cannot use, therefore check defineProperties defineProperty: (Object.defineProperties != null) ? 
function (target, methodName, value) { Object.defineProperty(target, methodName, { enumerable: false, configurable: true, writable: true, value: value }) } : function (target, methodName, value) { target[methodName] = value; }, compare: function (a, b) { return (a === b) ? 0 : (a > b) ? 1 : -1; }, dispose: function (obj) { if (obj != null) obj.dispose(); } }; // IEnumerator State var State = { Before: 0, Running: 1, After: 2 }; // "Enumerator" is conflict JScript's "Enumerator" var IEnumerator = function (initialize, tryGetNext, dispose) { var yielder = new Yielder(); var state = State.Before; this.current = yielder.current; this.moveNext = function () { try { switch (state) { case State.Before: state = State.Running; initialize(); // fall through case State.Running: if (tryGetNext.apply(yielder)) { return true; } else { this.dispose(); return false; } case State.After: return false; } } catch (e) { this.dispose(); throw e; } }; this.dispose = function () { if (state != State.Running) return; try { dispose(); } finally { state = State.After; } }; }; // for tryGetNext var Yielder = function () { var current = null; this.current = function () { return current; }; this.yieldReturn = function (value) { current = value; return true; }; this.yieldBreak = function () { return false; }; }; // Enumerable constuctor var Enumerable = function (getEnumerator) { this.getEnumerator = getEnumerator; }; // Utility Enumerable.Utils = {}; // container Enumerable.Utils.createLambda = function (expression) { return Utils.createLambda(expression); }; Enumerable.Utils.createEnumerable = function (getEnumerator) { return new Enumerable(getEnumerator); }; Enumerable.Utils.createEnumerator = function (initialize, tryGetNext, dispose) { return new IEnumerator(initialize, tryGetNext, dispose); }; Enumerable.Utils.extendTo = function (type) { var typeProto = type.prototype; var enumerableProto; if (type === Array) { enumerableProto = ArrayEnumerable.prototype; Utils.defineProperty(typeProto, 
"getSource", function () { return this; }); } else { enumerableProto = Enumerable.prototype; Utils.defineProperty(typeProto, "getEnumerator", function () { return Enumerable.from(this).getEnumerator(); }); } for (var methodName in enumerableProto) { var func = enumerableProto[methodName]; // already extended if (typeProto[methodName] == func) continue; // already defined(example Array#reverse/join/forEach...) if (typeProto[methodName] != null) { methodName = methodName + "ByLinq"; if (typeProto[methodName] == func) continue; // recheck } if (func instanceof Function) { Utils.defineProperty(typeProto, methodName, func); } } }; // Generator Enumerable.choice = function () // variable argument { var args = arguments; return new Enumerable(function () { return new IEnumerator( function () { args = (args[0] instanceof Array) ? args[0] : (args[0].getEnumerator != null) ? args[0].toArray() : args; }, function () { return this.yieldReturn(args[Math.floor(Math.random() * args.length)]); }, Functions.Blank); }); }; Enumerable.cycle = function () // variable argument { var args = arguments; return new Enumerable(function () { var index = 0; return new IEnumerator( function () { args = (args[0] instanceof Array) ? args[0] : (args[0].getEnumerator != null) ? args[0].toArray() : args; }, function () { if (index >= args.length) index = 0; return this.yieldReturn(args[index++]); }, Functions.Blank); }); }; Enumerable.empty = function () { return new Enumerable(function () { return new IEnumerator( Functions.Blank, function () { return false; }, Functions.Blank); }); }; Enumerable.from = function (obj) { if (obj == null) { return Enumerable.empty(); } if (obj instanceof Enumerable) { return obj; } if (typeof obj == Types.Number || typeof obj == Types.Boolean) { return Enumerable.repeat(obj, 1); } if (typeof obj == Types.String) { return new Enumerable(function () { var index = 0; return new IEnumerator( Functions.Blank, function () { return (index < obj.length) ? 
this.yieldReturn(obj.charAt(index++)) : false; }, Functions.Blank); }); } if (typeof obj != Types.Function) { // array or array like object if (typeof obj.length == Types.Number) { return new ArrayEnumerable(obj); } // JScript's IEnumerable if (!(obj instanceof Object) && Utils.isIEnumerable(obj)) { return new Enumerable(function () { var isFirst = true; var enumerator; return new IEnumerator( function () { enumerator = new Enumerator(obj); }, function () { if (isFirst) isFirst = false; else enumerator.moveNext(); return (enumerator.atEnd()) ? false : this.yieldReturn(enumerator.item()); }, Functions.Blank); }); } // WinMD IIterable<T> if (typeof Windows === Types.Object && typeof obj.first === Types.Function) { return new Enumerable(function () { var isFirst = true; var enumerator; return new IEnumerator( function () { enumerator = obj.first(); }, function () { if (isFirst) isFirst = false; else enumerator.moveNext(); return (enumerator.hasCurrent) ? this.yieldReturn(enumerator.current) : this.yieldBreak(); }, Functions.Blank); }); } } // case function/object : Create keyValuePair[] return new Enumerable(function () { var array = []; var index = 0; return new IEnumerator( function () { for (var key in obj) { var value = obj[key]; if (!(value instanceof Function) && Object.prototype.hasOwnProperty.call(obj, key)) { array.push({ key: key, value: value }); } } }, function () { return (index < array.length) ? this.yieldReturn(array[index++]) : false; }, Functions.Blank); }); }, Enumerable.make = function (element) { return Enumerable.repeat(element, 1); }; // Overload:function(input, pattern) // Overload:function(input, pattern, flags) Enumerable.matches = function (input, pattern, flags) { if (flags == null) flags = ""; if (pattern instanceof RegExp) { flags += (pattern.ignoreCase) ? "i" : ""; flags += (pattern.multiline) ? 
"m" : ""; pattern = pattern.source; } if (flags.indexOf("g") === -1) flags += "g"; return new Enumerable(function () { var regex; return new IEnumerator( function () { regex = new RegExp(pattern, flags); }, function () { var match = regex.exec(input); return (match) ? this.yieldReturn(match) : false; }, Functions.Blank); }); }; // Overload:function(start, count) // Overload:function(start, count, step) Enumerable.range = function (start, count, step) { if (step == null) step = 1; return new Enumerable(function () { var value; var index = 0; return new IEnumerator( function () { value = start - step; }, function () { return (index++ < count) ? this.yieldReturn(value += step) : this.yieldBreak(); }, Functions.Blank); }); }; // Overload:function(start, count) // Overload:function(start, count, step) Enumerable.rangeDown = function (start, count, step) { if (step == null) step = 1; return new Enumerable(function () { var value; var index = 0; return new IEnumerator( function () { value = start + step; }, function () { return (index++ < count) ? this.yieldReturn(value -= step) : this.yieldBreak(); }, Functions.Blank); }); }; // Overload:function(start, to) // Overload:function(start, to, step) Enumerable.rangeTo = function (start, to, step) { if (step == null) step = 1; if (start < to) { return new Enumerable(function () { var value; return new IEnumerator( function () { value = start - step; }, function () { var next = value += step; return (next <= to) ? this.yieldReturn(next) : this.yieldBreak(); }, Functions.Blank); }); } else { return new Enumerable(function () { var value; return new IEnumerator( function () { value = start + step; }, function () { var next = value -= step; return (next >= to) ? 
this.yieldReturn(next) : this.yieldBreak(); }, Functions.Blank); }); } }; // Overload:function(element) // Overload:function(element, count) Enumerable.repeat = function (element, count) { if (count != null) return Enumerable.repeat(element).take(count); return new Enumerable(function () { return new IEnumerator( Functions.Blank, function () { return this.yieldReturn(element); }, Functions.Blank); }); }; Enumerable.repeatWithFinalize = function (initializer, finalizer) { initializer = Utils.createLambda(initializer); finalizer = Utils.createLambda(finalizer); return new Enumerable(function () { var element; return new IEnumerator( function () { element = initializer(); }, function () { return this.yieldReturn(element); }, function () { if (element != null) { finalizer(element); element = null; } }); }); }; // Overload:function(func) // Overload:function(func, count) Enumerable.generate = function (func, count) { if (count != null) return Enumerable.generate(func).take(count); func = Utils.createLambda(func); return new Enumerable(function () { return new IEnumerator( Functions.Blank, function () { return this.yieldReturn(func()); }, Functions.Blank); }); }; // Overload:function() // Overload:function(start) // Overload:function(start, step) Enumerable.toInfinity = function (start, step) { if (start == null) start = 0; if (step == null) step = 1; return new Enumerable(function () { var value; return new IEnumerator( function () { value = start - step; }, function () { return this.yieldReturn(value += step); }, Functions.Blank); }); }; // Overload:function() // Overload:function(start) // Overload:function(start, step) Enumerable.toNegativeInfinity = function (start, step) { if (start == null) start = 0; if (step == null) step = 1; return new Enumerable(function () { var value; return new IEnumerator( function () { value = start + step; }, function () { return this.yieldReturn(value -= step); }, Functions.Blank); }); }; Enumerable.unfold = function (seed, func) { 
func = Utils.createLambda(func); return new Enumerable(function () { var isFirst = true; var value; return new IEnumerator( Functions.Blank, function () { if (isFirst) { isFirst = false; value = seed; return this.yieldReturn(value); } value = func(value); return this.yieldReturn(value); }, Functions.Blank); }); }; Enumerable.defer = function (enumerableFactory) { return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = Enumerable.from(enumerableFactory()).getEnumerator(); }, function () { return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : this.yieldBreak(); }, function () { Utils.dispose(enumerator); }); }); }; // Extension Methods /* Projection and Filtering Methods */ // Overload:function(func) // Overload:function(func, resultSelector<element>) // Overload:function(func, resultSelector<element, nestLevel>) Enumerable.prototype.traverseBreadthFirst = function (func, resultSelector) { var source = this; func = Utils.createLambda(func); resultSelector = Utils.createLambda(resultSelector); return new Enumerable(function () { var enumerator; var nestLevel = 0; var buffer = []; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (true) { if (enumerator.moveNext()) { buffer.push(enumerator.current()); return this.yieldReturn(resultSelector(enumerator.current(), nestLevel)); } var next = Enumerable.from(buffer).selectMany(function (x) { return func(x); }); if (!next.any()) { return false; } else { nestLevel++; buffer = []; Utils.dispose(enumerator); enumerator = next.getEnumerator(); } } }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(func) // Overload:function(func, resultSelector<element>) // Overload:function(func, resultSelector<element, nestLevel>) Enumerable.prototype.traverseDepthFirst = function (func, resultSelector) { var source = this; func = Utils.createLambda(func); resultSelector = 
Utils.createLambda(resultSelector); return new Enumerable(function () { var enumeratorStack = []; var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (true) { if (enumerator.moveNext()) { var value = resultSelector(enumerator.current(), enumeratorStack.length); enumeratorStack.push(enumerator); enumerator = Enumerable.from(func(enumerator.current())).getEnumerator(); return this.yieldReturn(value); } if (enumeratorStack.length <= 0) return false; Utils.dispose(enumerator); enumerator = enumeratorStack.pop(); } }, function () { try { Utils.dispose(enumerator); } finally { Enumerable.from(enumeratorStack).forEach(function (s) { s.dispose(); }); } }); }); }; Enumerable.prototype.flatten = function () { var source = this; return new Enumerable(function () { var enumerator; var middleEnumerator = null; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (true) { if (middleEnumerator != null) { if (middleEnumerator.moveNext()) { return this.yieldReturn(middleEnumerator.current()); } else { middleEnumerator = null; } } if (enumerator.moveNext()) { if (enumerator.current() instanceof Array) { Utils.dispose(middleEnumerator); middleEnumerator = Enumerable.from(enumerator.current()) .selectMany(Functions.Identity) .flatten() .getEnumerator(); continue; } else { return this.yieldReturn(enumerator.current()); } } return false; } }, function () { try { Utils.dispose(enumerator); } finally { Utils.dispose(middleEnumerator); } }); }); }; Enumerable.prototype.pairwise = function (selector) { var source = this; selector = Utils.createLambda(selector); return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); enumerator.moveNext(); }, function () { var prev = enumerator.current(); return (enumerator.moveNext()) ? 
this.yieldReturn(selector(prev, enumerator.current())) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(func) // Overload:function(seed,func<value,element>) Enumerable.prototype.scan = function (seed, func) { var isUseSeed; if (func == null) { func = Utils.createLambda(seed); // arguments[0] isUseSeed = false; } else { func = Utils.createLambda(func); isUseSeed = true; } var source = this; return new Enumerable(function () { var enumerator; var value; var isFirst = true; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { if (isFirst) { isFirst = false; if (!isUseSeed) { if (enumerator.moveNext()) { return this.yieldReturn(value = enumerator.current()); } } else { return this.yieldReturn(value = seed); } } return (enumerator.moveNext()) ? this.yieldReturn(value = func(value, enumerator.current())) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(selector<element>) // Overload:function(selector<element,index>) Enumerable.prototype.select = function (selector) { selector = Utils.createLambda(selector); if (selector.length <= 1) { return new WhereSelectEnumerable(this, null, selector); } else { var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { return (enumerator.moveNext()) ? 
this.yieldReturn(selector(enumerator.current(), index++)) : false; }, function () { Utils.dispose(enumerator); }); }); } }; // Overload:function(collectionSelector<element>) // Overload:function(collectionSelector<element,index>) // Overload:function(collectionSelector<element>,resultSelector) // Overload:function(collectionSelector<element,index>,resultSelector) Enumerable.prototype.selectMany = function (collectionSelector, resultSelector) { var source = this; collectionSelector = Utils.createLambda(collectionSelector); if (resultSelector == null) resultSelector = function (a, b) { return b; }; resultSelector = Utils.createLambda(resultSelector); return new Enumerable(function () { var enumerator; var middleEnumerator = undefined; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { if (middleEnumerator === undefined) { if (!enumerator.moveNext()) return false; } do { if (middleEnumerator == null) { var middleSeq = collectionSelector(enumerator.current(), index++); middleEnumerator = Enumerable.from(middleSeq).getEnumerator(); } if (middleEnumerator.moveNext()) { return this.yieldReturn(resultSelector(enumerator.current(), middleEnumerator.current())); } Utils.dispose(middleEnumerator); middleEnumerator = null; } while (enumerator.moveNext()); return false; }, function () { try { Utils.dispose(enumerator); } finally { Utils.dispose(middleEnumerator); } }); }); }; // Overload:function(predicate<element>) // Overload:function(predicate<element,index>) Enumerable.prototype.where = function (predicate) { predicate = Utils.createLambda(predicate); if (predicate.length <= 1) { return new WhereEnumerable(this, predicate); } else { var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { if (predicate(enumerator.current(), index++)) { return 
this.yieldReturn(enumerator.current()); } } return false; }, function () { Utils.dispose(enumerator); }); }); } }; // Overload:function(selector<element>) // Overload:function(selector<element,index>) Enumerable.prototype.choose = function (selector) { selector = Utils.createLambda(selector); var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { var result = selector(enumerator.current(), index++); if (result != null) { return this.yieldReturn(result); } } return this.yieldBreak(); }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.ofType = function (type) { var typeName; switch (type) { case Number: typeName = Types.Number; break; case String: typeName = Types.String; break; case Boolean: typeName = Types.Boolean; break; case Function: typeName = Types.Function; break; default: typeName = null; break; } return (typeName === null) ? 
this.where(function (x) { return x instanceof type; }) : this.where(function (x) { return typeof x === typeName; }); }; // mutiple arguments, last one is selector, others are enumerable Enumerable.prototype.zip = function () { var args = arguments; var selector = Utils.createLambda(arguments[arguments.length - 1]); var source = this; // optimized case:argument is 2 if (arguments.length == 2) { var second = arguments[0]; return new Enumerable(function () { var firstEnumerator; var secondEnumerator; var index = 0; return new IEnumerator( function () { firstEnumerator = source.getEnumerator(); secondEnumerator = Enumerable.from(second).getEnumerator(); }, function () { if (firstEnumerator.moveNext() && secondEnumerator.moveNext()) { return this.yieldReturn(selector(firstEnumerator.current(), secondEnumerator.current(), index++)); } return false; }, function () { try { Utils.dispose(firstEnumerator); } finally { Utils.dispose(secondEnumerator); } }); }); } else { return new Enumerable(function () { var enumerators; var index = 0; return new IEnumerator( function () { var array = Enumerable.make(source) .concat(Enumerable.from(args).takeExceptLast().select(Enumerable.from)) .select(function (x) { return x.getEnumerator() }) .toArray(); enumerators = Enumerable.from(array); }, function () { if (enumerators.all(function (x) { return x.moveNext() })) { var array = enumerators .select(function (x) { return x.current() }) .toArray(); array.push(index++); return this.yieldReturn(selector.apply(null, array)); } else { return this.yieldBreak(); } }, function () { Enumerable.from(enumerators).forEach(Utils.dispose); }); }); } }; // mutiple arguments Enumerable.prototype.merge = function () { var args = arguments; var source = this; return new Enumerable(function () { var enumerators; var index = -1; return new IEnumerator( function () { enumerators = Enumerable.make(source) .concat(Enumerable.from(args).select(Enumerable.from)) .select(function (x) { return x.getEnumerator() }) 
.toArray(); }, function () { while (enumerators.length > 0) { index = (index >= enumerators.length - 1) ? 0 : index + 1; var enumerator = enumerators[index]; if (enumerator.moveNext()) { return this.yieldReturn(enumerator.current()); } else { enumerator.dispose(); enumerators.splice(index--, 1); } } return this.yieldBreak(); }, function () { Enumerable.from(enumerators).forEach(Utils.dispose); }); }); }; /* Join Methods */ // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector) // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) Enumerable.prototype.join = function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) { outerKeySelector = Utils.createLambda(outerKeySelector); innerKeySelector = Utils.createLambda(innerKeySelector); resultSelector = Utils.createLambda(resultSelector); compareSelector = Utils.createLambda(compareSelector); var source = this; return new Enumerable(function () { var outerEnumerator; var lookup; var innerElements = null; var innerCount = 0; return new IEnumerator( function () { outerEnumerator = source.getEnumerator(); lookup = Enumerable.from(inner).toLookup(innerKeySelector, Functions.Identity, compareSelector); }, function () { while (true) { if (innerElements != null) { var innerElement = innerElements[innerCount++]; if (innerElement !== undefined) { return this.yieldReturn(resultSelector(outerEnumerator.current(), innerElement)); } innerElement = null; innerCount = 0; } if (outerEnumerator.moveNext()) { var key = outerKeySelector(outerEnumerator.current()); innerElements = lookup.get(key).toArray(); } else { return false; } } }, function () { Utils.dispose(outerEnumerator); }); }); }; // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector) // Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) Enumerable.prototype.groupJoin = function (inner, outerKeySelector, 
innerKeySelector, resultSelector, compareSelector) { outerKeySelector = Utils.createLambda(outerKeySelector); innerKeySelector = Utils.createLambda(innerKeySelector); resultSelector = Utils.createLambda(resultSelector); compareSelector = Utils.createLambda(compareSelector); var source = this; return new Enumerable(function () { var enumerator = source.getEnumerator(); var lookup = null; return new IEnumerator( function () { enumerator = source.getEnumerator(); lookup = Enumerable.from(inner).toLookup(innerKeySelector, Functions.Identity, compareSelector); }, function () { if (enumerator.moveNext()) { var innerElement = lookup.get(outerKeySelector(enumerator.current())); return this.yieldReturn(resultSelector(enumerator.current(), innerElement)); } return false; }, function () { Utils.dispose(enumerator); }); }); }; /* Set Methods */ Enumerable.prototype.all = function (predicate) { predicate = Utils.createLambda(predicate); var result = true; this.forEach(function (x) { if (!predicate(x)) { result = false; return false; // break } }); return result; }; // Overload:function() // Overload:function(predicate) Enumerable.prototype.any = function (predicate) { predicate = Utils.createLambda(predicate); var enumerator = this.getEnumerator(); try { if (arguments.length == 0) return enumerator.moveNext(); // case:function() while (enumerator.moveNext()) // case:function(predicate) { if (predicate(enumerator.current())) return true; } return false; } finally { Utils.dispose(enumerator); } }; Enumerable.prototype.isEmpty = function () { return !this.any(); }; // multiple arguments Enumerable.prototype.concat = function () { var source = this; if (arguments.length == 1) { var second = arguments[0]; return new Enumerable(function () { var firstEnumerator; var secondEnumerator; return new IEnumerator( function () { firstEnumerator = source.getEnumerator(); }, function () { if (secondEnumerator == null) { if (firstEnumerator.moveNext()) return 
this.yieldReturn(firstEnumerator.current()); secondEnumerator = Enumerable.from(second).getEnumerator(); } if (secondEnumerator.moveNext()) return this.yieldReturn(secondEnumerator.current()); return false; }, function () { try { Utils.dispose(firstEnumerator); } finally { Utils.dispose(secondEnumerator); } }); }); } else { var args = arguments; return new Enumerable(function () { var enumerators; return new IEnumerator( function () { enumerators = Enumerable.make(source) .concat(Enumerable.from(args).select(Enumerable.from)) .select(function (x) { return x.getEnumerator() }) .toArray(); }, function () { while (enumerators.length > 0) { var enumerator = enumerators[0]; if (enumerator.moveNext()) { return this.yieldReturn(enumerator.current()); } else { enumerator.dispose(); enumerators.splice(0, 1); } } return this.yieldBreak(); }, function () { Enumerable.from(enumerators).forEach(Utils.dispose); }); }); } }; Enumerable.prototype.insert = function (index, second) { var source = this; return new Enumerable(function () { var firstEnumerator; var secondEnumerator; var count = 0; var isEnumerated = false; return new IEnumerator( function () { firstEnumerator = source.getEnumerator(); secondEnumerator = Enumerable.from(second).getEnumerator(); }, function () { if (count == index && secondEnumerator.moveNext()) { isEnumerated = true; return this.yieldReturn(secondEnumerator.current()); } if (firstEnumerator.moveNext()) { count++; return this.yieldReturn(firstEnumerator.current()); } if (!isEnumerated && secondEnumerator.moveNext()) { return this.yieldReturn(secondEnumerator.current()); } return false; }, function () { try { Utils.dispose(firstEnumerator); } finally { Utils.dispose(secondEnumerator); } }); }); }; Enumerable.prototype.alternate = function (alternateValueOrSequence) { var source = this; return new Enumerable(function () { var buffer; var enumerator; var alternateSequence; var alternateEnumerator; return new IEnumerator( function () { if 
(alternateValueOrSequence instanceof Array || alternateValueOrSequence.getEnumerator != null) { alternateSequence = Enumerable.from(Enumerable.from(alternateValueOrSequence).toArray()); // freeze } else { alternateSequence = Enumerable.make(alternateValueOrSequence); } enumerator = source.getEnumerator(); if (enumerator.moveNext()) buffer = enumerator.current(); }, function () { while (true) { if (alternateEnumerator != null) { if (alternateEnumerator.moveNext()) { return this.yieldReturn(alternateEnumerator.current()); } else { alternateEnumerator = null; } } if (buffer == null && enumerator.moveNext()) { buffer = enumerator.current(); // hasNext alternateEnumerator = alternateSequence.getEnumerator(); continue; // GOTO } else if (buffer != null) { var retVal = buffer; buffer = null; return this.yieldReturn(retVal); } return this.yieldBreak(); } }, function () { try { Utils.dispose(enumerator); } finally { Utils.dispose(alternateEnumerator); } }); }); }; // Overload:function(value) // Overload:function(value, compareSelector) Enumerable.prototype.contains = function (value, compareSelector) { compareSelector = Utils.createLambda(compareSelector); var enumerator = this.getEnumerator(); try { while (enumerator.moveNext()) { if (compareSelector(enumerator.current()) === value) return true; } return false; } finally { Utils.dispose(enumerator); } }; Enumerable.prototype.defaultIfEmpty = function (defaultValue) { var source = this; if (defaultValue === undefined) defaultValue = null; return new Enumerable(function () { var enumerator; var isFirst = true; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { if (enumerator.moveNext()) { isFirst = false; return this.yieldReturn(enumerator.current()); } else if (isFirst) { isFirst = false; return this.yieldReturn(defaultValue); } return false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function() // Overload:function(compareSelector) 
// Returns elements of the sequence with duplicates removed.
// Implemented as set-difference against the empty sequence, which also
// records every yielded element (see except below).
// Overload:function()
// Overload:function(compareSelector)
Enumerable.prototype.distinct = function (compareSelector) {
    return this.except(Enumerable.empty(), compareSelector);
};

// Yields each element whose compare key differs from the immediately
// preceding element's key (run-length style dedup; non-adjacent
// duplicates are kept).
Enumerable.prototype.distinctUntilChanged = function (compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var compareKey;
        // BUGFIX: was "var initial;" (undefined), so the first-element
        // branch below could never run and a first element whose compare
        // key === undefined was silently dropped. Initialize to true so
        // the first element is always yielded.
        var initial = true;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                while (enumerator.moveNext()) {
                    var key = compareSelector(enumerator.current());
                    if (initial) {
                        // first element: remember its key and yield it
                        initial = false;
                        compareKey = key;
                        return this.yieldReturn(enumerator.current());
                    }
                    if (compareKey === key) {
                        continue; // same run — skip
                    }
                    compareKey = key;
                    return this.yieldReturn(enumerator.current());
                }
                return this.yieldBreak();
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Set difference: elements of this sequence not present in `second`.
// Every yielded element is also added to the key set, so the result is
// additionally de-duplicated (this is what makes distinct() work).
// Overload:function(second)
// Overload:function(second, compareSelector)
Enumerable.prototype.except = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var keys;
        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                keys = new Dictionary(compareSelector);
                Enumerable.from(second).forEach(function (key) { keys.add(key); });
            },
            function () {
                while (enumerator.moveNext()) {
                    var current = enumerator.current();
                    if (!keys.contains(current)) {
                        keys.add(current);
                        return this.yieldReturn(current);
                    }
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// Set intersection: distinct elements of this sequence that also appear
// in `second`. `outs` tracks what has already been yielded so each
// element appears at most once.
// Overload:function(second)
// Overload:function(second, compareSelector)
Enumerable.prototype.intersect = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var keys;
        var outs;
        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                keys = new Dictionary(compareSelector);
                Enumerable.from(second).forEach(function (key) { keys.add(key); });
                outs = new Dictionary(compareSelector);
            },
            function () {
                while (enumerator.moveNext()) {
                    var current = enumerator.current();
                    if (!outs.contains(current) && keys.contains(current)) {
                        outs.add(current);
                        return this.yieldReturn(current);
                    }
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};

// True when both sequences have the same length and pairwise-equal
// compare keys (strict !== comparison on the projected keys).
// Overload:function(second)
// Overload:function(second, compareSelector)
Enumerable.prototype.sequenceEqual = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var firstEnumerator = this.getEnumerator();
    try {
        var secondEnumerator = Enumerable.from(second).getEnumerator();
        try {
            while (firstEnumerator.moveNext()) {
                if (!secondEnumerator.moveNext()
                    || compareSelector(firstEnumerator.current()) !== compareSelector(secondEnumerator.current())) {
                    return false;
                }
            }
            // second must be exhausted too, or it is longer than first
            if (secondEnumerator.moveNext()) return false;
            return true;
        }
        finally { Utils.dispose(secondEnumerator); }
    }
    finally { Utils.dispose(firstEnumerator); }
};

// Set union: distinct elements of this sequence followed by distinct
// elements of `second` not already yielded. `secondEnumerator`
// doubles as the "first sequence exhausted" flag (undefined until then).
Enumerable.prototype.union = function (second, compareSelector) {
    compareSelector = Utils.createLambda(compareSelector);
    var source = this;
    return new Enumerable(function () {
        var firstEnumerator;
        var secondEnumerator;
        var keys;
        return new IEnumerator(
            function () {
                firstEnumerator = source.getEnumerator();
                keys = new Dictionary(compareSelector);
            },
            function () {
                var current;
                if (secondEnumerator === undefined) {
                    while (firstEnumerator.moveNext()) {
                        current = firstEnumerator.current();
                        if (!keys.contains(current)) {
                            keys.add(current);
                            return this.yieldReturn(current);
                        }
                    }
                    secondEnumerator = Enumerable.from(second).getEnumerator();
                }
                while (secondEnumerator.moveNext()) {
                    current = secondEnumerator.current();
                    if (!keys.contains(current)) {
                        keys.add(current);
                        return this.yieldReturn(current);
                    }
                }
                return false;
            },
            function () {
                try { Utils.dispose(firstEnumerator); }
                finally { Utils.dispose(secondEnumerator); }
            });
    });
};

/* Ordering Methods */

// Sorts ascending by the projected key (stable; actual sorting is
// deferred to OrderedEnumerable).
Enumerable.prototype.orderBy = function (keySelector) {
    return new OrderedEnumerable(this, keySelector, false);
};

// Sorts descending by the projected key.
Enumerable.prototype.orderByDescending = function (keySelector) {
    return new OrderedEnumerable(this, keySelector, true);
};

// Yields the sequence in reverse order. Buffers the whole source into
// an array on first moveNext, then walks it backwards.
Enumerable.prototype.reverse = function () {
    var source = this;
    return new Enumerable(function () {
        var buffer;
        var index;
        return new IEnumerator(
            function () {
                buffer = source.toArray();
                index = buffer.length;
            },
            function () {
                return (index > 0)
                    ? this.yieldReturn(buffer[--index])
                    : false;
            },
            Functions.Blank);
    });
};

// Yields the sequence in random order. Buffers the source, then
// repeatedly splices out a uniformly random remaining element.
Enumerable.prototype.shuffle = function () {
    var source = this;
    return new Enumerable(function () {
        var buffer;
        return new IEnumerator(
            function () { buffer = source.toArray(); },
            function () {
                if (buffer.length > 0) {
                    var i = Math.floor(Math.random() * buffer.length);
                    return this.yieldReturn(buffer.splice(i, 1)[0]);
                }
                return false;
            },
            Functions.Blank);
    });
};

// Infinite weighted random sampling (with replacement). Elements with
// weight <= 0 are excluded. Builds a cumulative-bound table, then on
// each draw binary-searches for the first bound >= the random draw.
Enumerable.prototype.weightedSample = function (weightSelector) {
    weightSelector = Utils.createLambda(weightSelector);
    var source = this;
    return new Enumerable(function () {
        var sortedByBound;
        var totalWeight = 0;
        return new IEnumerator(
            function () {
                sortedByBound = source
                    .choose(function (x) {
                        var weight = weightSelector(x);
                        if (weight <= 0) return null; // ignore 0
                        totalWeight += weight;
                        return { value: x, bound: totalWeight };
                    })
                    .toArray();
            },
            function () {
                if (sortedByBound.length > 0) {
                    var draw = Math.floor(Math.random() * totalWeight) + 1;
                    var lower = -1;
                    var upper = sortedByBound.length;
                    while (upper - lower > 1) {
                        var index = Math.floor((lower + upper) / 2);
                        if (sortedByBound[index].bound >= draw) {
                            upper = index;
                        }
                        else {
                            lower = index;
                        }
                    }
                    return this.yieldReturn(sortedByBound[upper].value);
                }
                return this.yieldBreak();
            },
            Functions.Blank);
    });
};

/* Grouping Methods */

// Overload:function(keySelector)
// Overload:function(keySelector,elementSelector)
// Overload:function(keySelector,elementSelector,resultSelector)
// Overload:function(keySelector,elementSelector,resultSelector,compareSelector)
Enumerable.prototype.groupBy = function (keySelector, elementSelector, resultSelector, compareSelector) { var source = this; keySelector = Utils.createLambda(keySelector); elementSelector = Utils.createLambda(elementSelector); if (resultSelector != null) resultSelector = Utils.createLambda(resultSelector); compareSelector = Utils.createLambda(compareSelector); return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = source.toLookup(keySelector, elementSelector, compareSelector) .toEnumerable() .getEnumerator(); }, function () { while (enumerator.moveNext()) { return (resultSelector == null) ? this.yieldReturn(enumerator.current()) : this.yieldReturn(resultSelector(enumerator.current().key(), enumerator.current())); } return false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(keySelector) // Overload:function(keySelector,elementSelector) // Overload:function(keySelector,elementSelector,resultSelector) // Overload:function(keySelector,elementSelector,resultSelector,compareSelector) Enumerable.prototype.partitionBy = function (keySelector, elementSelector, resultSelector, compareSelector) { var source = this; keySelector = Utils.createLambda(keySelector); elementSelector = Utils.createLambda(elementSelector); compareSelector = Utils.createLambda(compareSelector); var hasResultSelector; if (resultSelector == null) { hasResultSelector = false; resultSelector = function (key, group) { return new Grouping(key, group); }; } else { hasResultSelector = true; resultSelector = Utils.createLambda(resultSelector); } return new Enumerable(function () { var enumerator; var key; var compareKey; var group = []; return new IEnumerator( function () { enumerator = source.getEnumerator(); if (enumerator.moveNext()) { key = keySelector(enumerator.current()); compareKey = compareSelector(key); group.push(elementSelector(enumerator.current())); } }, function () { var hasNext; while ((hasNext = 
enumerator.moveNext()) == true) { if (compareKey === compareSelector(keySelector(enumerator.current()))) { group.push(elementSelector(enumerator.current())); } else break; } if (group.length > 0) { var result = (hasResultSelector) ? resultSelector(key, Enumerable.from(group)) : resultSelector(key, group); if (hasNext) { key = keySelector(enumerator.current()); compareKey = compareSelector(key); group = [elementSelector(enumerator.current())]; } else group = []; return this.yieldReturn(result); } return false; }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.buffer = function (count) { var source = this; return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { var array = []; var index = 0; while (enumerator.moveNext()) { array.push(enumerator.current()); if (++index >= count) return this.yieldReturn(array); } if (array.length > 0) return this.yieldReturn(array); return false; }, function () { Utils.dispose(enumerator); }); }); }; /* Aggregate Methods */ // Overload:function(func) // Overload:function(seed,func) // Overload:function(seed,func,resultSelector) Enumerable.prototype.aggregate = function (seed, func, resultSelector) { resultSelector = Utils.createLambda(resultSelector); return resultSelector(this.scan(seed, func, resultSelector).last()); }; // Overload:function() // Overload:function(selector) Enumerable.prototype.average = function (selector) { selector = Utils.createLambda(selector); var sum = 0; var count = 0; this.forEach(function (x) { sum += selector(x); ++count; }); return sum / count; }; // Overload:function() // Overload:function(predicate) Enumerable.prototype.count = function (predicate) { predicate = (predicate == null) ? 
Functions.True : Utils.createLambda(predicate); var count = 0; this.forEach(function (x, i) { if (predicate(x, i))++count; }); return count; }; // Overload:function() // Overload:function(selector) Enumerable.prototype.max = function (selector) { if (selector == null) selector = Functions.Identity; return this.select(selector).aggregate(function (a, b) { return (a > b) ? a : b; }); }; // Overload:function() // Overload:function(selector) Enumerable.prototype.min = function (selector) { if (selector == null) selector = Functions.Identity; return this.select(selector).aggregate(function (a, b) { return (a < b) ? a : b; }); }; Enumerable.prototype.maxBy = function (keySelector) { keySelector = Utils.createLambda(keySelector); return this.aggregate(function (a, b) { return (keySelector(a) > keySelector(b)) ? a : b; }); }; Enumerable.prototype.minBy = function (keySelector) { keySelector = Utils.createLambda(keySelector); return this.aggregate(function (a, b) { return (keySelector(a) < keySelector(b)) ? a : b; }); }; // Overload:function() // Overload:function(selector) Enumerable.prototype.sum = function (selector) { if (selector == null) selector = Functions.Identity; return this.select(selector).aggregate(0, function (a, b) { return a + b; }); }; /* Paging Methods */ Enumerable.prototype.elementAt = function (index) { var value; var found = false; this.forEach(function (x, i) { if (i == index) { value = x; found = true; return false; } }); if (!found) throw new Error("index is less than 0 or greater than or equal to the number of elements in source."); return value; }; Enumerable.prototype.elementAtOrDefault = function (index, defaultValue) { if (defaultValue === undefined) defaultValue = null; var value; var found = false; this.forEach(function (x, i) { if (i == index) { value = x; found = true; return false; } }); return (!found) ? 
defaultValue : value; }; // Overload:function() // Overload:function(predicate) Enumerable.prototype.first = function (predicate) { if (predicate != null) return this.where(predicate).first(); var value; var found = false; this.forEach(function (x) { value = x; found = true; return false; }); if (!found) throw new Error("first:No element satisfies the condition."); return value; }; Enumerable.prototype.firstOrDefault = function (predicate, defaultValue) { if (defaultValue === undefined) defaultValue = null; if (predicate != null) return this.where(predicate).firstOrDefault(null, defaultValue); var value; var found = false; this.forEach(function (x) { value = x; found = true; return false; }); return (!found) ? defaultValue : value; }; // Overload:function() // Overload:function(predicate) Enumerable.prototype.last = function (predicate) { if (predicate != null) return this.where(predicate).last(); var value; var found = false; this.forEach(function (x) { found = true; value = x; }); if (!found) throw new Error("last:No element satisfies the condition."); return value; }; // Overload:function(defaultValue) // Overload:function(defaultValue,predicate) Enumerable.prototype.lastOrDefault = function (predicate, defaultValue) { if (defaultValue === undefined) defaultValue = null; if (predicate != null) return this.where(predicate).lastOrDefault(null, defaultValue); var value; var found = false; this.forEach(function (x) { found = true; value = x; }); return (!found) ? 
defaultValue : value; }; // Overload:function() // Overload:function(predicate) Enumerable.prototype.single = function (predicate) { if (predicate != null) return this.where(predicate).single(); var value; var found = false; this.forEach(function (x) { if (!found) { found = true; value = x; } else throw new Error("single:sequence contains more than one element."); }); if (!found) throw new Error("single:No element satisfies the condition."); return value; }; // Overload:function(defaultValue) // Overload:function(defaultValue,predicate) Enumerable.prototype.singleOrDefault = function (predicate, defaultValue) { if (defaultValue === undefined) defaultValue = null; if (predicate != null) return this.where(predicate).singleOrDefault(null, defaultValue); var value; var found = false; this.forEach(function (x) { if (!found) { found = true; value = x; } else throw new Error("single:sequence contains more than one element."); }); return (!found) ? defaultValue : value; }; Enumerable.prototype.skip = function (count) { var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); while (index++ < count && enumerator.moveNext()) { } ; }, function () { return (enumerator.moveNext()) ? 
this.yieldReturn(enumerator.current()) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(predicate<element>) // Overload:function(predicate<element,index>) Enumerable.prototype.skipWhile = function (predicate) { predicate = Utils.createLambda(predicate); var source = this; return new Enumerable(function () { var enumerator; var index = 0; var isSkipEnd = false; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (!isSkipEnd) { if (enumerator.moveNext()) { if (!predicate(enumerator.current(), index++)) { isSkipEnd = true; return this.yieldReturn(enumerator.current()); } continue; } else return false; } return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false; }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.take = function (count) { var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { return (index++ < count && enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false; }, function () { Utils.dispose(enumerator); } ); }); }; // Overload:function(predicate<element>) // Overload:function(predicate<element,index>) Enumerable.prototype.takeWhile = function (predicate) { predicate = Utils.createLambda(predicate); var source = this; return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { return (enumerator.moveNext() && predicate(enumerator.current(), index++)) ? 
this.yieldReturn(enumerator.current()) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function() // Overload:function(count) Enumerable.prototype.takeExceptLast = function (count) { if (count == null) count = 1; var source = this; return new Enumerable(function () { if (count <= 0) return source.getEnumerator(); // do nothing var enumerator; var q = []; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { if (q.length == count) { q.push(enumerator.current()); return this.yieldReturn(q.shift()); } q.push(enumerator.current()); } return false; }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.takeFromLast = function (count) { if (count <= 0 || count == null) return Enumerable.empty(); var source = this; return new Enumerable(function () { var sourceEnumerator; var enumerator; var q = []; return new IEnumerator( function () { sourceEnumerator = source.getEnumerator(); }, function () { while (sourceEnumerator.moveNext()) { if (q.length == count) q.shift(); q.push(sourceEnumerator.current()); } if (enumerator == null) { enumerator = Enumerable.from(q).getEnumerator(); } return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(item) // Overload:function(predicate) Enumerable.prototype.indexOf = function (item) { var found = null; // item as predicate if (typeof (item) === Types.Function) { this.forEach(function (x, i) { if (item(x, i)) { found = i; return false; } }); } else { this.forEach(function (x, i) { if (x === item) { found = i; return false; } }); } return (found !== null) ? 
found : -1; }; // Overload:function(item) // Overload:function(predicate) Enumerable.prototype.lastIndexOf = function (item) { var result = -1; // item as predicate if (typeof (item) === Types.Function) { this.forEach(function (x, i) { if (item(x, i)) result = i; }); } else { this.forEach(function (x, i) { if (x === item) result = i; }); } return result; }; /* Convert Methods */ Enumerable.prototype.asEnumerable = function () { return Enumerable.from(this); }; Enumerable.prototype.toArray = function () { var array = []; this.forEach(function (x) { array.push(x); }); return array; }; // Overload:function(keySelector) // Overload:function(keySelector, elementSelector) // Overload:function(keySelector, elementSelector, compareSelector) Enumerable.prototype.toLookup = function (keySelector, elementSelector, compareSelector) { keySelector = Utils.createLambda(keySelector); elementSelector = Utils.createLambda(elementSelector); compareSelector = Utils.createLambda(compareSelector); var dict = new Dictionary(compareSelector); this.forEach(function (x) { var key = keySelector(x); var element = elementSelector(x); var array = dict.get(key); if (array !== undefined) array.push(element); else dict.add(key, [element]); }); return new Lookup(dict); }; Enumerable.prototype.toObject = function (keySelector, elementSelector) { keySelector = Utils.createLambda(keySelector); elementSelector = Utils.createLambda(elementSelector); var obj = {}; this.forEach(function (x) { obj[keySelector(x)] = elementSelector(x); }); return obj; }; // Overload:function(keySelector, elementSelector) // Overload:function(keySelector, elementSelector, compareSelector) Enumerable.prototype.toDictionary = function (keySelector, elementSelector, compareSelector) { keySelector = Utils.createLambda(keySelector); elementSelector = Utils.createLambda(elementSelector); compareSelector = Utils.createLambda(compareSelector); var dict = new Dictionary(compareSelector); this.forEach(function (x) { 
dict.add(keySelector(x), elementSelector(x)); }); return dict; }; // Overload:function() // Overload:function(replacer) // Overload:function(replacer, space) Enumerable.prototype.toJSONString = function (replacer, space) { if (typeof JSON === Types.Undefined || JSON.stringify == null) { throw new Error("toJSONString can't find JSON.stringify. This works native JSON support Browser or include json2.js"); } return JSON.stringify(this.toArray(), replacer, space); }; // Overload:function() // Overload:function(separator) // Overload:function(separator,selector) Enumerable.prototype.toJoinedString = function (separator, selector) { if (separator == null) separator = ""; if (selector == null) selector = Functions.Identity; return this.select(selector).toArray().join(separator); }; /* Action Methods */ // Overload:function(action<element>) // Overload:function(action<element,index>) Enumerable.prototype.doAction = function (action) { var source = this; action = Utils.createLambda(action); return new Enumerable(function () { var enumerator; var index = 0; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { if (enumerator.moveNext()) { action(enumerator.current(), index++); return this.yieldReturn(enumerator.current()); } return false; }, function () { Utils.dispose(enumerator); }); }); }; // Overload:function(action<element>) // Overload:function(action<element,index>) // Overload:function(func<element,bool>) // Overload:function(func<element,index,bool>) Enumerable.prototype.forEach = function (action) { action = Utils.createLambda(action); var index = 0; var enumerator = this.getEnumerator(); try { while (enumerator.moveNext()) { if (action(enumerator.current(), index++) === false) break; } } finally { Utils.dispose(enumerator); } }; // Overload:function() // Overload:function(separator) // Overload:function(separator,selector) Enumerable.prototype.write = function (separator, selector) { if (separator == null) separator = ""; 
selector = Utils.createLambda(selector); var isFirst = true; this.forEach(function (item) { if (isFirst) isFirst = false; else document.write(separator); document.write(selector(item)); }); }; // Overload:function() // Overload:function(selector) Enumerable.prototype.writeLine = function (selector) { selector = Utils.createLambda(selector); this.forEach(function (item) { document.writeln(selector(item) + "<br />"); }); }; Enumerable.prototype.force = function () { var enumerator = this.getEnumerator(); try { while (enumerator.moveNext()) { } } finally { Utils.dispose(enumerator); } }; /* Functional Methods */ Enumerable.prototype.letBind = function (func) { func = Utils.createLambda(func); var source = this; return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = Enumerable.from(func(source)).getEnumerator(); }, function () { return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false; }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.share = function () { var source = this; var sharedEnumerator; var disposed = false; return new DisposableEnumerable(function () { return new IEnumerator( function () { if (sharedEnumerator == null) { sharedEnumerator = source.getEnumerator(); } }, function () { if (disposed) throw new Error("enumerator is disposed"); return (sharedEnumerator.moveNext()) ? this.yieldReturn(sharedEnumerator.current()) : false; }, Functions.Blank ); }, function () { disposed = true; Utils.dispose(sharedEnumerator); }); }; Enumerable.prototype.memoize = function () { var source = this; var cache; var enumerator; var disposed = false; return new DisposableEnumerable(function () { var index = -1; return new IEnumerator( function () { if (enumerator == null) { enumerator = source.getEnumerator(); cache = []; } }, function () { if (disposed) throw new Error("enumerator is disposed"); index++; if (cache.length <= index) { return (enumerator.moveNext()) ? 
this.yieldReturn(cache[index] = enumerator.current()) : false; } return this.yieldReturn(cache[index]); }, Functions.Blank ); }, function () { disposed = true; Utils.dispose(enumerator); cache = null; }); }; /* Error Handling Methods */ Enumerable.prototype.catchError = function (handler) { handler = Utils.createLambda(handler); var source = this; return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { try { return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false; } catch (e) { handler(e); return false; } }, function () { Utils.dispose(enumerator); }); }); }; Enumerable.prototype.finallyAction = function (finallyAction) { finallyAction = Utils.createLambda(finallyAction); var source = this; return new Enumerable(function () { var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { return (enumerator.moveNext()) ? this.yieldReturn(enumerator.current()) : false; }, function () { try { Utils.dispose(enumerator); } finally { finallyAction(); } }); }); }; /* For Debug Methods */ // Overload:function() // Overload:function(selector) Enumerable.prototype.log = function (selector) { selector = Utils.createLambda(selector); return this.doAction(function (item) { if (typeof console !== Types.Undefined) { console.log(selector(item)); } }); }; // Overload:function() // Overload:function(message) // Overload:function(message,selector) Enumerable.prototype.trace = function (message, selector) { if (message == null) message = "Trace"; selector = Utils.createLambda(selector); return this.doAction(function (item) { if (typeof console !== Types.Undefined) { console.log(message, selector(item)); } }); }; // private var OrderedEnumerable = function (source, keySelector, descending, parent) { this.source = source; this.keySelector = Utils.createLambda(keySelector); this.descending = descending; this.parent = parent; }; 
OrderedEnumerable.prototype = new Enumerable(); OrderedEnumerable.prototype.createOrderedEnumerable = function (keySelector, descending) { return new OrderedEnumerable(this.source, keySelector, descending, this); }; OrderedEnumerable.prototype.thenBy = function (keySelector) { return this.createOrderedEnumerable(keySelector, false); }; OrderedEnumerable.prototype.thenByDescending = function (keySelector) { return this.createOrderedEnumerable(keySelector, true); }; OrderedEnumerable.prototype.getEnumerator = function () { var self = this; var buffer; var indexes; var index = 0; return new IEnumerator( function () { buffer = []; indexes = []; self.source.forEach(function (item, index) { buffer.push(item); indexes.push(index); }); var sortContext = SortContext.create(self, null); sortContext.GenerateKeys(buffer); indexes.sort(function (a, b) { return sortContext.compare(a, b); }); }, function () { return (index < indexes.length) ? this.yieldReturn(buffer[indexes[index++]]) : false; }, Functions.Blank ); }; var SortContext = function (keySelector, descending, child) { this.keySelector = keySelector; this.descending = descending; this.child = child; this.keys = null; }; SortContext.create = function (orderedEnumerable, currentContext) { var context = new SortContext(orderedEnumerable.keySelector, orderedEnumerable.descending, currentContext); if (orderedEnumerable.parent != null) return SortContext.create(orderedEnumerable.parent, context); return context; }; SortContext.prototype.GenerateKeys = function (source) { var len = source.length; var keySelector = this.keySelector; var keys = new Array(len); for (var i = 0; i < len; i++) keys[i] = keySelector(source[i]); this.keys = keys; if (this.child != null) this.child.GenerateKeys(source); }; SortContext.prototype.compare = function (index1, index2) { var comparison = Utils.compare(this.keys[index1], this.keys[index2]); if (comparison == 0) { if (this.child != null) return this.child.compare(index1, index2); return 
Utils.compare(index1, index2); } return (this.descending) ? -comparison : comparison; }; var DisposableEnumerable = function (getEnumerator, dispose) { this.dispose = dispose; Enumerable.call(this, getEnumerator); }; DisposableEnumerable.prototype = new Enumerable(); // optimize array or arraylike object var ArrayEnumerable = function (source) { this.getSource = function () { return source; }; }; ArrayEnumerable.prototype = new Enumerable(); ArrayEnumerable.prototype.any = function (predicate) { return (predicate == null) ? (this.getSource().length > 0) : Enumerable.prototype.any.apply(this, arguments); }; ArrayEnumerable.prototype.count = function (predicate) { return (predicate == null) ? this.getSource().length : Enumerable.prototype.count.apply(this, arguments); }; ArrayEnumerable.prototype.elementAt = function (index) { var source = this.getSource(); return (0 <= index && index < source.length) ? source[index] : Enumerable.prototype.elementAt.apply(this, arguments); }; ArrayEnumerable.prototype.elementAtOrDefault = function (index, defaultValue) { if (defaultValue === undefined) defaultValue = null; var source = this.getSource(); return (0 <= index && index < source.length) ? source[index] : defaultValue; }; ArrayEnumerable.prototype.first = function (predicate) { var source = this.getSource(); return (predicate == null && source.length > 0) ? source[0] : Enumerable.prototype.first.apply(this, arguments); }; ArrayEnumerable.prototype.firstOrDefault = function (predicate, defaultValue) { if (defaultValue === undefined) defaultValue = null; if (predicate != null) { return Enumerable.prototype.firstOrDefault.apply(this, arguments); } var source = this.getSource(); return source.length > 0 ? source[0] : defaultValue; }; ArrayEnumerable.prototype.last = function (predicate) { var source = this.getSource(); return (predicate == null && source.length > 0) ? 
source[source.length - 1] : Enumerable.prototype.last.apply(this, arguments); }; ArrayEnumerable.prototype.lastOrDefault = function (predicate, defaultValue) { if (defaultValue === undefined) defaultValue = null; if (predicate != null) { return Enumerable.prototype.lastOrDefault.apply(this, arguments); } var source = this.getSource(); return source.length > 0 ? source[source.length - 1] : defaultValue; }; ArrayEnumerable.prototype.skip = function (count) { var source = this.getSource(); return new Enumerable(function () { var index; return new IEnumerator( function () { index = (count < 0) ? 0 : count; }, function () { return (index < source.length) ? this.yieldReturn(source[index++]) : false; }, Functions.Blank); }); }; ArrayEnumerable.prototype.takeExceptLast = function (count) { if (count == null) count = 1; return this.take(this.getSource().length - count); }; ArrayEnumerable.prototype.takeFromLast = function (count) { return this.skip(this.getSource().length - count); }; ArrayEnumerable.prototype.reverse = function () { var source = this.getSource(); return new Enumerable(function () { var index; return new IEnumerator( function () { index = source.length; }, function () { return (index > 0) ? 
this.yieldReturn(source[--index]) : false; }, Functions.Blank); }); }; ArrayEnumerable.prototype.sequenceEqual = function (second, compareSelector) { if ((second instanceof ArrayEnumerable || second instanceof Array) && compareSelector == null && Enumerable.from(second).count() != this.count()) { return false; } return Enumerable.prototype.sequenceEqual.apply(this, arguments); }; ArrayEnumerable.prototype.toJoinedString = function (separator, selector) { var source = this.getSource(); if (selector != null || !(source instanceof Array)) { return Enumerable.prototype.toJoinedString.apply(this, arguments); } if (separator == null) separator = ""; return source.join(separator); }; ArrayEnumerable.prototype.getEnumerator = function () { var source = this.getSource(); var index = -1; // fast and simple enumerator return { current: function () { return source[index]; }, moveNext: function () { return ++index < source.length; }, dispose: Functions.Blank }; }; // optimization for multiple where and multiple select and whereselect var WhereEnumerable = function (source, predicate) { this.prevSource = source; this.prevPredicate = predicate; // predicate.length always <= 1 }; WhereEnumerable.prototype = new Enumerable(); WhereEnumerable.prototype.where = function (predicate) { predicate = Utils.createLambda(predicate); if (predicate.length <= 1) { var prevPredicate = this.prevPredicate; var composedPredicate = function (x) { return prevPredicate(x) && predicate(x); }; return new WhereEnumerable(this.prevSource, composedPredicate); } else { // if predicate use index, can't compose return Enumerable.prototype.where.call(this, predicate); } }; WhereEnumerable.prototype.select = function (selector) { selector = Utils.createLambda(selector); return (selector.length <= 1) ? 
new WhereSelectEnumerable(this.prevSource, this.prevPredicate, selector) : Enumerable.prototype.select.call(this, selector); }; WhereEnumerable.prototype.getEnumerator = function () { var predicate = this.prevPredicate; var source = this.prevSource; var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { if (predicate(enumerator.current())) { return this.yieldReturn(enumerator.current()); } } return false; }, function () { Utils.dispose(enumerator); }); }; var WhereSelectEnumerable = function (source, predicate, selector) { this.prevSource = source; this.prevPredicate = predicate; // predicate.length always <= 1 or null this.prevSelector = selector; // selector.length always <= 1 }; WhereSelectEnumerable.prototype = new Enumerable(); WhereSelectEnumerable.prototype.where = function (predicate) { predicate = Utils.createLambda(predicate); return (predicate.length <= 1) ? new WhereEnumerable(this, predicate) : Enumerable.prototype.where.call(this, predicate); }; WhereSelectEnumerable.prototype.select = function (selector) { selector = Utils.createLambda(selector); if (selector.length <= 1) { var prevSelector = this.prevSelector; var composedSelector = function (x) { return selector(prevSelector(x)); }; return new WhereSelectEnumerable(this.prevSource, this.prevPredicate, composedSelector); } else { // if selector use index, can't compose return Enumerable.prototype.select.call(this, selector); } }; WhereSelectEnumerable.prototype.getEnumerator = function () { var predicate = this.prevPredicate; var selector = this.prevSelector; var source = this.prevSource; var enumerator; return new IEnumerator( function () { enumerator = source.getEnumerator(); }, function () { while (enumerator.moveNext()) { if (predicate == null || predicate(enumerator.current())) { return this.yieldReturn(selector(enumerator.current())); } } return false; }, function () { Utils.dispose(enumerator); }); }; // 
Collections var Dictionary = (function () { // static utility methods var callHasOwnProperty = function (target, key) { return Object.prototype.hasOwnProperty.call(target, key); }; var computeHashCode = function (obj) { if (obj === null) return "null"; if (obj === undefined) return "undefined"; return (typeof obj.toString === Types.Function) ? obj.toString() : Object.prototype.toString.call(obj); }; // LinkedList for Dictionary var HashEntry = function (key, value) { this.key = key; this.value = value; this.prev = null; this.next = null; }; var EntryList = function () { this.first = null; this.last = null; }; EntryList.prototype = { addLast: function (entry) { if (this.last != null) { this.last.next = entry; entry.prev = this.last; this.last = entry; } else this.first = this.last = entry; }, replace: function (entry, newEntry) { if (entry.prev != null) { entry.prev.next = newEntry; newEntry.prev = entry.prev; } else this.first = newEntry; if (entry.next != null) { entry.next.prev = newEntry; newEntry.next = entry.next; } else this.last = newEntry; }, remove: function (entry) { if (entry.prev != null) entry.prev.next = entry.next; else this.first = entry.next; if (entry.next != null) entry.next.prev = entry.prev; else this.last = entry.prev; } }; // Overload:function() // Overload:function(compareSelector) var Dictionary = function (compareSelector) { this.countField = 0; this.entryList = new EntryList(); this.buckets = {}; // as Dictionary<string,List<object>> this.compareSelector = (compareSelector == null) ? 
Functions.Identity : compareSelector; }; Dictionary.prototype = { add: function (key, value) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); var entry = new HashEntry(key, value); if (callHasOwnProperty(this.buckets, hash)) { var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) { this.entryList.replace(array[i], entry); array[i] = entry; return; } } array.push(entry); } else { this.buckets[hash] = [entry]; } this.countField++; this.entryList.addLast(entry); }, get: function (key) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (!callHasOwnProperty(this.buckets, hash)) return undefined; var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { var entry = array[i]; if (this.compareSelector(entry.key) === compareKey) return entry.value; } return undefined; }, set: function (key, value) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (callHasOwnProperty(this.buckets, hash)) { var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) { var newEntry = new HashEntry(key, value); this.entryList.replace(array[i], newEntry); array[i] = newEntry; return true; } } } return false; }, contains: function (key) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (!callHasOwnProperty(this.buckets, hash)) return false; var array = this.buckets[hash]; for (var i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) return true; } return false; }, clear: function () { this.countField = 0; this.buckets = {}; this.entryList = new EntryList(); }, remove: function (key) { var compareKey = this.compareSelector(key); var hash = computeHashCode(compareKey); if (!callHasOwnProperty(this.buckets, hash)) return; var array = this.buckets[hash]; for (var 
i = 0; i < array.length; i++) { if (this.compareSelector(array[i].key) === compareKey) { this.entryList.remove(array[i]); array.splice(i, 1); if (array.length == 0) delete this.buckets[hash]; this.countField--; return; } } }, count: function () { return this.countField; }, toEnumerable: function () { var self = this; return new Enumerable(function () { var currentEntry; return new IEnumerator( function () { currentEntry = self.entryList.first; }, function () { if (currentEntry != null) { var result = { key: currentEntry.key, value: currentEntry.value }; currentEntry = currentEntry.next; return this.yieldReturn(result); } return false; }, Functions.Blank); }); } }; return Dictionary; })(); // dictionary = Dictionary<TKey, TValue[]> var Lookup = function (dictionary) { this.count = function () { return dictionary.count(); }; this.get = function (key) { return Enumerable.from(dictionary.get(key)); }; this.contains = function (key) { return dictionary.contains(key); }; this.toEnumerable = function () { return dictionary.toEnumerable().select(function (kvp) { return new Grouping(kvp.key, kvp.value); }); }; }; var Grouping = function (groupKey, elements) { this.key = function () { return groupKey; }; ArrayEnumerable.call(this, elements); }; Grouping.prototype = new ArrayEnumerable(); // module export if (typeof define === Types.Function && define.amd) { // AMD define("linqjs", [], function () { return Enumerable; }); } else if (typeof module !== Types.Undefined && module.exports) { // Node module.exports = Enumerable; } else { root.Enumerable = Enumerable; } })(this);
codemonkeychris/rainbow
linqjs/linq.js
JavaScript
apache-2.0
107,573
package org.apereo.cas.ticket.code; import org.apereo.cas.authentication.Authentication; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.ticket.ExpirationPolicy; import org.apereo.cas.ticket.Ticket; import org.apereo.cas.ticket.TicketFactory; import org.apereo.cas.ticket.UniqueTicketIdGenerator; import org.apereo.cas.util.DefaultUniqueTicketIdGenerator; /** * Default OAuth code factory. * * @author Jerome Leleu * @since 5.0.0 */ public class DefaultOAuthCodeFactory implements OAuthCodeFactory { /** Default instance for the ticket id generator. */ protected final UniqueTicketIdGenerator oAuthCodeIdGenerator; /** ExpirationPolicy for refresh tokens. */ protected final ExpirationPolicy expirationPolicy; public DefaultOAuthCodeFactory(final ExpirationPolicy expirationPolicy) { this(new DefaultUniqueTicketIdGenerator(), expirationPolicy); } public DefaultOAuthCodeFactory(final UniqueTicketIdGenerator refreshTokenIdGenerator, final ExpirationPolicy expirationPolicy) { this.oAuthCodeIdGenerator = refreshTokenIdGenerator; this.expirationPolicy = expirationPolicy; } @Override public OAuthCode create(final Service service, final Authentication authentication) { final String codeId = this.oAuthCodeIdGenerator.getNewTicketId(OAuthCode.PREFIX); return new OAuthCodeImpl(codeId, service, authentication, this.expirationPolicy); } @Override public <T extends TicketFactory> T get(final Class<? extends Ticket> clazz) { return (T) this; } }
gabedwrds/cas
support/cas-server-support-oauth/src/main/java/org/apereo/cas/ticket/code/DefaultOAuthCodeFactory.java
Java
apache-2.0
1,592
// NOTE(review): highly intricate highlighting-pass scheduler (dependency graph of ScheduledPass
// nodes, predecessor countdowns, Job submission, progress-indicator cancellation). Left byte-identical:
// several of this dump's line breaks fall inside string literals, so any reflow would change runtime
// strings; behavior is also sensitive to exact statement order (countdowns decremented before submit,
// passes registered before jobs run). Documented here rather than inline for that reason.
// Visible structure: submitPasses() partitions passes into document-bound / editor-bound / free,
// createScheduledPass() wires completion- and submit-predecessors, ScheduledPass.run() executes under
// an impatient read action and cascades successors, applyInformationToEditorsLater() applies results
// on the EDT and stops the progress indicator when the countdown reaches zero.
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.daemon.impl; import com.intellij.codeHighlighting.EditorBoundHighlightingPass; import com.intellij.codeHighlighting.HighlightingPass; import com.intellij.codeHighlighting.TextEditorHighlightingPass; import com.intellij.codeHighlighting.TextEditorHighlightingPassRegistrar; import com.intellij.concurrency.Job; import com.intellij.concurrency.JobLauncher; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.application.ex.ApplicationManagerEx; import com.intellij.openapi.application.ex.ApplicationUtil; import com.intellij.openapi.application.impl.ApplicationImpl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.TextEditor; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.util.Functions; import com.intellij.util.containers.CollectionFactory; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashingStrategy; import com.intellij.util.ui.UIUtil; import it.unimi.dsi.fastutil.ints.Int2ObjectMap; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; 
import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Matcher; import java.util.regex.Pattern; final class PassExecutorService implements Disposable { static final Logger LOG = Logger.getInstance(PassExecutorService.class); private static final boolean CHECK_CONSISTENCY = ApplicationManager.getApplication().isUnitTestMode(); private final Map<ScheduledPass, Job<Void>> mySubmittedPasses = new ConcurrentHashMap<>(); private final Project myProject; private volatile boolean isDisposed; private final AtomicInteger nextAvailablePassId; // used to assign random id to a pass if not set PassExecutorService(@NotNull Project project) { myProject = project; nextAvailablePassId = ((TextEditorHighlightingPassRegistrarImpl)TextEditorHighlightingPassRegistrar.getInstance(myProject)).getNextAvailableId(); } @Override public void dispose() { cancelAll(true); // some workers could, although idle, still retain some thread references for some time causing leak hunter to frown ForkJoinPool.commonPool().awaitQuiescence(1, TimeUnit.SECONDS); isDisposed = true; } void cancelAll(boolean waitForTermination) { for (Map.Entry<ScheduledPass, Job<Void>> entry : mySubmittedPasses.entrySet()) { Job<Void> job = entry.getValue(); ScheduledPass pass = entry.getKey(); pass.myUpdateProgress.cancel(); job.cancel(); } try { if (waitForTermination) { while (!waitFor(50)) { int i = 0; } } } catch (ProcessCanceledException ignored) { } catch (Error | RuntimeException e) { throw e; } catch (Throwable throwable) { LOG.error(throwable); } finally { mySubmittedPasses.clear(); } } void submitPasses(@NotNull Map<FileEditor, HighlightingPass[]> passesMap, // a list of opened FileEditors for each Document. 
The first FileEditor in the list is the preferred one @NotNull Map<Document, List<FileEditor>> documentToEditors, @NotNull DaemonProgressIndicator updateProgress) { if (isDisposed()) return; Map<FileEditor, List<TextEditorHighlightingPass>> documentBoundPasses = new HashMap<>(); Map<FileEditor, List<EditorBoundHighlightingPass>> editorBoundPasses = new HashMap<>(); Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass = new HashMap<>(); List<ScheduledPass> freePasses = new ArrayList<>(documentToEditors.size() * 5); AtomicInteger threadsToStartCountdown = new AtomicInteger(0); for (Map.Entry<FileEditor, HighlightingPass[]> entry : passesMap.entrySet()) { FileEditor fileEditor = entry.getKey(); HighlightingPass[] passes = entry.getValue(); for (HighlightingPass pass : passes) { Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(30)); if (pass instanceof EditorBoundHighlightingPass) { EditorBoundHighlightingPass editorPass = (EditorBoundHighlightingPass)pass; // have to make ids unique for this document assignUniqueId(editorPass, thisEditorId2Pass); editorBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(editorPass); } else if (pass instanceof TextEditorHighlightingPass) { TextEditorHighlightingPass tePass = (TextEditorHighlightingPass)pass; assignUniqueId(tePass, thisEditorId2Pass); documentBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(tePass); } else { // generic HighlightingPass, run all of them concurrently freePasses.add(new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown)); } } } List<ScheduledPass> dependentPasses = new ArrayList<>(documentToEditors.size() * 10); // fileEditor-> (passId -> created pass) Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted = new HashMap<>(passesMap.size()); for (Map.Entry<Document, List<FileEditor>> entry : documentToEditors.entrySet()) { List<FileEditor> fileEditors = 
entry.getValue(); FileEditor preferredFileEditor = fileEditors.get(0); // assumption: the preferred fileEditor is stored first List<TextEditorHighlightingPass> passes = documentBoundPasses.get(preferredFileEditor); if (passes == null || passes.isEmpty()) { continue; } sortById(passes); for (TextEditorHighlightingPass pass : passes) { createScheduledPass(preferredFileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown); } } for (Map.Entry<FileEditor, List<EditorBoundHighlightingPass>> entry : editorBoundPasses.entrySet()) { FileEditor fileEditor = entry.getKey(); Collection<EditorBoundHighlightingPass> createdEditorBoundPasses = entry.getValue(); for (EditorBoundHighlightingPass pass : createdEditorBoundPasses) { createScheduledPass(fileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown); } } if (CHECK_CONSISTENCY && !ApplicationManagerEx.isInStressTest()) { assertConsistency(freePasses, toBeSubmitted, threadsToStartCountdown); } if (LOG.isDebugEnabled()) { Set<VirtualFile> vFiles = ContainerUtil.map2Set(passesMap.keySet(), FileEditor::getFile); log(updateProgress, null, vFiles + " ----- starting " + threadsToStartCountdown.get(), freePasses); } for (ScheduledPass dependentPass : dependentPasses) { mySubmittedPasses.put(dependentPass, Job.nullJob()); } for (ScheduledPass freePass : freePasses) { submit(freePass); } } private void assignUniqueId(@NotNull TextEditorHighlightingPass pass, @NotNull Int2ObjectMap<TextEditorHighlightingPass> id2Pass) { int id = pass.getId(); if (id == -1 || id == 0) { id = nextAvailablePassId.incrementAndGet(); pass.setId(id); } TextEditorHighlightingPass prevPass = id2Pass.put(id, pass); if (prevPass != null) { LOG.error("Duplicate pass id found: "+id+". 
Both passes returned the same getId(): "+prevPass+" ("+prevPass.getClass() +") and "+pass+" ("+pass.getClass()+")"); } } private void assertConsistency(@NotNull List<ScheduledPass> freePasses, @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted, @NotNull AtomicInteger threadsToStartCountdown) { assert threadsToStartCountdown.get() == toBeSubmitted.values().stream().mapToInt(m->m.size()).sum(); Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits = CollectionFactory.createCustomHashingStrategyMap(new HashingStrategy<>() { @Override public int hashCode(@Nullable PassExecutorService.ScheduledPass sp) { if (sp == null) return 0; return ((TextEditorHighlightingPass)sp.myPass).getId() * 31 + sp.myFileEditor.hashCode(); } @Override public boolean equals(@Nullable PassExecutorService.ScheduledPass sp1, @Nullable PassExecutorService.ScheduledPass sp2) { if (sp1 == null || sp2 == null) return sp1 == sp2; int id1 = ((TextEditorHighlightingPass)sp1.myPass).getId(); int id2 = ((TextEditorHighlightingPass)sp2.myPass).getId(); return id1 == id2 && sp1.myFileEditor == sp2.myFileEditor; } }); for (ScheduledPass freePass : freePasses) { HighlightingPass pass = freePass.myPass; if (pass instanceof TextEditorHighlightingPass) { id2Visits.put(freePass, Pair.create(freePass, 0)); checkConsistency(freePass, id2Visits); } } for (Map.Entry<ScheduledPass, Pair<ScheduledPass, Integer>> entry : id2Visits.entrySet()) { int count = entry.getValue().second; assert count == 0 : entry.getKey(); } assert id2Visits.size() == threadsToStartCountdown.get() : "Expected "+threadsToStartCountdown+" but got "+id2Visits.size()+": "+id2Visits; } private void checkConsistency(@NotNull ScheduledPass pass, Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits) { for (ScheduledPass succ : ContainerUtil.concat(pass.mySuccessorsOnCompletion, pass.mySuccessorsOnSubmit)) { Pair<ScheduledPass, Integer> succPair = id2Visits.get(succ); if (succPair == null) { succPair = Pair.create(succ, 
succ.myRunningPredecessorsCount.get()); id2Visits.put(succ, succPair); } int newPred = succPair.second - 1; id2Visits.put(succ, Pair.create(succ, newPred)); assert newPred >= 0; if (newPred == 0) { checkConsistency(succ, id2Visits); } } } @NotNull private ScheduledPass createScheduledPass(@NotNull FileEditor fileEditor, @NotNull TextEditorHighlightingPass pass, @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted, @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass, @NotNull List<ScheduledPass> freePasses, @NotNull List<ScheduledPass> dependentPasses, @NotNull DaemonProgressIndicator updateProgress, @NotNull AtomicInteger threadsToStartCountdown) { Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass = toBeSubmitted.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20)); Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20)); int passId = pass.getId(); ScheduledPass scheduledPass = thisEditorId2ScheduledPass.get(passId); if (scheduledPass != null) return scheduledPass; scheduledPass = new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown); threadsToStartCountdown.incrementAndGet(); thisEditorId2ScheduledPass.put(passId, scheduledPass); for (int predecessorId : pass.getCompletionPredecessorIds()) { ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown, predecessorId, thisEditorId2ScheduledPass, thisEditorId2Pass); if (predecessor != null) { predecessor.addSuccessorOnCompletion(scheduledPass); } } for (int predecessorId : pass.getStartingPredecessorIds()) { ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown, predecessorId, thisEditorId2ScheduledPass, thisEditorId2Pass); if (predecessor != null) { 
predecessor.addSuccessorOnSubmit(scheduledPass); } } if (scheduledPass.myRunningPredecessorsCount.get() == 0 && !freePasses.contains(scheduledPass)) { freePasses.add(scheduledPass); } else if (!dependentPasses.contains(scheduledPass)) { dependentPasses.add(scheduledPass); } if (pass.isRunIntentionPassAfter() && fileEditor instanceof TextEditor) { Editor editor = ((TextEditor)fileEditor).getEditor(); VirtualFile virtualFile = fileEditor.getFile(); PsiFile psiFile = virtualFile == null ? null : ReadAction.compute(() -> PsiManager.getInstance(myProject).findFile(virtualFile)); if (psiFile != null) { ShowIntentionsPass ip = new ShowIntentionsPass(psiFile, editor, false); assignUniqueId(ip, thisEditorId2Pass); ip.setCompletionPredecessorIds(new int[]{passId}); createScheduledPass(fileEditor, ip, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown); } } return scheduledPass; } private ScheduledPass findOrCreatePredecessorPass(@NotNull FileEditor fileEditor, @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted, @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass, @NotNull List<ScheduledPass> freePasses, @NotNull List<ScheduledPass> dependentPasses, @NotNull DaemonProgressIndicator updateProgress, @NotNull AtomicInteger myThreadsToStartCountdown, int predecessorId, @NotNull Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass, @NotNull Int2ObjectMap<? extends TextEditorHighlightingPass> thisEditorId2Pass) { ScheduledPass predecessor = thisEditorId2ScheduledPass.get(predecessorId); if (predecessor == null) { TextEditorHighlightingPass textEditorPass = thisEditorId2Pass.get(predecessorId); predecessor = textEditorPass == null ? 
null : createScheduledPass(fileEditor, textEditorPass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, myThreadsToStartCountdown); } return predecessor; } private void submit(@NotNull ScheduledPass pass) { if (!pass.myUpdateProgress.isCanceled()) { Job<Void> job = JobLauncher.getInstance().submitToJobThread(pass, future -> { try { if (!future.isCancelled()) { // for canceled task .get() generates CancellationException which is expensive future.get(); } } catch (CancellationException | InterruptedException ignored) { } catch (ExecutionException e) { LOG.error(e.getCause()); } }); mySubmittedPasses.put(pass, job); } } private final class ScheduledPass implements Runnable { private final FileEditor myFileEditor; private final HighlightingPass myPass; private final AtomicInteger myThreadsToStartCountdown; private final AtomicInteger myRunningPredecessorsCount = new AtomicInteger(0); private final List<ScheduledPass> mySuccessorsOnCompletion = new ArrayList<>(); private final List<ScheduledPass> mySuccessorsOnSubmit = new ArrayList<>(); @NotNull private final DaemonProgressIndicator myUpdateProgress; private ScheduledPass(@NotNull FileEditor fileEditor, @NotNull HighlightingPass pass, @NotNull DaemonProgressIndicator progressIndicator, @NotNull AtomicInteger threadsToStartCountdown) { myFileEditor = fileEditor; myPass = pass; myThreadsToStartCountdown = threadsToStartCountdown; myUpdateProgress = progressIndicator; } @Override public void run() { ((ApplicationImpl)ApplicationManager.getApplication()).executeByImpatientReader(() -> { try { doRun(); } catch (ApplicationUtil.CannotRunReadActionException e) { myUpdateProgress.cancel(); } catch (RuntimeException | Error e) { saveException(e, myUpdateProgress); throw e; } }); } private void doRun() { if (myUpdateProgress.isCanceled()) return; log(myUpdateProgress, myPass, "Started. 
"); for (ScheduledPass successor : mySuccessorsOnSubmit) { int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet(); if (predecessorsToRun == 0) { submit(successor); } } ProgressManager.getInstance().executeProcessUnderProgress(() -> { boolean success = ApplicationManagerEx.getApplicationEx().tryRunReadAction(() -> { try { if (DumbService.getInstance(myProject).isDumb() && !DumbService.isDumbAware(myPass)) { return; } if (!myUpdateProgress.isCanceled() && !myProject.isDisposed()) { myPass.collectInformation(myUpdateProgress); } } catch (ProcessCanceledException e) { log(myUpdateProgress, myPass, "Canceled "); if (!myUpdateProgress.isCanceled()) { myUpdateProgress.cancel(e); //in case when some smart asses throw PCE just for fun } } catch (RuntimeException | Error e) { myUpdateProgress.cancel(e); LOG.error(e); throw e; } }); if (!success) { myUpdateProgress.cancel(); } }, myUpdateProgress); log(myUpdateProgress, myPass, "Finished. "); if (!myUpdateProgress.isCanceled()) { applyInformationToEditorsLater(myFileEditor, myPass, myUpdateProgress, myThreadsToStartCountdown, ()->{ for (ScheduledPass successor : mySuccessorsOnCompletion) { int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet(); if (predecessorsToRun == 0) { submit(successor); } } }); } } @NonNls @Override public String toString() { return "SP: " + myPass; } private void addSuccessorOnCompletion(@NotNull ScheduledPass successor) { mySuccessorsOnCompletion.add(successor); successor.myRunningPredecessorsCount.incrementAndGet(); } private void addSuccessorOnSubmit(@NotNull ScheduledPass successor) { mySuccessorsOnSubmit.add(successor); successor.myRunningPredecessorsCount.incrementAndGet(); } } private void applyInformationToEditorsLater(@NotNull FileEditor fileEditor, @NotNull HighlightingPass pass, @NotNull DaemonProgressIndicator updateProgress, @NotNull AtomicInteger threadsToStartCountdown, @NotNull Runnable callbackOnApplied) { 
ApplicationManager.getApplication().invokeLater(() -> { if (isDisposed() || !fileEditor.isValid()) { updateProgress.cancel(); } if (updateProgress.isCanceled()) { log(updateProgress, pass, " is canceled during apply, sorry"); return; } try { if (UIUtil.isShowing(fileEditor.getComponent())) { pass.applyInformationToEditor(); repaintErrorStripeAndIcon(fileEditor); if (pass instanceof TextEditorHighlightingPass) { FileStatusMap fileStatusMap = DaemonCodeAnalyzerEx.getInstanceEx(myProject).getFileStatusMap(); Document document = ((TextEditorHighlightingPass)pass).getDocument(); int passId = ((TextEditorHighlightingPass)pass).getId(); fileStatusMap.markFileUpToDate(document, passId); } log(updateProgress, pass, " Applied"); } } catch (ProcessCanceledException e) { log(updateProgress, pass, "Error " + e); throw e; } catch (RuntimeException e) { VirtualFile file = fileEditor.getFile(); FileType fileType = file == null ? null : file.getFileType(); String message = "Exception while applying information to " + fileEditor + "("+fileType+")"; log(updateProgress, pass, message + e); throw new RuntimeException(message, e); } if (threadsToStartCountdown.decrementAndGet() == 0) { HighlightingSessionImpl.waitForAllSessionsHighlightInfosApplied(updateProgress); log(updateProgress, pass, "Stopping "); updateProgress.stopIfRunning(); clearStaleEntries(); } else { log(updateProgress, pass, "Finished but there are passes in the queue: " + threadsToStartCountdown.get()); } callbackOnApplied.run(); }, updateProgress.getModalityState(), pass.getExpiredCondition()); } private void clearStaleEntries() { mySubmittedPasses.keySet().removeIf(pass -> pass.myUpdateProgress.isCanceled()); } private void repaintErrorStripeAndIcon(@NotNull FileEditor fileEditor) { if (fileEditor instanceof TextEditor) { DefaultHighlightInfoProcessor.repaintErrorStripeAndIcon(((TextEditor)fileEditor).getEditor(), myProject); } } private boolean isDisposed() { return isDisposed || myProject.isDisposed(); } @NotNull 
List<HighlightingPass> getAllSubmittedPasses() { List<HighlightingPass> result = new ArrayList<>(mySubmittedPasses.size()); for (ScheduledPass scheduledPass : mySubmittedPasses.keySet()) { if (!scheduledPass.myUpdateProgress.isCanceled()) { result.add(scheduledPass.myPass); } } return result; } private static void sortById(@NotNull List<? extends TextEditorHighlightingPass> result) { ContainerUtil.quickSort(result, Comparator.comparingInt(TextEditorHighlightingPass::getId)); } private static int getThreadNum() { Matcher matcher = Pattern.compile("JobScheduler FJ pool (\\d*)/(\\d*)").matcher(Thread.currentThread().getName()); String num = matcher.matches() ? matcher.group(1) : null; return StringUtil.parseInt(num, 0); } static void log(ProgressIndicator progressIndicator, HighlightingPass pass, @NonNls Object @NotNull ... info) { if (LOG.isDebugEnabled()) { Document document = pass instanceof TextEditorHighlightingPass ? ((TextEditorHighlightingPass)pass).getDocument() : null; CharSequence docText = document == null ? "" : ": '" + StringUtil.first(document.getCharsSequence(), 10, true)+ "'"; synchronized (PassExecutorService.class) { String infos = StringUtil.join(info, Functions.TO_STRING(), " "); String message = StringUtil.repeatSymbol(' ', getThreadNum() * 4) + " " + pass + " " + infos + "; progress=" + (progressIndicator == null ? null : progressIndicator.hashCode()) + " " + (progressIndicator == null ? "?" : progressIndicator.isCanceled() ? 
"X" : "V") + docText; LOG.debug(message); //System.out.println(message); } } } private static final Key<Throwable> THROWABLE_KEY = Key.create("THROWABLE_KEY"); static void saveException(@NotNull Throwable e, @NotNull DaemonProgressIndicator indicator) { indicator.putUserDataIfAbsent(THROWABLE_KEY, e); } @TestOnly static Throwable getSavedException(@NotNull DaemonProgressIndicator indicator) { return indicator.getUserData(THROWABLE_KEY); } // return true if terminated boolean waitFor(int millis) throws Throwable { try { for (Job<Void> job : mySubmittedPasses.values()) { job.waitForCompletion(millis); } return true; } catch (TimeoutException ignored) { return false; } catch (InterruptedException e) { return true; } catch (ExecutionException e) { throw e.getCause(); } } }
jwren/intellij-community
platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/PassExecutorService.java
Java
apache-2.0
25,837
// NOTE(review): auto-generated sample ("Generated code. DO NOT EDIT!") — left byte-identical on purpose;
// any fix belongs in the snippet generator, not in this file.
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.Channel.V1.Snippets { // [START cloudchannel_v1_generated_CloudChannelService_DeleteCustomer_sync] using Google.Cloud.Channel.V1; public sealed partial class GeneratedCloudChannelServiceClientSnippets { /// <summary>Snippet for DeleteCustomer</summary> /// <remarks> /// This snippet has been automatically generated for illustrative purposes only. /// It may require modifications to work in your environment. /// </remarks> public void DeleteCustomerRequestObject() { // Create client CloudChannelServiceClient cloudChannelServiceClient = CloudChannelServiceClient.Create(); // Initialize request argument(s) DeleteCustomerRequest request = new DeleteCustomerRequest { CustomerName = CustomerName.FromAccountCustomer("[ACCOUNT]", "[CUSTOMER]"), }; // Make the request cloudChannelServiceClient.DeleteCustomer(request); } } // [END cloudchannel_v1_generated_CloudChannelService_DeleteCustomer_sync] }
googleapis/google-cloud-dotnet
apis/Google.Cloud.Channel.V1/Google.Cloud.Channel.V1.GeneratedSnippets/CloudChannelServiceClient.DeleteCustomerRequestObjectSnippet.g.cs
C#
apache-2.0
1,748
/*
 * Copyright 2015-2016 Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.hal.client.runtime.subsystem.elytron.wizardpassword;

/**
 * Steps of the Elytron password wizard.
 * <p>
 * The constants are declared in wizard order; step semantics are inferred from
 * the names — confirm against the wizard implementation if in doubt.
 */
public enum PasswordState {

    /** First step: select the type of password to configure. */
    CHOOSE_PASSWORD_TYPE,

    /** Second step: enter the configuration details for the chosen type. */
    CONFIGURATION,

    /** Final step: review the entered configuration. */
    REVIEW
}
hpehl/hal.next
app/src/main/java/org/jboss/hal/client/runtime/subsystem/elytron/wizardpassword/PasswordState.java
Java
apache-2.0
787
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.rave.portal.repository.impl; import org.apache.rave.exception.NotSupportedException; import org.apache.commons.lang3.StringUtils; import org.apache.rave.exception.DataSerializationException; import org.apache.rave.model.ApplicationData; import org.apache.rave.portal.model.JpaApplicationData; import org.apache.rave.portal.model.conversion.JpaApplicationDataConverter; import org.apache.rave.portal.repository.ApplicationDataRepository; import org.apache.rave.util.CollectionUtils; import org.apache.rave.util.JsonUtils; import org.json.JSONException; import org.json.JSONObject; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import org.springframework.transaction.annotation.Transactional; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EntityManager; import javax.persistence.Lob; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import static org.apache.rave.persistence.jpa.util.JpaUtil.getSingleResult; import static 
org.apache.rave.persistence.jpa.util.JpaUtil.saveOrUpdate; @Repository public class JpaApplicationDataRepository implements ApplicationDataRepository { @PersistenceContext private EntityManager manager; @Autowired private JpaApplicationDataConverter converter; @Override public Class<? extends ApplicationData> getType() { return JpaApplicationData.class; } @Override public ApplicationData get(String id) { JpaSerializableApplicationData applicationData = (JpaSerializableApplicationData) manager.find(JpaApplicationData.class, Long.parseLong(id)); if (applicationData != null) { applicationData.deserializeData(); } return applicationData; } @Override @Transactional public JpaApplicationData save(ApplicationData item) { JpaApplicationData jpaAppData = converter.convert(item); JpaSerializableApplicationData jpaSerializableApplicationData = getJpaSerializableApplicationData(jpaAppData); jpaSerializableApplicationData.serializeData(); return saveOrUpdate(jpaSerializableApplicationData.getEntityId(), manager, jpaSerializableApplicationData); } @Override public void delete(ApplicationData item) { manager.remove(item instanceof JpaApplicationData ? 
item : get(item.getId())); } @Override public List<ApplicationData> getAll() { throw new NotSupportedException("This function is not yet implemented for this class."); } @Override public List<ApplicationData> getLimitedList(int offset, int limit) { throw new NotSupportedException("This function is not yet implemented for this class."); } @Override public int getCountAll() { throw new NotSupportedException("This function is not yet implemented for this class."); } @Override public List<ApplicationData> getApplicationData(List<String> userIds, String appId) { //if the call is only looking for data for a single user use the more efficient single user variant transparently if (userIds.size() == 1) { List<ApplicationData> data = new ArrayList<ApplicationData>(); ApplicationData applicationData = getApplicationData(userIds.get(0), appId); if (applicationData != null) { data.add(applicationData); } return data; } TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_IDS_AND_APP_ID, JpaSerializableApplicationData.class); query.setParameter(JpaApplicationData.USER_IDS_PARAM, userIds); query.setParameter(JpaApplicationData.APP_URL_PARAM, appId); List<JpaSerializableApplicationData> results = query.getResultList(); for (JpaSerializableApplicationData applicationData : results) { applicationData.deserializeData(); } return CollectionUtils.<ApplicationData>toBaseTypedList(results); } @Override public JpaApplicationData getApplicationData(String personId, String appId) { TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_ID_AND_APP_ID, JpaSerializableApplicationData.class); query.setParameter(JpaApplicationData.USER_ID_PARAM, personId); query.setParameter(JpaApplicationData.APP_URL_PARAM, appId); JpaSerializableApplicationData applicationData = getSingleResult(query.getResultList()); if (applicationData != null) { applicationData.deserializeData(); } return 
applicationData; } private JpaSerializableApplicationData getJpaSerializableApplicationData(JpaApplicationData applicationData) { if (applicationData instanceof JpaSerializableApplicationData) { return (JpaSerializableApplicationData) applicationData; } return new JpaSerializableApplicationData(applicationData.getEntityId(), applicationData.getUserId(), applicationData.getAppUrl(), applicationData.getData()); } /** * This class is here so that the details of the persistence strategy in use for serializing the appdata map to a * JSON string doesnt end up being reflected in any public API of the ApplicationData object itself. * <p/> * This allows the public API of this repository to deal in clean ApplicationData models, but under the covers it * uses this model for the actual persistence to the database. */ @Entity public static class JpaSerializableApplicationData extends JpaApplicationData { @Lob @Column(name = "serialized_data") private String serializedData; public JpaSerializableApplicationData() { super(); } public JpaSerializableApplicationData(Long entityId, String userId, String appUrl, Map<String, Object> data) { super(entityId, userId, appUrl, data); } public void serializeData() { Map<String, Object> data = this.getData(); if (data != null) { serializedData = JsonUtils.stringify(data); } } @SuppressWarnings("unchecked") public void deserializeData() { if (serializedData != null && StringUtils.isNotBlank(serializedData)) { this.setData(JsonUtils.parse(serializedData, Map.class)); } } } }
kidaa/rave
rave-components/rave-jpa/src/main/java/org/apache/rave/portal/repository/impl/JpaApplicationDataRepository.java
Java
apache-2.0
7,525
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
  Created on Jan 21, 2020

  @author: alfoa, wangc

  Lasso model fit with Lars using BIC or AIC for model selection.
"""
#Internal Modules (Lazy Importer)--------------------------------------------------------------------
#Internal Modules (Lazy Importer) End----------------------------------------------------------------

#External Modules------------------------------------------------------------------------------------
from numpy import finfo
#External Modules End--------------------------------------------------------------------------------

#Internal Modules------------------------------------------------------------------------------------
from SupervisedLearning.ScikitLearn import ScikitLearnBase
from utils import InputData, InputTypes
#Internal Modules End--------------------------------------------------------------------------------

class LassoLarsIC(ScikitLearnBase):
  """
    Lasso model fit with Lars using BIC or AIC for model selection
  """
  # ROM metadata consumed by the framework: regression problem, no implicit normalization.
  info = {'problemtype':'regression', 'normalize':False}

  def __init__(self):
    """
      Constructor that will appropriately initialize a supervised learning object
      @ In, None
      @ Out, None
    """
    super().__init__()
    import sklearn
    import sklearn.linear_model
    # Store the estimator class (not an instance); instantiation with the user
    # settings happens later in initializeModel.
    self.model = sklearn.linear_model.LassoLarsIC

  @classmethod
  def getInputSpecification(cls):
    """
      Method to get a reference to a class that specifies the input data for class cls.
      @ In, cls, the class for which we are retrieving the specification
      @ Out, inputSpecification, InputData.ParameterInput, class to use for specifying input of cls.
    """
    # NOTE: zero-argument super() is used for consistency with __init__ above.
    specs = super().getInputSpecification()
    specs.description = r"""The \xmlNode{LassoLarsIC} (\textit{Lasso model fit with Lars using BIC or AIC for model selection})
                        is a Lasso model fit with Lars using BIC or AIC for model selection.
                        The optimization objective for Lasso is:
                        $(1 / (2 * n\_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1$
                        AIC is the Akaike information criterion and BIC is the Bayes Information criterion.
                        Such criteria are useful to select the value of the regularization parameter by making a
                        trade-off between the goodness of fit and the complexity of the model. A good model should
                        explain well the data while being simple.
                        \zNormalizationNotPerformed{LassoLarsIC}
                        """
    specs.addSub(InputData.parameterInputFactory("criterion", contentType=InputTypes.makeEnumType("criterion", "criterionType",['bic', 'aic']),
                                                 descr=r"""The type of criterion to use.""", default='aic'))
    specs.addSub(InputData.parameterInputFactory("fit_intercept", contentType=InputTypes.BoolType,
                                                 descr=r"""Whether the intercept should be estimated or not. If False,
                                                  the data is assumed to be already centered.""", default=True))
    specs.addSub(InputData.parameterInputFactory("normalize", contentType=InputTypes.BoolType,
                                                 descr=r"""This parameter is ignored when fit_intercept is set to False. If True,
                                                 the regressors X will be normalized before regression by subtracting the mean and
                                                 dividing by the l2-norm.""", default=True))
    specs.addSub(InputData.parameterInputFactory("max_iter", contentType=InputTypes.IntegerType,
                                                 descr=r"""The maximum number of iterations.""", default=500))
    specs.addSub(InputData.parameterInputFactory("precompute", contentType=InputTypes.StringType,
                                                 descr=r"""Whether to use a precomputed Gram matrix to speed up calculations.
                                                 For sparse input this option is always True to preserve sparsity.""", default='auto'))
    specs.addSub(InputData.parameterInputFactory("eps", contentType=InputTypes.FloatType,
                                                 descr=r"""The machine-precision regularization in the computation of the Cholesky
                                                 diagonal factors. Increase this for very ill-conditioned systems. Unlike the tol
                                                 parameter in some iterative optimization-based algorithms, this parameter does not
                                                 control the tolerance of the optimization.""", default=finfo(float).eps))
    specs.addSub(InputData.parameterInputFactory("positive", contentType=InputTypes.BoolType,
                                                 descr=r"""When set to True, forces the coefficients to be positive.""", default=False))
    specs.addSub(InputData.parameterInputFactory("verbose", contentType=InputTypes.BoolType,
                                                 descr=r"""Amount of verbosity.""", default=False))
    return specs

  def _handleInput(self, paramInput):
    """
      Function to handle the common parts of the distribution parameter input.
      @ In, paramInput, ParameterInput, the already parsed input.
      @ Out, None
    """
    super()._handleInput(paramInput)
    settings, notFound = paramInput.findNodesAndExtractValues(['fit_intercept','max_iter', 'normalize', 'precompute',
                                                              'eps','positive','criterion', 'verbose'])
    # notFound must be empty
    assert(not notFound)
    self.initializeModel(settings)
idaholab/raven
framework/SupervisedLearning/ScikitLearn/LinearModel/LassoLarsIC.py
Python
apache-2.0
6,386
/* * Copyright 2015 John Ahlroos * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.tabsheet; import com.vaadin.shared.ui.tabsheet.TabsheetState; import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DDLayoutState; import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DragAndDropAwareState; public class DDTabSheetState extends TabsheetState implements DragAndDropAwareState { public static final float DEFAULT_HORIZONTAL_DROP_RATIO = 0.2f; public float tabLeftRightDropRatio = DEFAULT_HORIZONTAL_DROP_RATIO; public DDLayoutState ddState = new DDLayoutState(); @Override public DDLayoutState getDragAndDropState() { return ddState; } }
dimone-kun/cuba
modules/web-widgets/src/com/haulmont/cuba/web/widgets/client/addons/dragdroplayouts/ui/tabsheet/DDTabSheetState.java
Java
apache-2.0
1,315
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.fontbox.ttf; import java.io.IOException; /** * A table in a true type font. * * @author Ben Litchfield */ public class HorizontalMetricsTable extends TTFTable { /** * A tag that identifies this table type. */ public static final String TAG = "hmtx"; private int[] advanceWidth; private short[] leftSideBearing; private short[] nonHorizontalLeftSideBearing; private int numHMetrics; HorizontalMetricsTable(TrueTypeFont font) { super(font); } /** * This will read the required data from the stream. * * @param ttf The font that is being read. * @param data The stream to read the data from. * @throws IOException If there is an error reading the data. 
*/ @Override void read(TrueTypeFont ttf, TTFDataStream data) throws IOException { HorizontalHeaderTable hHeader = ttf.getHorizontalHeader(); if (hHeader == null) { throw new IOException("Could not get hmtx table"); } numHMetrics = hHeader.getNumberOfHMetrics(); int numGlyphs = ttf.getNumberOfGlyphs(); int bytesRead = 0; advanceWidth = new int[ numHMetrics ]; leftSideBearing = new short[ numHMetrics ]; for( int i=0; i<numHMetrics; i++ ) { advanceWidth[i] = data.readUnsignedShort(); leftSideBearing[i] = data.readSignedShort(); bytesRead += 4; } int numberNonHorizontal = numGlyphs - numHMetrics; // handle bad fonts with too many hmetrics if (numberNonHorizontal < 0) { numberNonHorizontal = numGlyphs; } // make sure that table is never null and correct size, even with bad fonts that have no // "leftSideBearing" table although they should nonHorizontalLeftSideBearing = new short[numberNonHorizontal]; if (bytesRead < getLength()) { for( int i=0; i<numberNonHorizontal; i++ ) { if (bytesRead < getLength()) { nonHorizontalLeftSideBearing[i] = data.readSignedShort(); bytesRead += 2; } } } initialized = true; } /** * Returns the advance width for the given GID. * * @param gid GID */ public int getAdvanceWidth(int gid) { if (advanceWidth.length == 0) { return 250; } if (gid < numHMetrics) { return advanceWidth[gid]; } else { // monospaced fonts may not have a width for every glyph // the last one is for subsequent glyphs return advanceWidth[advanceWidth.length -1]; } } /** * Returns the left side bearing for the given GID. * * @param gid GID */ public int getLeftSideBearing(int gid) { if (leftSideBearing.length == 0) { return 0; } if (gid < numHMetrics) { return leftSideBearing[gid]; } else { return nonHorizontalLeftSideBearing[gid - numHMetrics]; } } }
apache/pdfbox
fontbox/src/main/java/org/apache/fontbox/ttf/HorizontalMetricsTable.java
Java
apache-2.0
4,062
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.asterix.external.library.java.base; import org.apache.asterix.external.api.IJObject; import org.apache.asterix.om.types.IAType; import org.apache.asterix.om.util.container.IObjectPool; public abstract class JComplexObject<T> implements IJObject<T> { protected IObjectPool<IJObject, IAType> pool; public void setPool(IObjectPool<IJObject, IAType> pool) { this.pool = pool; } }
apache/incubator-asterixdb
asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/base/JComplexObject.java
Java
apache-2.0
1,245
/* Copyright AppsCode Inc. and Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha1 import ( "fmt" "strings" ) const ( KindDeployment = "Deployment" KindReplicaSet = "ReplicaSet" KindReplicationController = "ReplicationController" KindStatefulSet = "StatefulSet" KindDaemonSet = "DaemonSet" KindPod = "Pod" KindPersistentVolumeClaim = "PersistentVolumeClaim" KindAppBinding = "AppBinding" KindDeploymentConfig = "DeploymentConfig" KindSecret = "Secret" ) // LocalTypedReference contains enough information to let you inspect or modify the referred object. type LocalTypedReference struct { // Kind of the referent. // More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds // +optional Kind string `json:"kind,omitempty" protobuf:"bytes,1,opt,name=kind"` // Name of the referent. // More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names // +optional Name string `json:"name,omitempty" protobuf:"bytes,2,opt,name=name"` // API version of the referent. 
// +optional APIVersion string `json:"apiVersion,omitempty" protobuf:"bytes,3,opt,name=apiVersion"` } func (workload *LocalTypedReference) Canonicalize() error { if workload.Name == "" || workload.Kind == "" { return fmt.Errorf("missing workload name or kind") } switch strings.ToLower(workload.Kind) { case "deployments", "deployment", "deploy": workload.Kind = KindDeployment case "replicasets", "replicaset", "rs": workload.Kind = KindReplicaSet case "replicationcontrollers", "replicationcontroller", "rc": workload.Kind = KindReplicationController case "statefulsets", "statefulset": workload.Kind = KindStatefulSet case "daemonsets", "daemonset", "ds": workload.Kind = KindDaemonSet default: return fmt.Errorf(`unrecognized workload "Kind" %v`, workload.Kind) } return nil } func (workload LocalTypedReference) GetRepositoryCRDName(podName, nodeName string) string { name := "" switch workload.Kind { case KindDeployment, KindReplicaSet, KindReplicationController: name = strings.ToLower(workload.Kind) + "." + workload.Name case KindStatefulSet: name = strings.ToLower(workload.Kind) + "." + podName case KindDaemonSet: name = strings.ToLower(workload.Kind) + "." + workload.Name + "." 
+ nodeName } return name } func (workload LocalTypedReference) HostnamePrefix(podName, nodeName string) (hostname, prefix string, err error) { if err := workload.Canonicalize(); err != nil { return "", "", err } if workload.Name == "" || workload.Kind == "" { return "", "", fmt.Errorf("missing workload name or kind") } switch workload.Kind { case KindDeployment, KindReplicaSet, KindReplicationController: return workload.Name, strings.ToLower(workload.Kind) + "/" + workload.Name, nil case KindStatefulSet: if podName == "" { return "", "", fmt.Errorf("missing podName for %s", KindStatefulSet) } return podName, strings.ToLower(workload.Kind) + "/" + podName, nil case KindDaemonSet: if nodeName == "" { return "", "", fmt.Errorf("missing nodeName for %s", KindDaemonSet) } return nodeName, strings.ToLower(workload.Kind) + "/" + workload.Name + "/" + nodeName, nil default: return "", "", fmt.Errorf(`unrecognized workload "Kind" %v`, workload.Kind) } } func StatefulSetPodName(appName, podOrdinal string) (string, error) { if appName == "" || podOrdinal == "" { return "", fmt.Errorf("missing appName or podOrdinal") } return appName + "-" + podOrdinal, nil }
k8sdb/apimachinery
vendor/stash.appscode.dev/apimachinery/apis/stash/v1alpha1/workload.go
GO
apache-2.0
4,090
package com.cloudhopper.commons.charset.demo; /* * #%L * ch-commons-charset * %% * Copyright (C) 2012 Cloudhopper by Twitter * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.cloudhopper.commons.charset.CharsetUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author joelauer */ public class Charset5Main { private static final Logger logger = LoggerFactory.getLogger(Charset5Main.class); static public void main(String[] args) throws Exception { String sourceString = "h\u6025\u20ACllo"; String targetString = CharsetUtil.normalize(sourceString, CharsetUtil.CHARSET_UTF_8); logger.debug("source string: " + sourceString); logger.debug("target string: " + targetString); } }
twitter/cloudhopper-commons
ch-commons-charset/src/test/java/com/cloudhopper/commons/charset/demo/Charset5Main.java
Java
apache-2.0
1,291
/* COPYRIGHT (c) 2014 Umut Acar, Arthur Chargueraud, and Michael * Rainey * All rights reserved. * * \file adjlist.hpp * \brief Adjacency-list graph format * */ #ifndef _PASL_GRAPH_ADJLIST_H_ #define _PASL_GRAPH_ADJLIST_H_ #include "../../graph/include/graph.hpp" /***********************************************************************/ namespace pasl { namespace graph { /*---------------------------------------------------------------------*/ /* Symmetric vertex */ template <class Vertex_id_bag> class symmetric_vertex { public: typedef Vertex_id_bag vtxid_bag_type; typedef typename vtxid_bag_type::value_type vtxid_type; symmetric_vertex() { } symmetric_vertex(vtxid_bag_type neighbors) : neighbors(neighbors) { } vtxid_bag_type neighbors; vtxid_type get_in_neighbor(vtxid_type j) const { return neighbors[j]; } vtxid_type get_out_neighbor(vtxid_type j) const { return neighbors[j]; } vtxid_type* get_in_neighbors() const { return neighbors.data(); } vtxid_type* get_out_neighbors() const { return neighbors.data(); } void set_in_neighbor(vtxid_type j, vtxid_type nbr) { neighbors[j] = nbr; } void set_out_neighbor(vtxid_type j, vtxid_type nbr) { neighbors[j] = nbr; } vtxid_type get_in_degree() const { return vtxid_type(neighbors.size()); } vtxid_type get_out_degree() const { return vtxid_type(neighbors.size()); } void set_in_degree(vtxid_type j) { neighbors.alloc(j); } // todo: use neighbors.resize() void set_out_degree(vtxid_type j) { neighbors.alloc(j); } void swap_in_neighbors(vtxid_bag_type& other) { neighbors.swap(other); } void swap_out_neighbors(vtxid_bag_type& other) { neighbors.swap(other); } void check(vtxid_type nb_vertices) const { #ifndef NDEBUG for (vtxid_type i = 0; i < neighbors.size(); i++) check_vertex(neighbors[i], nb_vertices); #endif } }; /*---------------------------------------------------------------------*/ /* Asymmetric vertex */ template <class Vertex_id_bag> class asymmetric_vertex { public: typedef Vertex_id_bag vtxid_bag_type; typedef typename 
vtxid_bag_type::value_type vtxid_type; vtxid_bag_type in_neighbors; vtxid_bag_type out_neighbors; vtxid_type get_in_neighbor(vtxid_type j) const { return in_neighbors[j]; } vtxid_type get_out_neighbor(vtxid_type j) const { return out_neighbors[j]; } vtxid_type* get_in_neighbors() const { return in_neighbors.data(); } vtxid_type* get_out_neighbors() const { return out_neighbors.data(); } void set_in_neighbor(vtxid_type j, vtxid_type nbr) { in_neighbors[j] = nbr; } void set_out_neighbor(vtxid_type j, vtxid_type nbr) { out_neighbors[j] = nbr; } vtxid_type get_in_degree() const { return vtxid_type(in_neighbors.size()); } vtxid_type get_out_degree() const { return vtxid_type(out_neighbors.size()); } void set_in_degree(vtxid_type j) { in_neighbors.alloc(j); } void set_out_degree(vtxid_type j) { out_neighbors.alloc(j); } void swap_in_neighbors(vtxid_bag_type& other) { in_neighbors.swap(other); } void swap_out_neighbors(vtxid_bag_type& other) { out_neighbors.swap(other); } void check(vtxid_type nb_vertices) const { for (vtxid_type i = 0; i < in_neighbors.size(); i++) check_vertex(in_neighbors[i], nb_vertices); for (vtxid_type i = 0; i < out_neighbors.size(); i++) check_vertex(out_neighbors[i], nb_vertices); } }; /*---------------------------------------------------------------------*/ /* Adjacency-list format */ template <class Adjlist_seq> class adjlist { public: typedef Adjlist_seq adjlist_seq_type; typedef typename adjlist_seq_type::value_type vertex_type; typedef typename vertex_type::vtxid_bag_type::value_type vtxid_type; typedef typename adjlist_seq_type::alias_type adjlist_seq_alias_type; typedef adjlist<adjlist_seq_alias_type> alias_type; edgeid_type nb_edges; adjlist_seq_type adjlists; adjlist() : nb_edges(0) { } adjlist(edgeid_type nb_edges) : nb_edges(nb_edges) { } vtxid_type get_nb_vertices() const { return vtxid_type(adjlists.size()); } void check() const { #ifndef NDEBUG for (vtxid_type i = 0; i < adjlists.size(); i++) adjlists[i].check(get_nb_vertices()); 
size_t m = 0; for (vtxid_type i = 0; i < adjlists.size(); i++) m += adjlists[i].get_in_degree(); assert(m == nb_edges); m = 0; for (vtxid_type i = 0; i < adjlists.size(); i++) m += adjlists[i].get_out_degree(); assert(m == nb_edges); #endif } }; /*---------------------------------------------------------------------*/ /* Equality operators */ template <class Vertex_id_bag> bool operator==(const symmetric_vertex<Vertex_id_bag>& v1, const symmetric_vertex<Vertex_id_bag>& v2) { using vtxid_type = typename symmetric_vertex<Vertex_id_bag>::vtxid_type; if (v1.get_out_degree() != v2.get_out_degree()) return false; for (vtxid_type i = 0; i < v1.get_out_degree(); i++) if (v1.get_out_neighbor(i) != v2.get_out_neighbor(i)) return false; return true; } template <class Vertex_id_bag> bool operator!=(const symmetric_vertex<Vertex_id_bag>& v1, const symmetric_vertex<Vertex_id_bag>& v2) { return ! (v1 == v2); } template <class Adjlist_seq> bool operator==(const adjlist<Adjlist_seq>& g1, const adjlist<Adjlist_seq>& g2) { using vtxid_type = typename adjlist<Adjlist_seq>::vtxid_type; if (g1.get_nb_vertices() != g2.get_nb_vertices()) return false; if (g1.nb_edges != g2.nb_edges) return false; for (vtxid_type i = 0; i < g1.get_nb_vertices(); i++) if (g1.adjlists[i] != g2.adjlists[i]) return false; return true; } template <class Adjlist_seq> bool operator!=(const adjlist<Adjlist_seq>& g1, const adjlist<Adjlist_seq>& g2) { return ! 
(g1 == g2); } /*---------------------------------------------------------------------*/ /* Flat adjacency-list format */ template <class Vertex_id, bool Is_alias = false> class flat_adjlist_seq { public: typedef flat_adjlist_seq<Vertex_id> self_type; typedef Vertex_id vtxid_type; typedef size_t size_type; typedef data::pointer_seq<vtxid_type> vertex_seq_type; typedef symmetric_vertex<vertex_seq_type> value_type; typedef flat_adjlist_seq<vtxid_type, true> alias_type; char* underlying_array; vtxid_type* offsets; vtxid_type nb_offsets; vtxid_type* edges; flat_adjlist_seq() : underlying_array(NULL), offsets(NULL), nb_offsets(0), edges(NULL) { } flat_adjlist_seq(const flat_adjlist_seq& other) { if (Is_alias) { underlying_array = other.underlying_array; offsets = other.offsets; nb_offsets = other.nb_offsets; edges = other.edges; } else { util::atomic::die("todo"); } } //! \todo instead of using Is_alias, pass either ptr_seq or array_seq as underlying_array ~flat_adjlist_seq() { if (! Is_alias) clear(); } void get_alias(alias_type& alias) const { alias.underlying_array = NULL; alias.offsets = offsets; alias.nb_offsets = nb_offsets; alias.edges = edges; } alias_type get_alias() const { alias_type alias; alias.underlying_array = NULL; alias.offsets = offsets; alias.nb_offsets = nb_offsets; alias.edges = edges; return alias; } void clear() { if (underlying_array != NULL) data::myfree(underlying_array); offsets = NULL; edges = NULL; } vtxid_type degree(vtxid_type v) const { assert(v >= 0); assert(v < size()); return offsets[v + 1] - offsets[v]; } value_type operator[](vtxid_type ix) const { assert(ix >= 0); assert(ix < size()); return value_type(vertex_seq_type(&edges[offsets[ix]], degree(ix))); } vtxid_type size() const { return nb_offsets - 1; } void swap(self_type& other) { std::swap(underlying_array, other.underlying_array); std::swap(offsets, other.offsets); std::swap(nb_offsets, other.nb_offsets); std::swap(edges, other.edges); } void alloc(size_type) { 
util::atomic::die("unsupported"); } void init(char* bytes, vtxid_type nb_vertices, edgeid_type nb_edges) { nb_offsets = nb_vertices + 1; underlying_array = bytes; offsets = (vtxid_type*)bytes; edges = &offsets[nb_offsets]; } value_type* data() { util::atomic::die("unsupported"); return NULL; } }; template <class Vertex_id, bool Is_alias = false> using flat_adjlist = adjlist<flat_adjlist_seq<Vertex_id, Is_alias>>; template <class Vertex_id> using flat_adjlist_alias = flat_adjlist<Vertex_id, true>; } // end namespace } // end namespace /***********************************************************************/ #endif /*! _PASL_GRAPH_ADJLIST_H_ */
laxmandhulipala/PWSA-Star
xytheta/pwsa_project/PWSA/pasl/graph/include/adjlist.hpp
C++
apache-2.0
8,982
package org.anddev.andengine.opengl.texture; import java.util.*; import org.anddev.andengine.opengl.texture.source.*; import org.anddev.andengine.util.*; import org.anddev.andengine.opengl.texture.builder.*; import android.graphics.*; public class BuildableTexture extends Texture { private final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace; public BuildableTexture(final int n, final int n2) { super(n, n2, TextureOptions.DEFAULT, null); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final ITextureStateListener textureStateListener) { super(n, n2, TextureOptions.DEFAULT, textureStateListener); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions) throws IllegalArgumentException { super(n, n2, textureOptions, null); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions, final ITextureStateListener textureStateListener) throws IllegalArgumentException { super(n, n2, textureOptions, textureStateListener); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } @Deprecated @Override public TextureSourceWithLocation addTextureSource(final ITextureSource textureSource, final int n, final int n2) { return super.addTextureSource(textureSource, n, n2); } public void addTextureSource(final ITextureSource textureSource, final Callback<TextureSourceWithLocation> callback) { this.mTextureSourcesToPlace.add(new TextureSourceWithWithLocationCallback(textureSource, callback)); } public void build(final ITextureBuilder textureBuilder) throws ITextureBuilder.TextureSourcePackingException { textureBuilder.pack(this, this.mTextureSourcesToPlace); this.mTextureSourcesToPlace.clear(); 
this.mUpdateOnHardwareNeeded = true; } @Override public void clearTextureSources() { super.clearTextureSources(); this.mTextureSourcesToPlace.clear(); } public void removeTextureSource(final ITextureSource textureSource) { final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace = this.mTextureSourcesToPlace; for (int i = -1 + mTextureSourcesToPlace.size(); i >= 0; --i) { if (mTextureSourcesToPlace.get(i).mTextureSource == textureSource) { mTextureSourcesToPlace.remove(i); this.mUpdateOnHardwareNeeded = true; return; } } } public static class TextureSourceWithWithLocationCallback implements ITextureSource { private final Callback<TextureSourceWithLocation> mCallback; private final ITextureSource mTextureSource; public TextureSourceWithWithLocationCallback(final ITextureSource mTextureSource, final Callback<TextureSourceWithLocation> mCallback) { super(); this.mTextureSource = mTextureSource; this.mCallback = mCallback; } @Override public TextureSourceWithWithLocationCallback clone() { return null; } public Callback<TextureSourceWithLocation> getCallback() { return this.mCallback; } @Override public int getHeight() { return this.mTextureSource.getHeight(); } public ITextureSource getTextureSource() { return this.mTextureSource; } @Override public int getWidth() { return this.mTextureSource.getWidth(); } @Override public Bitmap onLoadBitmap() { return this.mTextureSource.onLoadBitmap(); } @Override public String toString() { return this.mTextureSource.toString(); } } }
rLadia/AttacknidPatch
decompiled_src/Procyon/org/anddev/andengine/opengl/texture/BuildableTexture.java
Java
apache-2.0
4,239
/** vim: et:ts=4:sw=4:sts=4 * @license RequireJS 2.1.15 Copyright (c) 2010-2014, The Dojo Foundation All Rights Reserved. * Available via the MIT or new BSD license. * see: http://github.com/jrburke/requirejs for details */ //Not using strict: uneven strict support in browsers, #392, and causes //problems with requirejs.exec()/transpiler plugins that may not be strict. /*jslint regexp: true, nomen: true, sloppy: true */ /*global window, navigator, document, importScripts, setTimeout, opera */ var requirejs, require, define; (function (global) { var req, s, head, baseElement, dataMain, src, interactiveScript, currentlyAddingScript, mainScript, subPath, version = '2.1.15', commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg, cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g, jsSuffixRegExp = /\.js$/, currDirRegExp = /^\.\//, op = Object.prototype, ostring = op.toString, hasOwn = op.hasOwnProperty, ap = Array.prototype, apsp = ap.splice, isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document), isWebWorker = !isBrowser && typeof importScripts !== 'undefined', //PS3 indicates loaded and complete, but need to wait for complete //specifically. Sequence is 'loading', 'loaded', execution, // then 'complete'. The UA check is unfortunate, but not sure how //to feature test w/o causing perf issues. readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ? /^complete$/ : /^(complete|loaded)$/, defContextName = '_', //Oh the tragedy, detecting opera. See the usage of isOpera for reason. isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]', contexts = {}, cfg = {}, globalDefQueue = [], useInteractive = false; function isFunction(it) { return ostring.call(it) === '[object Function]'; } function isArray(it) { return ostring.call(it) === '[object Array]'; } /** * Helper function for iterating over an array. If the func returns * a true value, it will break out of the loop. 
*/ function each(ary, func) { if (ary) { var i; for (i = 0; i < ary.length; i += 1) { if (ary[i] && func(ary[i], i, ary)) { break; } } } } /** * Helper function for iterating over an array backwards. If the func * returns a true value, it will break out of the loop. */ function eachReverse(ary, func) { if (ary) { var i; for (i = ary.length - 1; i > -1; i -= 1) { if (ary[i] && func(ary[i], i, ary)) { break; } } } } function hasProp(obj, prop) { return hasOwn.call(obj, prop); } function getOwn(obj, prop) { return hasProp(obj, prop) && obj[prop]; } /** * Cycles over properties in an object and calls a function for each * property value. If the function returns a truthy value, then the * iteration is stopped. */ function eachProp(obj, func) { var prop; for (prop in obj) { if (hasProp(obj, prop)) { if (func(obj[prop], prop)) { break; } } } } /** * Simple function to mix in properties from source into target, * but only if target does not already have a property of the same name. */ function mixin(target, source, force, deepStringMixin) { if (source) { eachProp(source, function (value, prop) { if (force || !hasProp(target, prop)) { if (deepStringMixin && typeof value === 'object' && value && !isArray(value) && !isFunction(value) && !(value instanceof RegExp)) { if (!target[prop]) { target[prop] = {}; } mixin(target[prop], value, force, deepStringMixin); } else { target[prop] = value; } } }); } return target; } //Similar to Function.prototype.bind, but the 'this' object is specified //first, since it is easier to read/figure out what 'this' will be. function bind(obj, fn) { return function () { return fn.apply(obj, arguments); }; } function scripts() { return document.getElementsByTagName('script'); } function defaultOnError(err) { throw err; } //Allow getting a global that is expressed in //dot notation, like 'a.b.c'. 
function getGlobal(value) { if (!value) { return value; } var g = global; each(value.split('.'), function (part) { g = g[part]; }); return g; } /** * Constructs an error with a pointer to an URL with more information. * @param {String} id the error ID that maps to an ID on a web page. * @param {String} message human readable error. * @param {Error} [err] the original error, if there is one. * * @returns {Error} */ function makeError(id, msg, err, requireModules) { var e = new Error(msg + '\nhttp://requirejs.org/docs/errors.html#' + id); e.requireType = id; e.requireModules = requireModules; if (err) { e.originalError = err; } return e; } if (typeof define !== 'undefined') { //If a define is already in play via another AMD loader, //do not overwrite. return; } if (typeof requirejs !== 'undefined') { if (isFunction(requirejs)) { //Do not overwrite an existing requirejs instance. return; } cfg = requirejs; requirejs = undefined; } //Allow for a require config object if (typeof require !== 'undefined' && !isFunction(require)) { //assume it is a config object. cfg = require; require = undefined; } function newContext(contextName) { var inCheckLoaded, Module, context, handlers, checkLoadedTimeoutId, config = { //Defaults. Do not set a default for map //config to speed up normalize(), which //will run faster if there is no default. waitSeconds: 7, baseUrl: './', paths: {}, bundles: {}, pkgs: {}, shim: {}, config: {} }, registry = {}, //registry of just enabled modules, to speed //cycle breaking code when lots of modules //are registered, but not activated. enabledRegistry = {}, undefEvents = {}, defQueue = [], defined = {}, urlFetched = {}, bundlesMap = {}, requireCounter = 1, unnormalizedCounter = 1; /** * Trims the . and .. from an array of path segments. * It will keep a leading path segment if a .. will become * the first path segment, to help with module name lookups, * which act like paths, but can be remapped. 
But the end result, * all paths that use this function should look normalized. * NOTE: this method MODIFIES the input array. * @param {Array} ary the array of path segments. */ function trimDots(ary) { var i, part; for (i = 0; i < ary.length; i++) { part = ary[i]; if (part === '.') { ary.splice(i, 1); i -= 1; } else if (part === '..') { // If at the start, or previous value is still .., // keep them so that when converted to a path it may // still work when converted to a path, even though // as an ID it is less than ideal. In larger point // releases, may be better to just kick out an error. if (i === 0 || (i == 1 && ary[2] === '..') || ary[i - 1] === '..') { continue; } else if (i > 0) { ary.splice(i - 1, 2); i -= 2; } } } } /** * Given a relative module name, like ./something, normalize it to * a real name that can be mapped to a path. * @param {String} name the relative name * @param {String} baseName a real name that the name arg is relative * to. * @param {Boolean} applyMap apply the map config to the value. Should * only be done if this normalization is for a dependency ID. * @returns {String} normalized name */ function normalize(name, baseName, applyMap) { var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex, foundMap, foundI, foundStarMap, starI, normalizedBaseParts, baseParts = (baseName && baseName.split('/')), map = config.map, starMap = map && map['*']; //Adjust any relative paths. if (name) { name = name.split('/'); lastIndex = name.length - 1; // If wanting node ID compatibility, strip .js from end // of IDs. Have to do this here, and not in nameToUrl // because node allows either .js or non .js to map // to same file. if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) { name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, ''); } // Starts with a '.' so need the baseName if (name[0].charAt(0) === '.' && baseParts) { //Convert baseName to array, and lop off the last part, //so that . 
matches that 'directory' and not name of the baseName's //module. For instance, baseName of 'one/two/three', maps to //'one/two/three.js', but we want the directory, 'one/two' for //this normalization. normalizedBaseParts = baseParts.slice(0, baseParts.length - 1); name = normalizedBaseParts.concat(name); } trimDots(name); name = name.join('/'); } //Apply map config if available. if (applyMap && map && (baseParts || starMap)) { nameParts = name.split('/'); outerLoop: for (i = nameParts.length; i > 0; i -= 1) { nameSegment = nameParts.slice(0, i).join('/'); if (baseParts) { //Find the longest baseName segment match in the config. //So, do joins on the biggest to smallest lengths of baseParts. for (j = baseParts.length; j > 0; j -= 1) { mapValue = getOwn(map, baseParts.slice(0, j).join('/')); //baseName segment has config, find if it has one for //this name. if (mapValue) { mapValue = getOwn(mapValue, nameSegment); if (mapValue) { //Match, update name to the new value. foundMap = mapValue; foundI = i; break outerLoop; } } } } //Check for a star map match, but just hold on to it, //if there is a shorter segment match later in a matching //config, then favor over this star map. if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) { foundStarMap = getOwn(starMap, nameSegment); starI = i; } } if (!foundMap && foundStarMap) { foundMap = foundStarMap; foundI = starI; } if (foundMap) { nameParts.splice(0, foundI, foundMap); name = nameParts.join('/'); } } // If the name points to a package's name, use // the package main instead. pkgMain = getOwn(config.pkgs, name); return pkgMain ? 
pkgMain : name; } function removeScript(name) { if (isBrowser) { each(scripts(), function (scriptNode) { if (scriptNode.getAttribute('data-requiremodule') === name && scriptNode.getAttribute('data-requirecontext') === context.contextName) { scriptNode.parentNode.removeChild(scriptNode); return true; } }); } } function hasPathFallback(id) { var pathConfig = getOwn(config.paths, id); if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) { //Pop off the first array value, since it failed, and //retry pathConfig.shift(); context.require.undef(id); //Custom require that does not do map translation, since //ID is "absolute", already mapped/resolved. context.makeRequire(null, { skipMap: true })([id]); return true; } } //Turns a plugin!resource to [plugin, resource] //with the plugin being undefined if the name //did not have a plugin prefix. function splitPrefix(name) { var prefix, index = name ? name.indexOf('!') : -1; if (index > -1) { prefix = name.substring(0, index); name = name.substring(index + 1, name.length); } return [prefix, name]; } /** * Creates a module mapping that includes plugin prefix, module * name, and path. If parentModuleMap is provided it will * also normalize the name via require.normalize() * * @param {String} name the module name * @param {String} [parentModuleMap] parent module map * for the module name, used to resolve relative names. * @param {Boolean} isNormalized: is the ID already normalized. * This is true if this call is done for a define() module ID. * @param {Boolean} applyMap: apply the map config to the ID. * Should only be true if this map is for a dependency. * * @returns {Object} */ function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) { var url, pluginModule, suffix, nameParts, prefix = null, parentName = parentModuleMap ? parentModuleMap.name : null, originalName = name, isDefine = true, normalizedName = ''; //If no name, then it means it is a require call, generate an //internal name. 
if (!name) { isDefine = false; name = '_@r' + (requireCounter += 1); } nameParts = splitPrefix(name); prefix = nameParts[0]; name = nameParts[1]; if (prefix) { prefix = normalize(prefix, parentName, applyMap); pluginModule = getOwn(defined, prefix); } //Account for relative paths if there is a base name. if (name) { if (prefix) { if (pluginModule && pluginModule.normalize) { //Plugin is loaded, use its normalize method. normalizedName = pluginModule.normalize(name, function (name) { return normalize(name, parentName, applyMap); }); } else { // If nested plugin references, then do not try to // normalize, as it will not normalize correctly. This // places a restriction on resourceIds, and the longer // term solution is not to normalize until plugins are // loaded and all normalizations to allow for async // loading of a loader plugin. But for now, fixes the // common uses. Details in #1131 normalizedName = name.indexOf('!') === -1 ? normalize(name, parentName, applyMap) : name; } } else { //A regular module. normalizedName = normalize(name, parentName, applyMap); //Normalized name may be a plugin ID due to map config //application in normalize. The map config values must //already be normalized, so do not need to redo that part. nameParts = splitPrefix(normalizedName); prefix = nameParts[0]; normalizedName = nameParts[1]; isNormalized = true; url = context.nameToUrl(normalizedName); } } //If the id is a plugin id that cannot be determined if it needs //normalization, stamp it with a unique ID so two matching relative //ids that may conflict can be separate. suffix = prefix && !pluginModule && !isNormalized ? '_unnormalized' + (unnormalizedCounter += 1) : ''; return { prefix: prefix, name: normalizedName, parentMap: parentModuleMap, unnormalized: !!suffix, url: url, originalName: originalName, isDefine: isDefine, id: (prefix ? prefix + '!' 
+ normalizedName : normalizedName) + suffix }; } function getModule(depMap) { var id = depMap.id, mod = getOwn(registry, id); if (!mod) { mod = registry[id] = new context.Module(depMap); } return mod; } function on(depMap, name, fn) { var id = depMap.id, mod = getOwn(registry, id); if (hasProp(defined, id) && (!mod || mod.defineEmitComplete)) { if (name === 'defined') { fn(defined[id]); } } else { mod = getModule(depMap); if (mod.error && name === 'error') { fn(mod.error); } else { mod.on(name, fn); } } } function onError(err, errback) { var ids = err.requireModules, notified = false; if (errback) { errback(err); } else { each(ids, function (id) { var mod = getOwn(registry, id); if (mod) { //Set error on module, so it skips timeout checks. mod.error = err; if (mod.events.error) { notified = true; mod.emit('error', err); } } }); if (!notified) { req.onError(err); } } } /** * Internal method to transfer globalQueue items to this context's * defQueue. */ function takeGlobalQueue() { //Push all the globalDefQueue items into the context's defQueue if (globalDefQueue.length) { //Array splice in the values since the context code has a //local var ref to defQueue, so cannot just reassign the one //on context. apsp.apply(defQueue, [defQueue.length, 0].concat(globalDefQueue)); globalDefQueue = []; } } handlers = { 'require': function (mod) { if (mod.require) { return mod.require; } else { return (mod.require = context.makeRequire(mod.map)); } }, 'exports': function (mod) { mod.usingExports = true; if (mod.map.isDefine) { if (mod.exports) { return (defined[mod.map.id] = mod.exports); } else { return (mod.exports = defined[mod.map.id] = {}); } } }, 'module': function (mod) { if (mod.module) { return mod.module; } else { return (mod.module = { id: mod.map.id, uri: mod.map.url, config: function () { return getOwn(config.config, mod.map.id) || {}; }, exports: mod.exports || (mod.exports = {}) }); } } }; function cleanRegistry(id) { //Clean up machinery used for waiting modules. 
delete registry[id]; delete enabledRegistry[id]; } function breakCycle(mod, traced, processed) { var id = mod.map.id; if (mod.error) { mod.emit('error', mod.error); } else { traced[id] = true; each(mod.depMaps, function (depMap, i) { var depId = depMap.id, dep = getOwn(registry, depId); //Only force things that have not completed //being defined, so still in the registry, //and only if it has not been matched up //in the module already. if (dep && !mod.depMatched[i] && !processed[depId]) { if (getOwn(traced, depId)) { mod.defineDep(i, defined[depId]); mod.check(); //pass false? } else { breakCycle(dep, traced, processed); } } }); processed[id] = true; } } function checkLoaded() { var err, usingPathFallback, waitInterval = config.waitSeconds * 1000, //It is possible to disable the wait interval by using waitSeconds of 0. expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(), noLoads = [], reqCalls = [], stillLoading = false, needCycleCheck = true; //Do not bother if this call was a result of a cycle break. if (inCheckLoaded) { return; } inCheckLoaded = true; //Figure out the state of all the modules. eachProp(enabledRegistry, function (mod) { var map = mod.map, modId = map.id; //Skip things that are not enabled or in error state. if (!mod.enabled) { return; } if (!map.isDefine) { reqCalls.push(mod); } if (!mod.error) { //If the module should be executed, and it has not //been inited and time is up, remember it. if (!mod.inited && expired) { if (hasPathFallback(modId)) { usingPathFallback = true; stillLoading = true; } else { noLoads.push(modId); removeScript(modId); } } else if (!mod.inited && mod.fetched && map.isDefine) { stillLoading = true; if (!map.prefix) { //No reason to keep looking for unfinished //loading. If the only stillLoading is a //plugin resource though, keep going, //because it may be that a plugin resource //is waiting on a non-plugin cycle. 
return (needCycleCheck = false); } } } }); if (expired && noLoads.length) { //If wait time expired, throw error of unloaded modules. err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads); err.contextName = context.contextName; return onError(err); } //Not expired, check for a cycle. if (needCycleCheck) { each(reqCalls, function (mod) { breakCycle(mod, {}, {}); }); } //If still waiting on loads, and the waiting load is something //other than a plugin resource, or there are still outstanding //scripts, then just try back later. if ((!expired || usingPathFallback) && stillLoading) { //Something is still waiting to load. Wait for it, but only //if a timeout is not already in effect. if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) { checkLoadedTimeoutId = setTimeout(function () { checkLoadedTimeoutId = 0; checkLoaded(); }, 50); } } inCheckLoaded = false; } Module = function (map) { this.events = getOwn(undefEvents, map.id) || {}; this.map = map; this.shim = getOwn(config.shim, map.id); this.depExports = []; this.depMaps = []; this.depMatched = []; this.pluginMaps = {}; this.depCount = 0; /* this.exports this.factory this.depMaps = [], this.enabled, this.fetched */ }; Module.prototype = { init: function (depMaps, factory, errback, options) { options = options || {}; //Do not do more inits if already done. Can happen if there //are multiple define calls for the same module. That is not //a normal, common case, but it is also not unexpected. if (this.inited) { return; } this.factory = factory; if (errback) { //Register for errors on this module. this.on('error', errback); } else if (this.events.error) { //If no errback already, but there are error listeners //on this module, set up an errback to pass to the deps. errback = bind(this, function (err) { this.emit('error', err); }); } //Do a copy of the dependency array, so that //source inputs are not modified. 
For example //"shim" deps are passed in here directly, and //doing a direct modification of the depMaps array //would affect that config. this.depMaps = depMaps && depMaps.slice(0); this.errback = errback; //Indicate this module has be initialized this.inited = true; this.ignore = options.ignore; //Could have option to init this module in enabled mode, //or could have been previously marked as enabled. However, //the dependencies are not known until init is called. So //if enabled previously, now trigger dependencies as enabled. if (options.enabled || this.enabled) { //Enable this module and dependencies. //Will call this.check() this.enable(); } else { this.check(); } }, defineDep: function (i, depExports) { //Because of cycles, defined callback for a given //export can be called more than once. if (!this.depMatched[i]) { this.depMatched[i] = true; this.depCount -= 1; this.depExports[i] = depExports; } }, fetch: function () { if (this.fetched) { return; } this.fetched = true; context.startTime = (new Date()).getTime(); var map = this.map; //If the manager is for a plugin managed resource, //ask the plugin to load it now. if (this.shim) { context.makeRequire(this.map, { enableBuildCallback: true })(this.shim.deps || [], bind(this, function () { return map.prefix ? this.callPlugin() : this.load(); })); } else { //Regular dependency. return map.prefix ? this.callPlugin() : this.load(); } }, load: function () { var url = this.map.url; //Regular dependency. if (!urlFetched[url]) { urlFetched[url] = true; context.load(this.map.id, url); } }, /** * Checks if the module is ready to define itself, and if so, * define it. 
*/ check: function () { if (!this.enabled || this.enabling) { return; } var err, cjsModule, id = this.map.id, depExports = this.depExports, exports = this.exports, factory = this.factory; if (!this.inited) { this.fetch(); } else if (this.error) { this.emit('error', this.error); } else if (!this.defining) { //The factory could trigger another require call //that would result in checking this module to //define itself again. If already in the process //of doing that, skip this work. this.defining = true; if (this.depCount < 1 && !this.defined) { if (isFunction(factory)) { //If there is an error listener, favor passing //to that instead of throwing an error. However, //only do it for define()'d modules. require //errbacks should not be called for failures in //their callbacks (#699). However if a global //onError is set, use that. if ((this.events.error && this.map.isDefine) || req.onError !== defaultOnError) { try { exports = context.execCb(id, factory, depExports, exports); } catch (e) { err = e; } } else { exports = context.execCb(id, factory, depExports, exports); } // Favor return value over exports. If node/cjs in play, // then will not have a return value anyway. Favor // module.exports assignment over exports object. if (this.map.isDefine && exports === undefined) { cjsModule = this.module; if (cjsModule) { exports = cjsModule.exports; } else if (this.usingExports) { //exports already set the defined value. exports = this.exports; } } if (err) { err.requireMap = this.map; err.requireModules = this.map.isDefine ? [this.map.id] : null; err.requireType = this.map.isDefine ? 'define' : 'require'; return onError((this.error = err)); } } else { //Just a literal value exports = factory; } this.exports = exports; if (this.map.isDefine && !this.ignore) { defined[id] = exports; if (req.onResourceLoad) { req.onResourceLoad(context, this.map, this.depMaps); } } //Clean up cleanRegistry(id); this.defined = true; } //Finished the define stage. 
Allow calling check again //to allow define notifications below in the case of a //cycle. this.defining = false; if (this.defined && !this.defineEmitted) { this.defineEmitted = true; this.emit('defined', this.exports); this.defineEmitComplete = true; } } }, callPlugin: function () { var map = this.map, id = map.id, //Map already normalized the prefix. pluginMap = makeModuleMap(map.prefix); //Mark this as a dependency for this plugin, so it //can be traced for cycles. this.depMaps.push(pluginMap); on(pluginMap, 'defined', bind(this, function (plugin) { var load, normalizedMap, normalizedMod, bundleId = getOwn(bundlesMap, this.map.id), name = this.map.name, parentName = this.map.parentMap ? this.map.parentMap.name : null, localRequire = context.makeRequire(map.parentMap, { enableBuildCallback: true }); //If current map is not normalized, wait for that //normalized name to load instead of continuing. if (this.map.unnormalized) { //Normalize the ID if the plugin allows it. if (plugin.normalize) { name = plugin.normalize(name, function (name) { return normalize(name, parentName, true); }) || ''; } //prefix and name should already be normalized, no need //for applying map config again either. normalizedMap = makeModuleMap(map.prefix + '!' + name, this.map.parentMap); on(normalizedMap, 'defined', bind(this, function (value) { this.init([], function () { return value; }, null, { enabled: true, ignore: true }); })); normalizedMod = getOwn(registry, normalizedMap.id); if (normalizedMod) { //Mark this as a dependency for this plugin, so it //can be traced for cycles. this.depMaps.push(normalizedMap); if (this.events.error) { normalizedMod.on('error', bind(this, function (err) { this.emit('error', err); })); } normalizedMod.enable(); } return; } //If a paths config, then just load that file instead to //resolve the plugin, as it is built into that paths layer. 
if (bundleId) { this.map.url = context.nameToUrl(bundleId); this.load(); return; } load = bind(this, function (value) { this.init([], function () { return value; }, null, { enabled: true }); }); load.error = bind(this, function (err) { this.inited = true; this.error = err; err.requireModules = [id]; //Remove temp unnormalized modules for this module, //since they will never be resolved otherwise now. eachProp(registry, function (mod) { if (mod.map.id.indexOf(id + '_unnormalized') === 0) { cleanRegistry(mod.map.id); } }); onError(err); }); //Allow plugins to load other code without having to know the //context or how to 'complete' the load. load.fromText = bind(this, function (text, textAlt) { /*jslint evil: true */ var moduleName = map.name, moduleMap = makeModuleMap(moduleName), hasInteractive = useInteractive; //As of 2.1.0, support just passing the text, to reinforce //fromText only being called once per resource. Still //support old style of passing moduleName but discard //that moduleName in favor of the internal ref. if (textAlt) { text = textAlt; } //Turn off interactive script matching for IE for any define //calls in the text, then turn it back on at the end. if (hasInteractive) { useInteractive = false; } //Prime the system by creating a module instance for //it. getModule(moduleMap); //Transfer any config to this other module. if (hasProp(config.config, id)) { config.config[moduleName] = config.config[id]; } try { req.exec(text); } catch (e) { return onError(makeError('fromtexteval', 'fromText eval for ' + id + ' failed: ' + e, e, [id])); } if (hasInteractive) { useInteractive = true; } //Mark this as a dependency for the plugin //resource this.depMaps.push(moduleMap); //Support anonymous modules. context.completeLoad(moduleName); //Bind the value of that module to the value for this //resource ID. 
localRequire([moduleName], load); }); //Use parentName here since the plugin's name is not reliable, //could be some weird string with no path that actually wants to //reference the parentName's path. plugin.load(map.name, localRequire, load, config); })); context.enable(pluginMap, this); this.pluginMaps[pluginMap.id] = pluginMap; }, enable: function () { enabledRegistry[this.map.id] = this; this.enabled = true; //Set flag mentioning that the module is enabling, //so that immediate calls to the defined callbacks //for dependencies do not trigger inadvertent load //with the depCount still being zero. this.enabling = true; //Enable each dependency each(this.depMaps, bind(this, function (depMap, i) { var id, mod, handler; if (typeof depMap === 'string') { //Dependency needs to be converted to a depMap //and wired up to this module. depMap = makeModuleMap(depMap, (this.map.isDefine ? this.map : this.map.parentMap), false, !this.skipMap); this.depMaps[i] = depMap; handler = getOwn(handlers, depMap.id); if (handler) { this.depExports[i] = handler(this); return; } this.depCount += 1; on(depMap, 'defined', bind(this, function (depExports) { this.defineDep(i, depExports); this.check(); })); if (this.errback) { on(depMap, 'error', bind(this, this.errback)); } } id = depMap.id; mod = registry[id]; //Skip special modules like 'require', 'exports', 'module' //Also, don't call enable if it is already enabled, //important in circular dependency cases. 
if (!hasProp(handlers, id) && mod && !mod.enabled) { context.enable(depMap, this); } })); //Enable each plugin that is used in //a dependency eachProp(this.pluginMaps, bind(this, function (pluginMap) { var mod = getOwn(registry, pluginMap.id); if (mod && !mod.enabled) { context.enable(pluginMap, this); } })); this.enabling = false; this.check(); }, on: function (name, cb) { var cbs = this.events[name]; if (!cbs) { cbs = this.events[name] = []; } cbs.push(cb); }, emit: function (name, evt) { each(this.events[name], function (cb) { cb(evt); }); if (name === 'error') { //Now that the error handler was triggered, remove //the listeners, since this broken Module instance //can stay around for a while in the registry. delete this.events[name]; } } }; function callGetModule(args) { //Skip modules already defined. if (!hasProp(defined, args[0])) { getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]); } } function removeListener(node, func, name, ieName) { //Favor detachEvent because of IE9 //issue, see attachEvent/addEventListener comment elsewhere //in this file. if (node.detachEvent && !isOpera) { //Probably IE. If not it will throw an error, which will be //useful to know. if (ieName) { node.detachEvent(ieName, func); } } else { node.removeEventListener(name, func, false); } } /** * Given an event from a script node, get the requirejs info from it, * and then removes the event listeners on the node. * @param {Event} evt * @returns {Object} */ function getScriptData(evt) { //Using currentTarget instead of target for Firefox 2.0's sake. Not //all old browsers will be supported, but this one was easy enough //to support and still makes sense. var node = evt.currentTarget || evt.srcElement; //Remove the listeners once here. 
removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange'); removeListener(node, context.onScriptError, 'error'); return { node: node, id: node && node.getAttribute('data-requiremodule') }; } function intakeDefines() { var args; //Any defined modules in the global queue, intake them now. takeGlobalQueue(); //Make sure any remaining defQueue items get properly processed. while (defQueue.length) { args = defQueue.shift(); if (args[0] === null) { return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' + args[args.length - 1])); } else { //args are id, deps, factory. Should be normalized by the //define() function. callGetModule(args); } } } context = { config: config, contextName: contextName, registry: registry, defined: defined, urlFetched: urlFetched, defQueue: defQueue, Module: Module, makeModuleMap: makeModuleMap, nextTick: req.nextTick, onError: onError, /** * Set a configuration for the context. * @param {Object} cfg config object to integrate. */ configure: function (cfg) { //Make sure the baseUrl ends in a slash. if (cfg.baseUrl) { if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') { cfg.baseUrl += '/'; } } //Save off the paths since they require special processing, //they are additive. 
var shim = config.shim, objs = { paths: true, bundles: true, config: true, map: true }; eachProp(cfg, function (value, prop) { if (objs[prop]) { if (!config[prop]) { config[prop] = {}; } mixin(config[prop], value, true, true); } else { config[prop] = value; } }); //Reverse map the bundles if (cfg.bundles) { eachProp(cfg.bundles, function (value, prop) { each(value, function (v) { if (v !== prop) { bundlesMap[v] = prop; } }); }); } //Merge shim if (cfg.shim) { eachProp(cfg.shim, function (value, id) { //Normalize the structure if (isArray(value)) { value = { deps: value }; } if ((value.exports || value.init) && !value.exportsFn) { value.exportsFn = context.makeShimExports(value); } shim[id] = value; }); config.shim = shim; } //Adjust packages if necessary. if (cfg.packages) { each(cfg.packages, function (pkgObj) { var location, name; pkgObj = typeof pkgObj === 'string' ? { name: pkgObj } : pkgObj; name = pkgObj.name; location = pkgObj.location; if (location) { config.paths[name] = pkgObj.location; } //Save pointer to main module ID for pkg name. //Remove leading dot in main, so main paths are normalized, //and remove any trailing .js, since different package //envs have different conventions: some use a module name, //some use a file name. config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main') .replace(currDirRegExp, '') .replace(jsSuffixRegExp, ''); }); } //If there are any "waiting to execute" modules in the registry, //update the maps for them, since their info, like URLs to load, //may have changed. eachProp(registry, function (mod, id) { //If module already has init called, since it is too //late to modify them, and ignore unnormalized ones //since they are transient. if (!mod.inited && !mod.map.unnormalized) { mod.map = makeModuleMap(id); } }); //If a deps array or a config callback is specified, then call //require with those args. This is useful when require is defined as a //config object before require.js is loaded. 
if (cfg.deps || cfg.callback) { context.require(cfg.deps || [], cfg.callback); } }, makeShimExports: function (value) { function fn() { var ret; if (value.init) { ret = value.init.apply(global, arguments); } return ret || (value.exports && getGlobal(value.exports)); } return fn; }, makeRequire: function (relMap, options) { options = options || {}; function localRequire(deps, callback, errback) { var id, map, requireMod; if (options.enableBuildCallback && callback && isFunction(callback)) { callback.__requireJsBuild = true; } if (typeof deps === 'string') { if (isFunction(callback)) { //Invalid call return onError(makeError('requireargs', 'Invalid require call'), errback); } //If require|exports|module are requested, get the //value for them from the special handlers. Caveat: //this only works while module is being defined. if (relMap && hasProp(handlers, deps)) { return handlers[deps](registry[relMap.id]); } //Synchronous access to one module. If require.get is //available (as in the Node adapter), prefer that. if (req.get) { return req.get(context, deps, relMap, localRequire); } //Normalize module name, if it contains . or .. map = makeModuleMap(deps, relMap, false, true); id = map.id; if (!hasProp(defined, id)) { return onError(makeError('notloaded', 'Module name "' + id + '" has not been loaded yet for context: ' + contextName + (relMap ? '' : '. Use require([])'))); } return defined[id]; } //Grab defines waiting in the global queue. intakeDefines(); //Mark all the dependencies as needing to be loaded. context.nextTick(function () { //Some defines could have been added since the //require call, collect them. intakeDefines(); requireMod = getModule(makeModuleMap(null, relMap)); //Store if map config should be applied to this require //call for dependencies. 
requireMod.skipMap = options.skipMap; requireMod.init(deps, callback, errback, { enabled: true }); checkLoaded(); }); return localRequire; } mixin(localRequire, { isBrowser: isBrowser, /** * Converts a module name + .extension into an URL path. * *Requires* the use of a module name. It does not support using * plain URLs like nameToUrl. */ toUrl: function (moduleNamePlusExt) { var ext, index = moduleNamePlusExt.lastIndexOf('.'), segment = moduleNamePlusExt.split('/')[0], isRelative = segment === '.' || segment === '..'; //Have a file extension alias, and it is not the //dots from a relative path. if (index !== -1 && (!isRelative || index > 1)) { ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length); moduleNamePlusExt = moduleNamePlusExt.substring(0, index); } return context.nameToUrl(normalize(moduleNamePlusExt, relMap && relMap.id, true), ext, true); }, defined: function (id) { return hasProp(defined, makeModuleMap(id, relMap, false, true).id); }, specified: function (id) { id = makeModuleMap(id, relMap, false, true).id; return hasProp(defined, id) || hasProp(registry, id); } }); //Only allow undef on top level require calls if (!relMap) { localRequire.undef = function (id) { //Bind any waiting define() calls to this context, //fix for #408 takeGlobalQueue(); var map = makeModuleMap(id, relMap, true), mod = getOwn(registry, id); removeScript(id); delete defined[id]; delete urlFetched[map.url]; delete undefEvents[id]; //Clean queued defines too. Go backwards //in array so that the splices do not //mess up the iteration. eachReverse(defQueue, function (args, i) { if (args[0] === id) { defQueue.splice(i, 1); } }); if (mod) { //Hold on to listeners in case the //module will be attempted to be reloaded //using a different config. if (mod.events.defined) { undefEvents[id] = mod.events; } cleanRegistry(id); } }; } return localRequire; }, /** * Called to enable a module if it is still in the registry * awaiting enablement. 
A second arg, parent, the parent module, * is passed in for context, when this method is overridden by * the optimizer. Not shown here to keep code compact. */ enable: function (depMap) { var mod = getOwn(registry, depMap.id); if (mod) { getModule(depMap).enable(); } }, /** * Internal method used by environment adapters to complete a load event. * A load event could be a script load or just a load pass from a synchronous * load call. * @param {String} moduleName the name of the module to potentially complete. */ completeLoad: function (moduleName) { var found, args, mod, shim = getOwn(config.shim, moduleName) || {}, shExports = shim.exports; takeGlobalQueue(); while (defQueue.length) { args = defQueue.shift(); if (args[0] === null) { args[0] = moduleName; //If already found an anonymous module and bound it //to this name, then this is some other anon module //waiting for its completeLoad to fire. if (found) { break; } found = true; } else if (args[0] === moduleName) { //Found matching define call for this script! found = true; } callGetModule(args); } //Do this after the cycle of callGetModule in case the result //of those calls/init calls changes the registry. mod = getOwn(registry, moduleName); if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) { if (config.enforceDefine && (!shExports || !getGlobal(shExports))) { if (hasPathFallback(moduleName)) { return; } else { return onError(makeError('nodefine', 'No define call for ' + moduleName, null, [moduleName])); } } else { //A script that does not call define(), so just simulate //the call for it. callGetModule([moduleName, (shim.deps || []), shim.exportsFn]); } } checkLoaded(); }, /** * Converts a module name to a file path. Supports cases where * moduleName may actually be just an URL. * Note that it **does not** call normalize on the moduleName, * it is assumed to have already been normalized. This is an * internal API, not a public one. Use toUrl for the public API. 
*/ nameToUrl: function (moduleName, ext, skipExt) { var paths, syms, i, parentModule, url, parentPath, bundleId, pkgMain = getOwn(config.pkgs, moduleName); if (pkgMain) { moduleName = pkgMain; } bundleId = getOwn(bundlesMap, moduleName); if (bundleId) { return context.nameToUrl(bundleId, ext, skipExt); } //If a colon is in the URL, it indicates a protocol is used and it is just //an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?) //or ends with .js, then assume the user meant to use an url and not a module id. //The slash is important for protocol-less URLs as well as full paths. if (req.jsExtRegExp.test(moduleName)) { //Just a plain path, not module name lookup, so just return it. //Add extension if it is included. This is a bit wonky, only non-.js things pass //an extension, this method probably needs to be reworked. url = moduleName + (ext || ''); } else { //A module that needs to be converted to a path. paths = config.paths; syms = moduleName.split('/'); //For each module name segment, see if there is a path //registered for it. Start with most specific name //and work up from it. for (i = syms.length; i > 0; i -= 1) { parentModule = syms.slice(0, i).join('/'); parentPath = getOwn(paths, parentModule); if (parentPath) { //If an array, it means there are a few choices, //Choose the one that is desired if (isArray(parentPath)) { parentPath = parentPath[0]; } syms.splice(0, i, parentPath); break; } } //Join the path parts together, then figure out if baseUrl is needed. url = syms.join('/'); url += (ext || (/^data\:|\?/.test(url) || skipExt ? '' : '.js')); url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url; } return config.urlArgs ? url + ((url.indexOf('?') === -1 ? '?' : '&') + config.urlArgs) : url; }, //Delegates to req.load. Broken out as a separate function to //allow overriding in the optimizer. load: function (id, url) { req.load(context, id, url); }, /** * Executes a module callback function. 
Broken out as a separate function * solely to allow the build system to sequence the files in the built * layer in the right sequence. * * @private */ execCb: function (name, callback, args, exports) { return callback.apply(exports, args); }, /** * callback for script loads, used to check status of loading. * * @param {Event} evt the event from the browser for the script * that was loaded. */ onScriptLoad: function (evt) { //Using currentTarget instead of target for Firefox 2.0's sake. Not //all old browsers will be supported, but this one was easy enough //to support and still makes sense. if (evt.type === 'load' || (readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) { //Reset interactive script so a script node is not held onto for //to long. interactiveScript = null; //Pull out the name of the module and the context. var data = getScriptData(evt); context.completeLoad(data.id); } }, /** * Callback for script errors. */ onScriptError: function (evt) { var data = getScriptData(evt); if (!hasPathFallback(data.id)) { return onError(makeError('scripterror', 'Script error for: ' + data.id, evt, [data.id])); } } }; context.require = context.makeRequire(); return context; } /** * Main entry point. * * If the only argument to require is a string, then the module that * is represented by that string is fetched for the appropriate context. * * If the first argument is an array, then it will be treated as an array * of dependency string names to fetch. An optional function callback can * be specified to execute when all of those dependencies are available. * * Make a local req variable to help Caja compliance (it assumes things * on a require that are not standardized), and to give a short * name for minification/local scope use. */ req = requirejs = function (deps, callback, errback, optional) { //Find the right context, use default var context, config, contextName = defContextName; // Determine if have config object in the call. 
if (!isArray(deps) && typeof deps !== 'string') { // deps is a config object config = deps; if (isArray(callback)) { // Adjust args if there are dependencies deps = callback; callback = errback; errback = optional; } else { deps = []; } } if (config && config.context) { contextName = config.context; } context = getOwn(contexts, contextName); if (!context) { context = contexts[contextName] = req.s.newContext(contextName); } if (config) { context.configure(config); } return context.require(deps, callback, errback); }; /** * Support require.config() to make it easier to cooperate with other * AMD loaders on globally agreed names. */ req.config = function (config) { return req(config); }; /** * Execute something after the current tick * of the event loop. Override for other envs * that have a better solution than setTimeout. * @param {Function} fn function to execute later. */ req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) { setTimeout(fn, 4); } : function (fn) { fn(); }; /** * Export require as a global, but only if it does not already exist. */ if (!require) { require = req; } req.version = version; //Used to filter out dependencies that are already paths. req.jsExtRegExp = /^\/|:|\?|\.js$/; req.isBrowser = isBrowser; s = req.s = { contexts: contexts, newContext: newContext }; //Create default context. req({}); //Exports some context-sensitive methods on global require. each([ 'toUrl', 'undef', 'defined', 'specified' ], function (prop) { //Reference from contexts instead of early binding to default context, //so that during builds, the latest instance of the default context //with its config gets used. req[prop] = function () { var ctx = contexts[defContextName]; return ctx.require[prop].apply(ctx, arguments); }; }); if (isBrowser) { head = s.head = document.getElementsByTagName('head')[0]; //If BASE tag is in play, using appendChild is a problem for IE6. //When that browser dies, this can be removed. 
Details in this jQuery bug: //http://dev.jquery.com/ticket/2709 baseElement = document.getElementsByTagName('base')[0]; if (baseElement) { head = s.head = baseElement.parentNode; } } /** * Any errors that require explicitly generates will be passed to this * function. Intercept/override it if you want custom error handling. * @param {Error} err the error object. */ req.onError = defaultOnError; /** * Creates the node for the load command. Only used in browser envs. */ req.createNode = function (config, moduleName, url) { var node = config.xhtml ? document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') : document.createElement('script'); node.type = config.scriptType || 'text/javascript'; node.charset = 'utf-8'; node.async = true; return node; }; /** * Does the request to load a module for the browser case. * Make this a separate function to allow other environments * to override it. * * @param {Object} context the require context to find state. * @param {String} moduleName the name of the module. * @param {Object} url the URL to the module. */ req.load = function (context, moduleName, url) { var config = (context && context.config) || {}, node; if (isBrowser) { //In the browser so use a script tag node = req.createNode(config, moduleName, url); node.setAttribute('data-requirecontext', context.contextName); node.setAttribute('data-requiremodule', moduleName); //Set up load listener. Test attachEvent first because IE9 has //a subtle issue in its addEventListener and script onload firings //that do not match the behavior of all other browsers with //addEventListener support, which fire the onload event for a //script right after the script execution. See: //https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution //UNFORTUNATELY Opera implements attachEvent but does not follow the script //script execution mode. 
if (node.attachEvent && //Check if node.attachEvent is artificially added by custom script or //natively supported by browser //read https://github.com/jrburke/requirejs/issues/187 //if we can NOT find [native code] then it must NOT natively supported. //in IE8, node.attachEvent does not have toString() //Note the test for "[native code" with no closing brace, see: //https://github.com/jrburke/requirejs/issues/273 !(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) && !isOpera) { //Probably IE. IE (at least 6-8) do not fire //script onload right after executing the script, so //we cannot tie the anonymous define call to a name. //However, IE reports the script as being in 'interactive' //readyState at the time of the define call. useInteractive = true; node.attachEvent('onreadystatechange', context.onScriptLoad); //It would be great to add an error handler here to catch //404s in IE9+. However, onreadystatechange will fire before //the error handler, so that does not help. If addEventListener //is used, then IE will fire error before load, but we cannot //use that pathway given the connect.microsoft.com issue //mentioned above about not doing the 'script execute, //then fire the script load event listener before execute //next script' that other browsers do. //Best hope: IE10 fixes the issues, //and then destroys all installs of IE 6-9. //node.attachEvent('onerror', context.onScriptError); } else { node.addEventListener('load', context.onScriptLoad, false); node.addEventListener('error', context.onScriptError, false); } node.src = url; //For some cache cases in IE 6-8, the script executes before the end //of the appendChild execution, so to tie an anonymous define //call to the module name (which is stored on the node), hold on //to a reference to this node, but clear after the DOM insertion. 
currentlyAddingScript = node; if (baseElement) { head.insertBefore(node, baseElement); } else { head.appendChild(node); } currentlyAddingScript = null; return node; } else if (isWebWorker) { try { //In a web worker, use importScripts. This is not a very //efficient use of importScripts, importScripts will block until //its script is downloaded and evaluated. However, if web workers //are in play, the expectation that a build has been done so that //only one script needs to be loaded anyway. This may need to be //reevaluated if other use cases become common. importScripts(url); //Account for anonymous modules context.completeLoad(moduleName); } catch (e) { context.onError(makeError('importscripts', 'importScripts failed for ' + moduleName + ' at ' + url, e, [moduleName])); } } }; function getInteractiveScript() { if (interactiveScript && interactiveScript.readyState === 'interactive') { return interactiveScript; } eachReverse(scripts(), function (script) { if (script.readyState === 'interactive') { return (interactiveScript = script); } }); return interactiveScript; } //Look for a data-main script attribute, which could also adjust the baseUrl. if (isBrowser && !cfg.skipDataMain) { //Figure out baseUrl. Get it from the script tag with require.js in it. eachReverse(scripts(), function (script) { //Set the 'head' where we can append children by //using the script's parent. if (!head) { head = script.parentNode; } //Look for a data-main attribute to set main script for the page //to load. If it is there, the path to data main becomes the //baseUrl, if it is not already set. dataMain = script.getAttribute('data-main'); if (dataMain) { //Preserve dataMain in case it is a path (i.e. contains '?') mainScript = dataMain; //Set final baseUrl if there is not already an explicit one. if (!cfg.baseUrl) { //Pull off the directory of data-main for use as the //baseUrl. src = mainScript.split('/'); mainScript = src.pop(); subPath = src.length ? 
src.join('/') + '/' : './'; cfg.baseUrl = subPath; } //Strip off any trailing .js since mainScript is now //like a module name. mainScript = mainScript.replace(jsSuffixRegExp, ''); //If mainScript is still a path, fall back to dataMain if (req.jsExtRegExp.test(mainScript)) { mainScript = dataMain; } //Put the data-main script in the files to load. cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript]; return true; } }); } /** * The function that handles definitions of modules. Differs from * require() in that a string for the module should be the first argument, * and the function to execute after dependencies are loaded should * return a value to define the module corresponding to the first argument's * name. */ define = function (name, deps, callback) { var node, context; //Allow for anonymous modules if (typeof name !== 'string') { //Adjust args appropriately callback = deps; deps = name; name = null; } //This module may not have dependencies if (!isArray(deps)) { callback = deps; deps = null; } //If no name, and callback is a function, then figure out if it a //CommonJS thing with dependencies. if (!deps && isFunction(callback)) { deps = []; //Remove comments from the callback string, //look for require calls, and pull them into the dependencies, //but only if there are function args. if (callback.length) { callback .toString() .replace(commentRegExp, '') .replace(cjsRequireRegExp, function (match, dep) { deps.push(dep); }); //May be a CommonJS thing even without require calls, but still //could use exports, and module. Avoid doing exports and module //work though if it just needs require. //REQUIRES the function to expect the CommonJS variables in the //order listed below. deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps); } } //If in IE 6-8 and hit an anonymous define() call, do the interactive //work. 
if (useInteractive) { node = currentlyAddingScript || getInteractiveScript(); if (node) { if (!name) { name = node.getAttribute('data-requiremodule'); } context = contexts[node.getAttribute('data-requirecontext')]; } } //Always save off evaluating the def call until the script onload handler. //This allows multiple modules to be in a file without prematurely //tracing dependencies, and allows for anonymous module support, //where the module name is not known until the script onload event //occurs. If no context, use the global queue, and get it processed //in the onscript load callback. (context ? context.defQueue : globalDefQueue).push([name, deps, callback]); }; define.amd = { jQuery: true }; /** * Executes the text. Normally just uses eval, but can be modified * to use a better, environment-specific call. Only used for transpiling * loader plugins, not for plain JS modules. * @param {String} text the text to execute/evaluate. */ req.exec = function (text) { /*jslint evil: true */ return eval(text); }; //Set up with config info. req(cfg); }(this));
suttonj/YoutubePlaylistCuratorChromeExtension
src/js/thirdParty/require.js
JavaScript
apache-2.0
83,052
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.activiti.dmn.engine.impl.parser;

import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.activiti.dmn.engine.ActivitiDmnException;
import org.activiti.dmn.engine.DmnEngineConfiguration;
import org.activiti.dmn.engine.impl.context.Context;
import org.activiti.dmn.engine.impl.io.InputStreamSource;
import org.activiti.dmn.engine.impl.io.ResourceStreamSource;
import org.activiti.dmn.engine.impl.io.StreamSource;
import org.activiti.dmn.engine.impl.io.StringStreamSource;
import org.activiti.dmn.engine.impl.io.UrlStreamSource;
import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntity;
import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntity;
import org.activiti.dmn.model.Decision;
import org.activiti.dmn.model.DmnDefinition;
import org.activiti.dmn.xml.constants.DmnXMLConstants;
import org.activiti.dmn.xml.converter.DmnXMLConverter;
import org.activiti.dmn.xml.exception.DmnXMLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Specific parsing of one DMN XML file into a {@link DmnDefinition} and the
 * corresponding {@link DecisionTableEntity} instances.
 *
 * <p>Usage is a builder-style chain: configure exactly one source
 * ({@code sourceInputStream}, {@code sourceUrl}, {@code sourceResource} or
 * {@code sourceString}), optionally a name and a deployment, then call
 * {@link #execute(DmnEngineConfiguration)}.
 *
 * @author Tijs Rademakers
 * @author Joram Barrez
 */
public class DmnParse implements DmnXMLConstants {

    protected static final Logger LOGGER = LoggerFactory.getLogger(DmnParse.class);

    /** Logical name of the source; used as the resource name on parsed decision tables. */
    protected String name;

    /** Whether the DMN XML is validated against the schema while converting. Defaults to true. */
    protected boolean validateSchema = true;

    /** The single source of DMN XML to parse; set exactly once via one of the source* methods. */
    protected StreamSource streamSource;
    protected String sourceSystemId;

    /** The parsed model; populated by {@link #execute(DmnEngineConfiguration)}. */
    protected DmnDefinition dmnDefinition;

    // NOTE(review): never assigned in this class; presumably kept for API
    // symmetry with the BPMN parser — confirm before removing.
    protected String targetNamespace;

    /** The deployment to which the parsed decision tables will be added. */
    protected DmnDeploymentEntity deployment;

    /** The end result of the parsing: a list of decision tables. */
    protected List<DecisionTableEntity> decisionTables = new ArrayList<>();

    /**
     * Sets the deployment the parsed decision tables belong to.
     *
     * @param deployment target deployment (must be set before {@link #execute})
     * @return this parse instance, for chaining
     */
    public DmnParse deployment(DmnDeploymentEntity deployment) {
        this.deployment = deployment;
        return this;
    }

    /**
     * Parses the configured {@link StreamSource} into a {@link DmnDefinition}
     * and creates one {@link DecisionTableEntity} per decision in the model.
     *
     * @param dmnEngineConfig engine configuration supplying XML parsing settings
     *            (safe-XML flag and optional encoding)
     * @return this parse instance, for chaining
     * @throws ActivitiDmnException if parsing fails for a non-XML reason
     * @throws DmnXMLException if the DMN XML itself is invalid
     */
    public DmnParse execute(DmnEngineConfiguration dmnEngineConfig) {
        try {
            DmnXMLConverter converter = new DmnXMLConverter();

            boolean enableSafeDmnXml = dmnEngineConfig.isEnableSafeDmnXml();
            String encoding = dmnEngineConfig.getXmlEncoding();

            if (encoding != null) {
                dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml, encoding);
            } else {
                dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml);
            }

            if (dmnDefinition != null && dmnDefinition.getDecisions() != null) {
                for (Decision decision : dmnDefinition.getDecisions()) {
                    // NOTE(review): entity manager is looked up via the thread-local
                    // Context rather than the dmnEngineConfig parameter — presumably
                    // intentional (command-context scoping); confirm before changing.
                    DecisionTableEntity decisionTableEntity = Context.getDmnEngineConfiguration().getDecisionTableEntityManager().create();
                    decisionTableEntity.setKey(decision.getId());
                    decisionTableEntity.setName(decision.getName());
                    decisionTableEntity.setResourceName(name);
                    decisionTableEntity.setDeploymentId(deployment.getId());
                    decisionTableEntity.setParentDeploymentId(deployment.getParentDeploymentId());
                    decisionTableEntity.setDescription(decision.getDescription());
                    decisionTables.add(decisionTableEntity);
                }
            }

        } catch (ActivitiDmnException e) {
            // Domain exceptions pass through unchanged (replaces the previous
            // catch-Exception + instanceof-and-cast chain with dedicated clauses).
            throw e;
        } catch (DmnXMLException e) {
            throw e;
        } catch (Exception e) {
            throw new ActivitiDmnException("Error parsing XML", e);
        }
        return this;
    }

    /**
     * Sets the logical name used as the resource name of the parsed decision tables.
     *
     * @param name source name
     * @return this parse instance, for chaining
     */
    public DmnParse name(String name) {
        this.name = name;
        return this;
    }

    /**
     * Uses the given input stream as the DMN XML source.
     *
     * @param inputStream stream containing DMN XML
     * @return this parse instance, for chaining
     */
    public DmnParse sourceInputStream(InputStream inputStream) {
        if (name == null) {
            name("inputStream");
        }
        setStreamSource(new InputStreamSource(inputStream));
        return this;
    }

    /**
     * Uses the given URL as the DMN XML source.
     *
     * @param url location of the DMN XML
     * @return this parse instance, for chaining
     */
    public DmnParse sourceUrl(URL url) {
        if (name == null) {
            name(url.toString());
        }
        setStreamSource(new UrlStreamSource(url));
        return this;
    }

    /**
     * Uses the given URL string as the DMN XML source.
     *
     * @param url location of the DMN XML
     * @return this parse instance, for chaining
     * @throws ActivitiDmnException if the string is not a well-formed URL
     */
    public DmnParse sourceUrl(String url) {
        try {
            return sourceUrl(new URL(url));
        } catch (MalformedURLException e) {
            throw new ActivitiDmnException("malformed url: " + url, e);
        }
    }

    /**
     * Uses the given classpath resource as the DMN XML source.
     *
     * @param resource classpath resource name
     * @return this parse instance, for chaining
     */
    public DmnParse sourceResource(String resource) {
        if (name == null) {
            name(resource);
        }
        setStreamSource(new ResourceStreamSource(resource));
        return this;
    }

    /**
     * Uses the given string as the DMN XML source.
     *
     * @param string DMN XML content
     * @return this parse instance, for chaining
     */
    public DmnParse sourceString(String string) {
        if (name == null) {
            name("string");
        }
        setStreamSource(new StringStreamSource(string));
        return this;
    }

    /**
     * Installs the stream source; guards against configuring more than one source.
     *
     * @param streamSource the single source to parse
     * @throws ActivitiDmnException if a source was already configured
     */
    protected void setStreamSource(StreamSource streamSource) {
        if (this.streamSource != null) {
            throw new ActivitiDmnException("invalid: multiple sources " + this.streamSource + " and " + streamSource);
        }
        this.streamSource = streamSource;
    }

    public String getSourceSystemId() {
        return sourceSystemId;
    }

    public DmnParse setSourceSystemId(String sourceSystemId) {
        this.sourceSystemId = sourceSystemId;
        return this;
    }

    /*
     * ------------------- GETTERS AND SETTERS -------------------
     */

    public boolean isValidateSchema() {
        return validateSchema;
    }

    public void setValidateSchema(boolean validateSchema) {
        this.validateSchema = validateSchema;
    }

    public List<DecisionTableEntity> getDecisionTables() {
        return decisionTables;
    }

    public String getTargetNamespace() {
        return targetNamespace;
    }

    public DmnDeploymentEntity getDeployment() {
        return deployment;
    }

    public void setDeployment(DmnDeploymentEntity deployment) {
        this.deployment = deployment;
    }

    public DmnDefinition getDmnDefinition() {
        return dmnDefinition;
    }

    public void setDmnDefinition(DmnDefinition dmnDefinition) {
        this.dmnDefinition = dmnDefinition;
    }
}
stefan-ziel/Activiti
modules/activiti-dmn-engine/src/main/java/org/activiti/dmn/engine/impl/parser/DmnParse.java
Java
apache-2.0
6,382
import application = require("application"); // Specify custom UIApplicationDelegate. /* class MyDelegate extends UIResponder implements UIApplicationDelegate { public static ObjCProtocols = [UIApplicationDelegate]; applicationDidFinishLaunchingWithOptions(application: UIApplication, launchOptions: NSDictionary): boolean { console.log("applicationWillFinishLaunchingWithOptions: " + launchOptions) return true; } applicationDidBecomeActive(application: UIApplication): void { console.log("applicationDidBecomeActive: " + application) } } application.ios.delegate = MyDelegate; */ if (application.ios) { // Observe application notifications. application.ios.addNotificationObserver(UIApplicationDidFinishLaunchingNotification, (notification: NSNotification) => { console.log("UIApplicationDidFinishLaunchingNotification: " + notification) }); } application.mainModule = "app/mainPage"; // Common events for both Android and iOS. application.on(application.launchEvent, function (args: application.ApplicationEventData) { if (args.android) { // For Android applications, args.android is an android.content.Intent class. console.log("Launched Android application with the following intent: " + args.android + "."); } else if (args.ios !== undefined) { // For iOS applications, args.ios is NSDictionary (launchOptions). console.log("Launched iOS application with options: " + args.ios); } }); application.on(application.suspendEvent, function (args: application.ApplicationEventData) { if (args.android) { // For Android applications, args.android is an android activity class. console.log("Activity: " + args.android); } else if (args.ios) { // For iOS applications, args.ios is UIApplication. console.log("UIApplication: " + args.ios); } }); application.on(application.resumeEvent, function (args: application.ApplicationEventData) { if (args.android) { // For Android applications, args.android is an android activity class. 
console.log("Activity: " + args.android); } else if (args.ios) { // For iOS applications, args.ios is UIApplication. console.log("UIApplication: " + args.ios); } }); application.on(application.exitEvent, function (args: application.ApplicationEventData) { if (args.android) { // For Android applications, args.android is an android activity class. console.log("Activity: " + args.android); } else if (args.ios) { // For iOS applications, args.ios is UIApplication. console.log("UIApplication: " + args.ios); } }); application.on(application.lowMemoryEvent, function (args: application.ApplicationEventData) { if (args.android) { // For Android applications, args.android is an android activity class. console.log("Activity: " + args.android); } else if (args.ios) { // For iOS applications, args.ios is UIApplication. console.log("UIApplication: " + args.ios); } }); application.on(application.uncaughtErrorEvent, function (args: application.ApplicationEventData) { if (args.android) { // For Android applications, args.android is an NativeScriptError. console.log("NativeScriptError: " + args.android); } else if (args.ios) { // For iOS applications, args.ios is NativeScriptError. 
console.log("NativeScriptError: " + args.ios); } }); // Android activity events if (application.android) { application.android.on(application.AndroidApplication.activityCreatedEvent, function (args: application.AndroidActivityBundleEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle); }); application.android.on(application.AndroidApplication.activityDestroyedEvent, function (args: application.AndroidActivityEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity); }); application.android.on(application.AndroidApplication.activityStartedEvent, function (args: application.AndroidActivityEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity); }); application.android.on(application.AndroidApplication.activityPausedEvent, function (args: application.AndroidActivityEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity); }); application.android.on(application.AndroidApplication.activityResumedEvent, function (args: application.AndroidActivityEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity); }); application.android.on(application.AndroidApplication.activityStoppedEvent, function (args: application.AndroidActivityEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity); }); application.android.on(application.AndroidApplication.saveActivityStateEvent, function (args: application.AndroidActivityBundleEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle); }); application.android.on(application.AndroidApplication.activityResultEvent, function (args: application.AndroidActivityResultEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", requestCode: " + args.requestCode + ", resultCode: " + args.resultCode + ", Intent: " + args.intent); }); 
application.android.on(application.AndroidApplication.activityBackPressedEvent, function (args: application.AndroidActivityBackPressedEventData) { console.log("Event: " + args.eventName + ", Activity: " + args.activity); // Set args.cancel = true to cancel back navigation and do something custom. }); } application.start();
hdeshev/NativeScript
apps/tests/app/app.ts
TypeScript
apache-2.0
5,921
//// [variadicTuples1.ts] // Variadics in tuple types type TV0<T extends unknown[]> = [string, ...T]; type TV1<T extends unknown[]> = [string, ...T, number]; type TV2<T extends unknown[]> = [string, ...T, number, ...T]; type TV3<T extends unknown[]> = [string, ...T, ...number[], ...T]; // Normalization type TN1 = TV1<[boolean, string]>; type TN2 = TV1<[]>; type TN3 = TV1<[boolean?]>; type TN4 = TV1<string[]>; type TN5 = TV1<[boolean] | [symbol, symbol]>; type TN6 = TV1<any>; type TN7 = TV1<never>; // Variadics in array literals function tup2<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]) { return [1, ...t, 2, ...u, 3] as const; } const t2 = tup2(['hello'], [10, true]); function concat<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): [...T, ...U] { return [...t, ...u]; } declare const sa: string[]; const tc1 = concat([], []); const tc2 = concat(['hello'], [42]); const tc3 = concat([1, 2, 3], sa); const tc4 = concat(sa, [1, 2, 3]); // Ideally would be [...string[], number, number, number] function concat2<T extends readonly unknown[], U extends readonly unknown[]>(t: T, u: U) { return [...t, ...u]; // (T[number] | U[number])[] } const tc5 = concat2([1, 2, 3] as const, [4, 5, 6] as const); // (1 | 2 | 3 | 4 | 5 | 6)[] // Spread arguments declare function foo1(a: number, b: string, c: boolean, ...d: number[]): void; function foo2(t1: [number, string], t2: [boolean], a1: number[]) { foo1(1, 'abc', true, 42, 43, 44); foo1(...t1, true, 42, 43, 44); foo1(...t1, ...t2, 42, 43, 44); foo1(...t1, ...t2, ...a1); foo1(...t1); // Error foo1(...t1, 45); // Error } declare function foo3<T extends unknown[]>(x: number, ...args: [...T, number]): T; function foo4<U extends unknown[]>(u: U) { foo3(1, 2); foo3(1, 'hello', true, 2); foo3(1, ...u, 'hi', 2); foo3(1); } // Contextual typing of array literals declare function ft1<T extends unknown[]>(t: T): T; declare function ft2<T extends unknown[]>(t: T): readonly [...T]; declare function ft3<T extends 
unknown[]>(t: [...T]): T; declare function ft4<T extends unknown[]>(t: [...T]): readonly [...T]; ft1(['hello', 42]); // (string | number)[] ft2(['hello', 42]); // readonly (string | number)[] ft3(['hello', 42]); // [string, number] ft4(['hello', 42]); // readonly [string, number] // Indexing variadic tuple types function f0<T extends unknown[]>(t: [string, ...T], n: number) { const a = t[0]; // string const b = t[1]; // [string, ...T][1] const c = t[2]; // [string, ...T][2] const d = t[n]; // [string, ...T][number] } function f1<T extends unknown[]>(t: [string, ...T, number], n: number) { const a = t[0]; // string const b = t[1]; // [string, ...T, number][1] const c = t[2]; // [string, ...T, number][2] const d = t[n]; // [string, ...T, number][number] } // Destructuring variadic tuple types function f2<T extends unknown[]>(t: [string, ...T]) { let [...ax] = t; // [string, ...T] let [b1, ...bx] = t; // string, [...T] let [c1, c2, ...cx] = t; // string, [string, ...T][1], T[number][] } function f3<T extends unknown[]>(t: [string, ...T, number]) { let [...ax] = t; // [string, ...T, number] let [b1, ...bx] = t; // string, [...T, number] let [c1, c2, ...cx] = t; // string, [string, ...T, number][1], (number | T[number])[] } // Mapped types applied to variadic tuple types type Arrayify<T> = { [P in keyof T]: T[P][] }; type TM1<U extends unknown[]> = Arrayify<readonly [string, number?, ...U, ...boolean[]]>; // [string[], (number | undefined)[]?, Arrayify<U>, ...boolean[][]] type TP1<T extends unknown[]> = Partial<[string, ...T, number]>; // [string?, Partial<T>, number?] 
type TP2<T extends unknown[]> = Partial<[string, ...T, ...number[]]>; // [string?, Partial<T>, ...(number | undefined)[]] // Reverse mapping through mapped type applied to variadic tuple type declare function fm1<T extends unknown[]>(t: Arrayify<[string, number, ...T]>): T; let tm1 = fm1([['abc'], [42], [true], ['def']]); // [boolean, string] // Spread of readonly array-like infers mutable array-like declare function fx1<T extends unknown[]>(a: string, ...args: T): T; function gx1<U extends unknown[], V extends readonly unknown[]>(u: U, v: V) { fx1('abc'); // [] fx1('abc', ...u); // U fx1('abc', ...v); // [...V] fx1<U>('abc', ...u); // U fx1<V>('abc', ...v); // Error } declare function fx2<T extends readonly unknown[]>(a: string, ...args: T): T; function gx2<U extends unknown[], V extends readonly unknown[]>(u: U, v: V) { fx2('abc'); // [] fx2('abc', ...u); // U fx2('abc', ...v); // [...V] fx2<U>('abc', ...u); // U fx2<V>('abc', ...v); // V } // Relations involving variadic tuple types function f10<T extends string[], U extends T>(x: [string, ...unknown[]], y: [string, ...T], z: [string, ...U]) { x = y; x = z; y = x; // Error y = z; z = x; // Error z = y; // Error } // For a generic type T, [...T] is assignable to T, T is assignable to readonly [...T], and T is assignable // to [...T] when T is constrained to a mutable array or tuple type. 
function f11<T extends unknown[]>(t: T, m: [...T], r: readonly [...T]) { t = m; t = r; // Error m = t; m = r; // Error r = t; r = m; } function f12<T extends readonly unknown[]>(t: T, m: [...T], r: readonly [...T]) { t = m; t = r; // Error m = t; // Error m = r; // Error r = t; r = m; } function f13<T extends string[], U extends T>(t0: T, t1: [...T], t2: [...U]) { t0 = t1; t0 = t2; t1 = t0; t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f14<T extends readonly string[], U extends T>(t0: T, t1: [...T], t2: [...U]) { t0 = t1; t0 = t2; t1 = t0; // Error t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f15<T extends string[], U extends T>(k0: keyof T, k1: keyof [...T], k2: keyof [...U], k3: keyof [1, 2, ...T]) { k0 = 'length'; k1 = 'length'; k2 = 'length'; k0 = 'slice'; k1 = 'slice'; k2 = 'slice'; k3 = '0'; k3 = '1'; k3 = '2'; // Error } // Inference between variadic tuple types type First<T extends readonly unknown[]> = T extends readonly [unknown, ...unknown[]] ? T[0] : T[0] | undefined; type DropFirst<T extends readonly unknown[]> = T extends readonly [unknown?, ...infer U] ? U : [...T]; type Last<T extends readonly unknown[]> = T extends readonly [...unknown[], infer U] ? U : T extends readonly [unknown, ...unknown[]] ? T[number] : T[number] | undefined; type DropLast<T extends readonly unknown[]> = T extends readonly [...infer U, unknown] ? 
U : [...T]; type T00 = First<[number, symbol, string]>; type T01 = First<[symbol, string]>; type T02 = First<[string]>; type T03 = First<[number, symbol, ...string[]]>; type T04 = First<[symbol, ...string[]]>; type T05 = First<[string?]>; type T06 = First<string[]>; type T07 = First<[]>; type T08 = First<any>; type T09 = First<never>; type T10 = DropFirst<[number, symbol, string]>; type T11 = DropFirst<[symbol, string]>; type T12 = DropFirst<[string]>; type T13 = DropFirst<[number, symbol, ...string[]]>; type T14 = DropFirst<[symbol, ...string[]]>; type T15 = DropFirst<[string?]>; type T16 = DropFirst<string[]>; type T17 = DropFirst<[]>; type T18 = DropFirst<any>; type T19 = DropFirst<never>; type T20 = Last<[number, symbol, string]>; type T21 = Last<[symbol, string]>; type T22 = Last<[string]>; type T23 = Last<[number, symbol, ...string[]]>; type T24 = Last<[symbol, ...string[]]>; type T25 = Last<[string?]>; type T26 = Last<string[]>; type T27 = Last<[]>; type T28 = Last<any>; type T29 = Last<never>; type T30 = DropLast<[number, symbol, string]>; type T31 = DropLast<[symbol, string]>; type T32 = DropLast<[string]>; type T33 = DropLast<[number, symbol, ...string[]]>; type T34 = DropLast<[symbol, ...string[]]>; type T35 = DropLast<[string?]>; type T36 = DropLast<string[]>; type T37 = DropLast<[]>; // unknown[], maybe should be [] type T38 = DropLast<any>; type T39 = DropLast<never>; type R00 = First<readonly [number, symbol, string]>; type R01 = First<readonly [symbol, string]>; type R02 = First<readonly [string]>; type R03 = First<readonly [number, symbol, ...string[]]>; type R04 = First<readonly [symbol, ...string[]]>; type R05 = First<readonly string[]>; type R06 = First<readonly []>; type R10 = DropFirst<readonly [number, symbol, string]>; type R11 = DropFirst<readonly [symbol, string]>; type R12 = DropFirst<readonly [string]>; type R13 = DropFirst<readonly [number, symbol, ...string[]]>; type R14 = DropFirst<readonly [symbol, ...string[]]>; type R15 = 
DropFirst<readonly string[]>; type R16 = DropFirst<readonly []>; type R20 = Last<readonly [number, symbol, string]>; type R21 = Last<readonly [symbol, string]>; type R22 = Last<readonly [string]>; type R23 = Last<readonly [number, symbol, ...string[]]>; type R24 = Last<readonly [symbol, ...string[]]>; type R25 = Last<readonly string[]>; type R26 = Last<readonly []>; type R30 = DropLast<readonly [number, symbol, string]>; type R31 = DropLast<readonly [symbol, string]>; type R32 = DropLast<readonly [string]>; type R33 = DropLast<readonly [number, symbol, ...string[]]>; type R34 = DropLast<readonly [symbol, ...string[]]>; type R35 = DropLast<readonly string[]>; type R36 = DropLast<readonly []>; // Inference to [...T, ...U] with implied arity for T function curry<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, ...a: T) { return (...b: U) => f(...a, ...b); } const fn1 = (a: number, b: string, c: boolean, d: string[]) => 0; const c0 = curry(fn1); // (a: number, b: string, c: boolean, d: string[]) => number const c1 = curry(fn1, 1); // (b: string, c: boolean, d: string[]) => number const c2 = curry(fn1, 1, 'abc'); // (c: boolean, d: string[]) => number const c3 = curry(fn1, 1, 'abc', true); // (d: string[]) => number const c4 = curry(fn1, 1, 'abc', true, ['x', 'y']); // () => number const fn2 = (x: number, b: boolean, ...args: string[]) => 0; const c10 = curry(fn2); // (x: number, b: boolean, ...args: string[]) => number const c11 = curry(fn2, 1); // (b: boolean, ...args: string[]) => number const c12 = curry(fn2, 1, true); // (...args: string[]) => number const c13 = curry(fn2, 1, true, 'abc', 'def'); // (...args: string[]) => number const fn3 = (...args: string[]) => 0; const c20 = curry(fn3); // (...args: string[]) => number const c21 = curry(fn3, 'abc', 'def'); // (...args: string[]) => number const c22 = curry(fn3, ...sa); // (...args: string[]) => number // No inference to [...T, ...U] when there is no implied arity function curry2<T 
extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, t: [...T], u: [...U]) { return f(...t, ...u); } declare function fn10(a: string, b: number, c: boolean): string[]; curry2(fn10, ['hello', 42], [true]); curry2(fn10, ['hello'], [42, true]); // Inference to [...T] has higher priority than inference to [...T, number?] declare function ft<T extends unknown[]>(t1: [...T], t2: [...T, number?]): T; ft([1, 2, 3], [1, 2, 3]); ft([1, 2], [1, 2, 3]); ft(['a', 'b'], ['c', 'd']) ft(['a', 'b'], ['c', 'd', 42]) // Last argument is contextually typed declare function call<T extends unknown[], R>(...args: [...T, (...args: T) => R]): [T, R]; call('hello', 32, (a, b) => 42); call(...sa, (...x) => 42); // No inference to ending optional elements (except with identical structure) declare function f20<T extends unknown[] = []>(args: [...T, number?]): T; function f21<U extends string[]>(args: [...U, number?]) { let v1 = f20(args); // U let v2 = f20(["foo", "bar"]); // [string] let v3 = f20(["foo", 42]); // [string] } declare function f22<T extends unknown[] = []>(args: [...T, number]): T; declare function f22<T extends unknown[] = []>(args: [...T]): T; function f23<U extends string[]>(args: [...U, number]) { let v1 = f22(args); // U let v2 = f22(["foo", "bar"]); // [string, string] let v3 = f22(["foo", 42]); // [string] } // Repro from #39327 interface Desc<A extends unknown[], T> { readonly f: (...args: A) => T; bind<T extends unknown[], U extends unknown[], R>(this: Desc<[...T, ...U], R>, ...args: T): Desc<[...U], R>; } declare const a: Desc<[string, number, boolean], object>; const b = a.bind("", 1); // Desc<[boolean], object> // Repro from #39607 declare function getUser(id: string, options?: { x?: string }): string; declare function getOrgUser(id: string, orgId: number, options?: { y?: number, z?: boolean }): void; function callApi<T extends unknown[] = [], U = void>(method: (...args: [...T, object]) => U) { return (...args: [...T]) => method(...args, {}); } 
callApi(getUser); callApi(getOrgUser); // Repro from #40235 type Numbers = number[]; type Unbounded = [...Numbers, boolean]; const data: Unbounded = [false, false]; // Error type U1 = [string, ...Numbers, boolean]; type U2 = [...[string, ...Numbers], boolean]; type U3 = [...[string, number], boolean]; //// [variadicTuples1.js] "use strict"; // Variadics in tuple types var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { if (ar || !(i in from)) { if (!ar) ar = Array.prototype.slice.call(from, 0, i); ar[i] = from[i]; } } return to.concat(ar || Array.prototype.slice.call(from)); }; // Variadics in array literals function tup2(t, u) { return __spreadArray(__spreadArray(__spreadArray(__spreadArray([1], t, true), [2], false), u, true), [3], false); } var t2 = tup2(['hello'], [10, true]); function concat(t, u) { return __spreadArray(__spreadArray([], t, true), u, true); } var tc1 = concat([], []); var tc2 = concat(['hello'], [42]); var tc3 = concat([1, 2, 3], sa); var tc4 = concat(sa, [1, 2, 3]); // Ideally would be [...string[], number, number, number] function concat2(t, u) { return __spreadArray(__spreadArray([], t, true), u, true); // (T[number] | U[number])[] } var tc5 = concat2([1, 2, 3], [4, 5, 6]); // (1 | 2 | 3 | 4 | 5 | 6)[] function foo2(t1, t2, a1) { foo1(1, 'abc', true, 42, 43, 44); foo1.apply(void 0, __spreadArray(__spreadArray([], t1, false), [true, 42, 43, 44], false)); foo1.apply(void 0, __spreadArray(__spreadArray(__spreadArray([], t1, false), t2, false), [42, 43, 44], false)); foo1.apply(void 0, __spreadArray(__spreadArray(__spreadArray([], t1, false), t2, false), a1, false)); foo1.apply(void 0, t1); // Error foo1.apply(void 0, __spreadArray(__spreadArray([], t1, false), [45], false)); // Error } function foo4(u) { foo3(1, 2); foo3(1, 'hello', true, 2); foo3.apply(void 0, __spreadArray(__spreadArray([1], u, false), ['hi', 2], false)); 
foo3(1); } ft1(['hello', 42]); // (string | number)[] ft2(['hello', 42]); // readonly (string | number)[] ft3(['hello', 42]); // [string, number] ft4(['hello', 42]); // readonly [string, number] // Indexing variadic tuple types function f0(t, n) { var a = t[0]; // string var b = t[1]; // [string, ...T][1] var c = t[2]; // [string, ...T][2] var d = t[n]; // [string, ...T][number] } function f1(t, n) { var a = t[0]; // string var b = t[1]; // [string, ...T, number][1] var c = t[2]; // [string, ...T, number][2] var d = t[n]; // [string, ...T, number][number] } // Destructuring variadic tuple types function f2(t) { var ax = t.slice(0); // [string, ...T] var b1 = t[0], bx = t.slice(1); // string, [...T] var c1 = t[0], c2 = t[1], cx = t.slice(2); // string, [string, ...T][1], T[number][] } function f3(t) { var ax = t.slice(0); // [string, ...T, number] var b1 = t[0], bx = t.slice(1); // string, [...T, number] var c1 = t[0], c2 = t[1], cx = t.slice(2); // string, [string, ...T, number][1], (number | T[number])[] } var tm1 = fm1([['abc'], [42], [true], ['def']]); // [boolean, string] function gx1(u, v) { fx1('abc'); // [] fx1.apply(void 0, __spreadArray(['abc'], u, false)); // U fx1.apply(void 0, __spreadArray(['abc'], v, false)); // [...V] fx1.apply(void 0, __spreadArray(['abc'], u, false)); // U fx1.apply(void 0, __spreadArray(['abc'], v, false)); // Error } function gx2(u, v) { fx2('abc'); // [] fx2.apply(void 0, __spreadArray(['abc'], u, false)); // U fx2.apply(void 0, __spreadArray(['abc'], v, false)); // [...V] fx2.apply(void 0, __spreadArray(['abc'], u, false)); // U fx2.apply(void 0, __spreadArray(['abc'], v, false)); // V } // Relations involving variadic tuple types function f10(x, y, z) { x = y; x = z; y = x; // Error y = z; z = x; // Error z = y; // Error } // For a generic type T, [...T] is assignable to T, T is assignable to readonly [...T], and T is assignable // to [...T] when T is constrained to a mutable array or tuple type. 
function f11(t, m, r) { t = m; t = r; // Error m = t; m = r; // Error r = t; r = m; } function f12(t, m, r) { t = m; t = r; // Error m = t; // Error m = r; // Error r = t; r = m; } function f13(t0, t1, t2) { t0 = t1; t0 = t2; t1 = t0; t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f14(t0, t1, t2) { t0 = t1; t0 = t2; t1 = t0; // Error t1 = t2; t2 = t0; // Error t2 = t1; // Error } function f15(k0, k1, k2, k3) { k0 = 'length'; k1 = 'length'; k2 = 'length'; k0 = 'slice'; k1 = 'slice'; k2 = 'slice'; k3 = '0'; k3 = '1'; k3 = '2'; // Error } // Inference to [...T, ...U] with implied arity for T function curry(f) { var a = []; for (var _i = 1; _i < arguments.length; _i++) { a[_i - 1] = arguments[_i]; } return function () { var b = []; for (var _i = 0; _i < arguments.length; _i++) { b[_i] = arguments[_i]; } return f.apply(void 0, __spreadArray(__spreadArray([], a, false), b, false)); }; } var fn1 = function (a, b, c, d) { return 0; }; var c0 = curry(fn1); // (a: number, b: string, c: boolean, d: string[]) => number var c1 = curry(fn1, 1); // (b: string, c: boolean, d: string[]) => number var c2 = curry(fn1, 1, 'abc'); // (c: boolean, d: string[]) => number var c3 = curry(fn1, 1, 'abc', true); // (d: string[]) => number var c4 = curry(fn1, 1, 'abc', true, ['x', 'y']); // () => number var fn2 = function (x, b) { var args = []; for (var _i = 2; _i < arguments.length; _i++) { args[_i - 2] = arguments[_i]; } return 0; }; var c10 = curry(fn2); // (x: number, b: boolean, ...args: string[]) => number var c11 = curry(fn2, 1); // (b: boolean, ...args: string[]) => number var c12 = curry(fn2, 1, true); // (...args: string[]) => number var c13 = curry(fn2, 1, true, 'abc', 'def'); // (...args: string[]) => number var fn3 = function () { var args = []; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } return 0; }; var c20 = curry(fn3); // (...args: string[]) => number var c21 = curry(fn3, 'abc', 'def'); // (...args: string[]) => number var c22 = 
curry.apply(void 0, __spreadArray([fn3], sa, false)); // (...args: string[]) => number // No inference to [...T, ...U] when there is no implied arity function curry2(f, t, u) { return f.apply(void 0, __spreadArray(__spreadArray([], t, false), u, false)); } curry2(fn10, ['hello', 42], [true]); curry2(fn10, ['hello'], [42, true]); ft([1, 2, 3], [1, 2, 3]); ft([1, 2], [1, 2, 3]); ft(['a', 'b'], ['c', 'd']); ft(['a', 'b'], ['c', 'd', 42]); call('hello', 32, function (a, b) { return 42; }); call.apply(void 0, __spreadArray(__spreadArray([], sa, false), [function () { var x = []; for (var _i = 0; _i < arguments.length; _i++) { x[_i] = arguments[_i]; } return 42; }], false)); function f21(args) { var v1 = f20(args); // U var v2 = f20(["foo", "bar"]); // [string] var v3 = f20(["foo", 42]); // [string] } function f23(args) { var v1 = f22(args); // U var v2 = f22(["foo", "bar"]); // [string, string] var v3 = f22(["foo", 42]); // [string] } var b = a.bind("", 1); // Desc<[boolean], object> function callApi(method) { return function () { var args = []; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } return method.apply(void 0, __spreadArray(__spreadArray([], args, false), [{}], false)); }; } callApi(getUser); callApi(getOrgUser); var data = [false, false]; // Error //// [variadicTuples1.d.ts] declare type TV0<T extends unknown[]> = [string, ...T]; declare type TV1<T extends unknown[]> = [string, ...T, number]; declare type TV2<T extends unknown[]> = [string, ...T, number, ...T]; declare type TV3<T extends unknown[]> = [string, ...T, ...number[], ...T]; declare type TN1 = TV1<[boolean, string]>; declare type TN2 = TV1<[]>; declare type TN3 = TV1<[boolean?]>; declare type TN4 = TV1<string[]>; declare type TN5 = TV1<[boolean] | [symbol, symbol]>; declare type TN6 = TV1<any>; declare type TN7 = TV1<never>; declare function tup2<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): readonly [1, ...T, 2, ...U, 3]; declare const t2: readonly [1, 
string, 2, number, boolean, 3]; declare function concat<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): [...T, ...U]; declare const sa: string[]; declare const tc1: []; declare const tc2: [string, number]; declare const tc3: [number, number, number, ...string[]]; declare const tc4: [...string[], number, number, number]; declare function concat2<T extends readonly unknown[], U extends readonly unknown[]>(t: T, u: U): (T[number] | U[number])[]; declare const tc5: (2 | 4 | 1 | 3 | 6 | 5)[]; declare function foo1(a: number, b: string, c: boolean, ...d: number[]): void; declare function foo2(t1: [number, string], t2: [boolean], a1: number[]): void; declare function foo3<T extends unknown[]>(x: number, ...args: [...T, number]): T; declare function foo4<U extends unknown[]>(u: U): void; declare function ft1<T extends unknown[]>(t: T): T; declare function ft2<T extends unknown[]>(t: T): readonly [...T]; declare function ft3<T extends unknown[]>(t: [...T]): T; declare function ft4<T extends unknown[]>(t: [...T]): readonly [...T]; declare function f0<T extends unknown[]>(t: [string, ...T], n: number): void; declare function f1<T extends unknown[]>(t: [string, ...T, number], n: number): void; declare function f2<T extends unknown[]>(t: [string, ...T]): void; declare function f3<T extends unknown[]>(t: [string, ...T, number]): void; declare type Arrayify<T> = { [P in keyof T]: T[P][]; }; declare type TM1<U extends unknown[]> = Arrayify<readonly [string, number?, ...U, ...boolean[]]>; declare type TP1<T extends unknown[]> = Partial<[string, ...T, number]>; declare type TP2<T extends unknown[]> = Partial<[string, ...T, ...number[]]>; declare function fm1<T extends unknown[]>(t: Arrayify<[string, number, ...T]>): T; declare let tm1: [boolean, string]; declare function fx1<T extends unknown[]>(a: string, ...args: T): T; declare function gx1<U extends unknown[], V extends readonly unknown[]>(u: U, v: V): void; declare function fx2<T extends readonly unknown[]>(a: 
string, ...args: T): T; declare function gx2<U extends unknown[], V extends readonly unknown[]>(u: U, v: V): void; declare function f10<T extends string[], U extends T>(x: [string, ...unknown[]], y: [string, ...T], z: [string, ...U]): void; declare function f11<T extends unknown[]>(t: T, m: [...T], r: readonly [...T]): void; declare function f12<T extends readonly unknown[]>(t: T, m: [...T], r: readonly [...T]): void; declare function f13<T extends string[], U extends T>(t0: T, t1: [...T], t2: [...U]): void; declare function f14<T extends readonly string[], U extends T>(t0: T, t1: [...T], t2: [...U]): void; declare function f15<T extends string[], U extends T>(k0: keyof T, k1: keyof [...T], k2: keyof [...U], k3: keyof [1, 2, ...T]): void; declare type First<T extends readonly unknown[]> = T extends readonly [unknown, ...unknown[]] ? T[0] : T[0] | undefined; declare type DropFirst<T extends readonly unknown[]> = T extends readonly [unknown?, ...infer U] ? U : [...T]; declare type Last<T extends readonly unknown[]> = T extends readonly [...unknown[], infer U] ? U : T extends readonly [unknown, ...unknown[]] ? T[number] : T[number] | undefined; declare type DropLast<T extends readonly unknown[]> = T extends readonly [...infer U, unknown] ? 
U : [...T]; declare type T00 = First<[number, symbol, string]>; declare type T01 = First<[symbol, string]>; declare type T02 = First<[string]>; declare type T03 = First<[number, symbol, ...string[]]>; declare type T04 = First<[symbol, ...string[]]>; declare type T05 = First<[string?]>; declare type T06 = First<string[]>; declare type T07 = First<[]>; declare type T08 = First<any>; declare type T09 = First<never>; declare type T10 = DropFirst<[number, symbol, string]>; declare type T11 = DropFirst<[symbol, string]>; declare type T12 = DropFirst<[string]>; declare type T13 = DropFirst<[number, symbol, ...string[]]>; declare type T14 = DropFirst<[symbol, ...string[]]>; declare type T15 = DropFirst<[string?]>; declare type T16 = DropFirst<string[]>; declare type T17 = DropFirst<[]>; declare type T18 = DropFirst<any>; declare type T19 = DropFirst<never>; declare type T20 = Last<[number, symbol, string]>; declare type T21 = Last<[symbol, string]>; declare type T22 = Last<[string]>; declare type T23 = Last<[number, symbol, ...string[]]>; declare type T24 = Last<[symbol, ...string[]]>; declare type T25 = Last<[string?]>; declare type T26 = Last<string[]>; declare type T27 = Last<[]>; declare type T28 = Last<any>; declare type T29 = Last<never>; declare type T30 = DropLast<[number, symbol, string]>; declare type T31 = DropLast<[symbol, string]>; declare type T32 = DropLast<[string]>; declare type T33 = DropLast<[number, symbol, ...string[]]>; declare type T34 = DropLast<[symbol, ...string[]]>; declare type T35 = DropLast<[string?]>; declare type T36 = DropLast<string[]>; declare type T37 = DropLast<[]>; declare type T38 = DropLast<any>; declare type T39 = DropLast<never>; declare type R00 = First<readonly [number, symbol, string]>; declare type R01 = First<readonly [symbol, string]>; declare type R02 = First<readonly [string]>; declare type R03 = First<readonly [number, symbol, ...string[]]>; declare type R04 = First<readonly [symbol, ...string[]]>; declare type R05 = 
First<readonly string[]>; declare type R06 = First<readonly []>; declare type R10 = DropFirst<readonly [number, symbol, string]>; declare type R11 = DropFirst<readonly [symbol, string]>; declare type R12 = DropFirst<readonly [string]>; declare type R13 = DropFirst<readonly [number, symbol, ...string[]]>; declare type R14 = DropFirst<readonly [symbol, ...string[]]>; declare type R15 = DropFirst<readonly string[]>; declare type R16 = DropFirst<readonly []>; declare type R20 = Last<readonly [number, symbol, string]>; declare type R21 = Last<readonly [symbol, string]>; declare type R22 = Last<readonly [string]>; declare type R23 = Last<readonly [number, symbol, ...string[]]>; declare type R24 = Last<readonly [symbol, ...string[]]>; declare type R25 = Last<readonly string[]>; declare type R26 = Last<readonly []>; declare type R30 = DropLast<readonly [number, symbol, string]>; declare type R31 = DropLast<readonly [symbol, string]>; declare type R32 = DropLast<readonly [string]>; declare type R33 = DropLast<readonly [number, symbol, ...string[]]>; declare type R34 = DropLast<readonly [symbol, ...string[]]>; declare type R35 = DropLast<readonly string[]>; declare type R36 = DropLast<readonly []>; declare function curry<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, ...a: T): (...b: U) => R; declare const fn1: (a: number, b: string, c: boolean, d: string[]) => number; declare const c0: (a: number, b: string, c: boolean, d: string[]) => number; declare const c1: (b: string, c: boolean, d: string[]) => number; declare const c2: (c: boolean, d: string[]) => number; declare const c3: (d: string[]) => number; declare const c4: () => number; declare const fn2: (x: number, b: boolean, ...args: string[]) => number; declare const c10: (x: number, b: boolean, ...args: string[]) => number; declare const c11: (b: boolean, ...args: string[]) => number; declare const c12: (...b: string[]) => number; declare const c13: (...b: string[]) => number; declare 
const fn3: (...args: string[]) => number; declare const c20: (...b: string[]) => number; declare const c21: (...b: string[]) => number; declare const c22: (...b: string[]) => number; declare function curry2<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, t: [...T], u: [...U]): R; declare function fn10(a: string, b: number, c: boolean): string[]; declare function ft<T extends unknown[]>(t1: [...T], t2: [...T, number?]): T; declare function call<T extends unknown[], R>(...args: [...T, (...args: T) => R]): [T, R]; declare function f20<T extends unknown[] = []>(args: [...T, number?]): T; declare function f21<U extends string[]>(args: [...U, number?]): void; declare function f22<T extends unknown[] = []>(args: [...T, number]): T; declare function f22<T extends unknown[] = []>(args: [...T]): T; declare function f23<U extends string[]>(args: [...U, number]): void; interface Desc<A extends unknown[], T> { readonly f: (...args: A) => T; bind<T extends unknown[], U extends unknown[], R>(this: Desc<[...T, ...U], R>, ...args: T): Desc<[...U], R>; } declare const a: Desc<[string, number, boolean], object>; declare const b: Desc<[boolean], object>; declare function getUser(id: string, options?: { x?: string; }): string; declare function getOrgUser(id: string, orgId: number, options?: { y?: number; z?: boolean; }): void; declare function callApi<T extends unknown[] = [], U = void>(method: (...args: [...T, object]) => U): (...args_0: T) => U; declare type Numbers = number[]; declare type Unbounded = [...Numbers, boolean]; declare const data: Unbounded; declare type U1 = [string, ...Numbers, boolean]; declare type U2 = [...[string, ...Numbers], boolean]; declare type U3 = [...[string, number], boolean];
Microsoft/TypeScript
tests/baselines/reference/variadicTuples1.js
JavaScript
apache-2.0
30,820
/*

   Derby - Class org.apache.derbyBuild.MessageVetter

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derbyBuild;

import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Class that checks the message files for common problems.
 */
public class MessageVetter {

    /**
     * <p>
     * Check all the message translations in the specified directories for
     * common problems. Assume that all properties files in the directories
     * are message translations.
     * </p>
     *
     * <p>
     * If a problem is found, an error will be raised.
     * </p>
     *
     * @param args names of the directories to check
     * @throws IOException if a directory cannot be listed or a message file
     * cannot be loaded
     */
    public static void main(String[] args) throws IOException {
        FileFilter filter = new FileFilter() {
            public boolean accept(File pathname) {
                return pathname.getName().endsWith(".properties");
            }
        };
        for (String directory : args) {
            // File.listFiles() returns null if the argument is not a readable
            // directory. Fail with a clear message instead of letting the
            // for-each loop throw a NullPointerException.
            File[] files = new File(directory).listFiles(filter);
            if (files == null) {
                throw new IOException(
                        "Could not list files in directory " + directory);
            }
            for (File file : files) {
                new MessageVetter(file).vet();
            }
        }
    }

    /**
     * A regular expression that matches a single-quote character that is
     * neither preceeded nor followed by another single-quote character. Used
     * by {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that messages contain two single-quotes in order to produce a
     * single apostrophe (dictated by {@code java.text.MessageFormat}).
     */
    private static final Pattern LONE_QUOTE_PATTERN =
            Pattern.compile("^'[^']|[^']'[^']|[^']'$");

    /**
     * A regular expression that matches a single-quote character that have
     * no adjacent single-quote or curly brace character. Used by
     * {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that all single-quotes are either correctly formatted apostrophes
     * or used for quoting curly braces, as required by
     * {@code java.text.MessageFormat}.
     */
    private static final Pattern LONE_QUOTE_ALLOWED_PATTERN =
            Pattern.compile("^'[^'{}]|[^'{}]'[^'{}]|[^'{}]'$");

    /**
     * A set of message identifiers in whose messages single-quotes may legally
     * appear with no adjacent single-quote character. This will be messages
     * where the single-quotes are needed to quote curly braces that should
     * appear literally in the message text.
     */
    private static final Set<String> LONE_QUOTE_ALLOWED = new HashSet<String>();

    static {
        // The IJ help text contains curly braces that need quoting.
        LONE_QUOTE_ALLOWED.add("IJ_HelpText");
        // Some of the DRDA usage messages contain the text {on|off}, which
        // needs quoting.
        LONE_QUOTE_ALLOWED.add("DRDA_Usage8.I");
        LONE_QUOTE_ALLOWED.add("DRDA_Usage11.I");
        LONE_QUOTE_ALLOWED.add("PE_HelpText");
    }

    /** The message file to check. */
    private final File file;

    /** The properties found in the message file. */
    private final Properties properties;

    /**
     * Create a new {@code MessageVetter} instance.
     *
     * @param file the file with the messages to check
     * @throws IOException if the file cannot be loaded
     */
    private MessageVetter(File file) throws IOException {
        this.file = file;
        properties = new Properties();
        FileInputStream in = new FileInputStream(file);
        try {
            properties.load(in);
        } finally {
            in.close();
        }
    }

    /**
     * Vet the messages in this file. An error will be raised if an
     * ill-formatted message is found.
     */
    private void vet() {
        // Use a wildcard instead of the raw Enumeration type; the elements
        // are property names, which are always strings.
        Enumeration<?> e = properties.propertyNames();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            String message = properties.getProperty(key);
            vetMessage(key, message);
        }
    }

    /**
     * Vet a specific message. Raise an error if it is not well-formed.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void vetMessage(String key, String message) {
        checkSingleQuotes(key, message);
        checkValidMessageFormat(key, message);
    }

    /**
     * Check that single-quote characters are doubled, as required by
     * {@code java.text.MessageFormat}. Raise an error otherwise.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkSingleQuotes(String key, String message) {
        Pattern p;
        if (LONE_QUOTE_ALLOWED.contains(key)) {
            // In some messages we allow lone single-quote characters, but
            // only if they are used to quote curly braces. Use a regular
            // expression that finds all single-quotes that aren't adjacent to
            // another single-quote or a curly brace character.
            p = LONE_QUOTE_ALLOWED_PATTERN;
        } else {
            // Otherwise, we don't allow lone single-quote characters at all.
            p = LONE_QUOTE_PATTERN;
        }

        if (p.matcher(message).find()) {
            throw new AssertionError("Lone single-quote in message " + key +
                    " in " + file + ".\nThis is OK if it is used for quoting " +
                    "special characters in the message. If this is what the " +
                    "character is used for, add an exception in " +
                    getClass().getName() + ".LONE_QUOTE_ALLOWED.");
        }
    }

    /**
     * Check that a message format specifier is valid. Raise an error if it
     * is not.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkValidMessageFormat(String key, String message) {
        try {
            // See if a MessageFormat instance can be produced based on this
            // message format specifier.
            new MessageFormat(message);
        } catch (Exception e) {
            AssertionError ae = new AssertionError(
                    "Message " + key + " in " + file + " isn't a valid " +
                    "java.text.MessageFormat pattern.");
            ae.initCause(e);
            throw ae;
        }
    }
}
apache/derby
java/build/org/apache/derbyBuild/MessageVetter.java
Java
apache-2.0
7,276
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.util.http.parser; import java.io.IOException; import java.io.StringReader; import org.apache.tomcat.util.collections.ConcurrentCache; /** * Caches the results of parsing content-type headers. */ public class MediaTypeCache { private final ConcurrentCache<String,String[]> cache; public MediaTypeCache(int size) { cache = new ConcurrentCache<>(size); } /** * Looks in the cache and returns the cached value if one is present. If no * match exists in the cache, a new parser is created, the input parsed and * the results placed in the cache and returned to the user. * * @param input The content-type header value to parse * @return The results are provided as a two element String array. The * first element is the media type less the charset and * the second element is the charset */ public String[] parse(String input) { String[] result = cache.get(input); if (result != null) { return result; } MediaType m = null; try { m = MediaType.parseMediaType(new StringReader(input)); } catch (IOException e) { // Ignore - return null } if (m != null) { result = new String[] {m.toStringNoCharset(), m.getCharset()}; cache.put(input, result); } return result; } }
IAMTJW/Tomcat-8.5.20
tomcat-8.5.20/java/org/apache/tomcat/util/http/parser/MediaTypeCache.java
Java
apache-2.0
2,331
package io.agrest.it.fixture.cayenne; import io.agrest.it.fixture.cayenne.auto._E15E1; public class E15E1 extends _E15E1 { private static final long serialVersionUID = 1L; }
AbleOne/link-rest
agrest/src/test/java/io/agrest/it/fixture/cayenne/E15E1.java
Java
apache-2.0
183
#pragma once #include "generator/collector_interface.hpp" #include <fstream> #include <functional> #include <memory> #include <string> struct OsmElement; namespace base { class GeoObjectId; } // namespace base namespace generator { namespace cache { class IntermediateDataReaderInterface; } // namespace cache // CollectorTag class collects validated value of a tag and saves it to file with following // format: osmId<tab>tagValue. class CollectorTag : public CollectorInterface { public: using Validator = std::function<bool(std::string const & tagValue)>; explicit CollectorTag(std::string const & filename, std::string const & tagKey, Validator const & validator); // CollectorInterface overrides: std::shared_ptr<CollectorInterface> Clone( std::shared_ptr<cache::IntermediateDataReaderInterface> const & = {}) const override; void Collect(OsmElement const & el) override; void Finish() override; void Merge(CollectorInterface const & collector) override; void MergeInto(CollectorTag & collector) const override; protected: void Save() override; void OrderCollectedData() override; private: std::ofstream m_stream; std::string m_tagKey; Validator m_validator; }; } // namespace generator
mpimenov/omim
generator/collector_tag.hpp
C++
apache-2.0
1,262
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ import collections import time from enum import Enum from pyflink.datastream import TimerService from pyflink.datastream.timerservice import InternalTimer, K, N, InternalTimerService from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend class InternalTimerImpl(InternalTimer[K, N]): def __init__(self, timestamp: int, key: K, namespace: N): self._timestamp = timestamp self._key = key self._namespace = namespace def get_timestamp(self) -> int: return self._timestamp def get_key(self) -> K: return self._key def get_namespace(self) -> N: return self._namespace def __hash__(self): result = int(self._timestamp ^ (self._timestamp >> 32)) result = 31 * result + hash(tuple(self._key)) result = 31 * result + hash(self._namespace) return result def __eq__(self, other): return self.__class__ == other.__class__ and self._timestamp == other._timestamp \ and self._key == other._key and self._namespace == other._namespace class TimerOperandType(Enum): REGISTER_EVENT_TIMER = 0 REGISTER_PROC_TIMER = 1 DELETE_EVENT_TIMER = 2 DELETE_PROC_TIMER = 3 
class InternalTimerServiceImpl(InternalTimerService[N]):
    """
    Internal implementation of InternalTimerService.

    Timer registrations and deletions are not applied immediately; they are
    buffered as (TimerOperandType, InternalTimerImpl) keys in an ordered
    mapping so they can be drained later in insertion order.
    """

    def __init__(self, keyed_state_backend: RemoteKeyedStateBackend):
        self._keyed_state_backend = keyed_state_backend
        self._current_watermark = None
        # OrderedDict used as an ordered, de-duplicated set; values are None.
        self.timers = collections.OrderedDict()

    def current_processing_time(self):
        # Wall-clock time in milliseconds.
        return int(time.time() * 1000)

    def current_watermark(self):
        return self._current_watermark

    def advance_watermark(self, watermark: int):
        self._current_watermark = watermark

    def register_processing_time_timer(self, namespace: N, t: int):
        self._enqueue(TimerOperandType.REGISTER_PROC_TIMER, namespace, t)

    def register_event_time_timer(self, namespace: N, t: int):
        self._enqueue(TimerOperandType.REGISTER_EVENT_TIMER, namespace, t)

    def delete_processing_time_timer(self, namespace: N, t: int):
        self._enqueue(TimerOperandType.DELETE_PROC_TIMER, namespace, t)

    def delete_event_time_timer(self, namespace: N, t: int):
        self._enqueue(TimerOperandType.DELETE_EVENT_TIMER, namespace, t)

    def _enqueue(self, operand: TimerOperandType, namespace: N, t: int):
        # Buffer one timer operation for the currently active key.
        current_key = self._keyed_state_backend.get_current_key()
        self.timers[(operand, InternalTimerImpl(t, current_key, namespace))] = None


class TimerServiceImpl(TimerService):
    """
    Internal implementation of TimerService.

    A thin facade that forwards every call to an InternalTimerServiceImpl,
    always using the default (None) namespace.
    """

    def __init__(self, internal_timer_service: InternalTimerServiceImpl):
        self._internal = internal_timer_service
        # Expose the underlying operation buffer so callers can drain it.
        self.timers = self._internal.timers

    def current_processing_time(self) -> int:
        return self._internal.current_processing_time()

    def current_watermark(self) -> int:
        return self._internal.current_watermark()

    def advance_watermark(self, wm):
        self._internal.advance_watermark(wm)

    def register_processing_time_timer(self, t: int):
        self._internal.register_processing_time_timer(None, t)

    def register_event_time_timer(self, t: int):
        self._internal.register_event_time_timer(None, t)

    def delete_processing_time_timer(self, t: int):
        self._internal.delete_processing_time_timer(None, t)

    def delete_event_time_timer(self, t: int):
        self._internal.delete_event_time_timer(None, t)
clarkyzl/flink
flink-python/pyflink/fn_execution/timerservice_impl.py
Python
apache-2.0
4,824
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2019 the original author or authors. */ package org.assertj.core.error; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveDeclaredFields; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveFields; import static org.assertj.core.util.Sets.newLinkedHashSet; import java.util.LinkedHashSet; import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; import org.assertj.core.presentation.Representation; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.test.Player; import org.assertj.core.util.Sets; import org.junit.jupiter.api.Test; /** * Tests for * <code>{@link ShouldOnlyHaveFields#create(Description, Representation)}</code> * * @author Filip Hrisafov */ public class ShouldOnlyHaveFields_create_Test { private static final LinkedHashSet<String> EMPTY_STRING_SET = Sets.<String> newLinkedHashSet(); @Test public void should_create_error_message_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to 
only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "fields not found:%n" + " <[\"nickname\"]>%n" + "and fields not expected:%n" + " <[\"address\"]>")); } @Test public void should_not_display_unexpected_fields_when_there_are_none_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), EMPTY_STRING_SET); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "but could not find the following fields:%n" + " <[\"nickname\"]>")); } @Test public void should_not_display_fields_not_found_when_there_are_none_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), EMPTY_STRING_SET, newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "but the following fields were unexpected:%n" + " <[\"address\"]>")); } @Test public void should_create_error_message_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "fields not found:%n" + " <[\"nickname\"]>%n" + "and fields not expected:%n" + " <[\"address\"]>")); } 
@Test public void should_not_display_unexpected_fields_when_there_are_none_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), EMPTY_STRING_SET); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "but could not find the following fields:%n" + " <[\"nickname\"]>")); } @Test public void should_not_display_fields_not_found_when_there_are_none_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), EMPTY_STRING_SET, newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(String.format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "but the following fields were unexpected:%n" + " <[\"address\"]>")); } }
xasx/assertj-core
src/test/java/org/assertj/core/error/ShouldOnlyHaveFields_create_Test.java
Java
apache-2.0
8,163
@extends('dashboard.main')

{{-- Page-specific stylesheets for the schedule calendar:
     iCheck checkboxes, custom scrollbar, datetime picker, bootstrap-select. --}}
@section('styles')
    <meta name="lang" content="{{ \Session::get('locale') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/iCheck/skins/square/blue.css') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.css') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/datetimepicker/jquery.datetimepicker.css') }}">
    <link rel="stylesheet" href="{{ URL::to('libs/vendor/bootstrap-select/dist/css/bootstrap-select.css') }}">
@endsection

{{-- Vendor scripts must load before libs/dashboard/schedule.js, which drives
     the calendar (clndr), pickers and modal below. --}}
@section('scripts')
    <script src="{{ URL::to('libs/vendor/moment/moment.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/moment/locale/en-gb.js') }}"></script>
    <script src="{{ URL::to('libs/dashboard/moment-ru.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/underscore/underscore.js') }}"></script>
    <script src="{{ URL::to('libs/dashboard/notify.min.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/bootstrap-select/dist/js/bootstrap-select.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.js') }}"></script>
    <script src="{{ URL::to('libs/vendor/datetimepicker/build/jquery.datetimepicker.full.js') }}"></script>
    <script src="{{ URL::asset('libs/vendor/clndr/src/clndr.js') }}"></script>
    <script src="{{ URL::asset('libs/vendor/iCheck/icheck.js') }}"></script>
    <script src="{{ URL::to('libs/dashboard/schedule.js') }}"></script>
@endsection

@section('navigation')
    @include('dashboard.components.nav')
@endsection

@section('body-class', 'page-schedule')

@section('content')
    {{-- Full-page calendar rendered by the clndr component/script. --}}
    <div id="full-clndr" class="clearfix">
        @include('dashboard.components.clndr')
    </div>

    {{-- Animated "Loading" indicator shown while the schedule is fetched. --}}
    <div id="fountainTextG"><div id="fountainTextG_1" class="fountainTextG">L</div><div id="fountainTextG_2" class="fountainTextG">o</div><div id="fountainTextG_3" class="fountainTextG">a</div><div id="fountainTextG_4" class="fountainTextG">d</div><div id="fountainTextG_5" class="fountainTextG">i</div><div id="fountainTextG_6" class="fountainTextG">n</div><div id="fountainTextG_7" class="fountainTextG">g</div></div>

    {{-- Event create/edit modal; schedule.js populates the playlist select
         and handles the save/delete buttons. --}}
    <div class="modal fade" tabindex="-1" role="dialog" id="modal">
        <div class="modal-dialog">
            <div class="modal-content">
                <div class="modal-header">
                    <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
                    <h4 class="modal-title">@lang('dashboard.components.scheduler.modal.modalTitle')</h4>
                </div>
                <div class="modal-body">
                    <form>
                        <div class="form-group">
                            <label for="Title">@lang('dashboard.components.scheduler.modal.title')</label>
                            <input type="text" class="form-control" id="Title" placeholder="@lang('dashboard.components.scheduler.modal.title')" title="@lang('dashboard.components.scheduler.modal.titleTip')">
                        </div>
                        <div class="form-group">
                            <label for="Description">@lang('dashboard.components.scheduler.modal.desc')</label>
                            <input type="text" class="form-control" id="Description" placeholder="@lang('dashboard.components.scheduler.modal.desc')">
                        </div>
                        <div class="form-group">
                            <label for="playlist">@lang('dashboard.components.scheduler.modal.playlist')</label>
                            <select class="selectpicker form-control" id="playlist" name="playlist">
                                <option value="0">@lang('dashboard.components.scheduler.modal.emptyPlaylist')</option>
                            </select>
                        </div>
                        <div class="form-group">
                            <input type="checkbox" id="repeat-day">
                            <label for="repeat-day">@lang('dashboard.components.scheduler.modal.repeat.everyDay')</label>
                        </div>
                        {{-- NOTE(review): the id "repeat-month" is labelled with the
                             "everyWeek" translation key -- confirm which one is intended. --}}
                        <div class="form-group">
                            <input type="checkbox" id="repeat-month">
                            <label for="repeat-month">@lang('dashboard.components.scheduler.modal.repeat.everyWeek')</label>
                            <div class="form-group" id="repeat-on">
                                <input type="checkbox" id="repeat-on-mon">
                                <label for="repeat-on-mon">@lang('dashboard.components.scheduler.modal.repeat.weeks.mon')</label>
                                <input type="checkbox" id="repeat-on-tue">
                                <label for="repeat-on-tue">@lang('dashboard.components.scheduler.modal.repeat.weeks.tue')</label>
                                <input type="checkbox" id="repeat-on-wed">
                                <label for="repeat-on-wed">@lang('dashboard.components.scheduler.modal.repeat.weeks.wed')</label>
                                <input type="checkbox" id="repeat-on-thu">
                                <label for="repeat-on-thu">@lang('dashboard.components.scheduler.modal.repeat.weeks.thu')</label>
                                <input type="checkbox" id="repeat-on-fri">
                                <label for="repeat-on-fri">@lang('dashboard.components.scheduler.modal.repeat.weeks.fri')</label>
                                <input type="checkbox" id="repeat-on-sat">
                                <label for="repeat-on-sat">@lang('dashboard.components.scheduler.modal.repeat.weeks.sat')</label>
                                <input type="checkbox" id="repeat-on-sun">
                                <label for="repeat-on-sun">@lang('dashboard.components.scheduler.modal.repeat.weeks.sun')</label>
                            </div>
                        </div>
                        <div class="form-group">
                            <label for="datetimepicker">@lang('dashboard.components.scheduler.modal.datetime')</label>
                            <input type="text" class="form-control" id="datetimepicker" placeholder="Datetime" autocomplete="off">
                        </div>
                    </form>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-danger pull-left delete-event">@lang('dashboard.core.buttons.delete')</button>
                    <button type="button" class="btn btn-default" data-dismiss="modal">@lang('dashboard.core.buttons.close')</button>
                    <button type="button" class="btn btn-primary save-changes">@lang('dashboard.core.buttons.save')</button>
                </div>
            </div><!-- /.modal-content -->
        </div><!-- /.modal-dialog -->
    </div><!-- /.modal -->
@endsection
artemsky/EvartFM
resources/views/dashboard/pages/schedule/index.blade.php
PHP
apache-2.0
6,950
package rule import ( "fmt" "github.com/mgechev/revive/lint" ) // ImportsBlacklistRule lints given else constructs. type ImportsBlacklistRule struct { blacklist map[string]bool } // Apply applies the rule to given file. func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { var failures []lint.Failure if file.IsTest() { return failures // skip, test file } if r.blacklist == nil { r.blacklist = make(map[string]bool, len(arguments)) for _, arg := range arguments { argStr, ok := arg.(string) if !ok { panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg)) } // we add quotes if not present, because when parsed, the value of the AST node, will be quoted if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' { argStr = fmt.Sprintf(`%q`, argStr) } r.blacklist[argStr] = true } } for _, is := range file.AST.Imports { path := is.Path if path != nil && r.blacklist[path.Value] { failures = append(failures, lint.Failure{ Confidence: 1, Failure: "should not use the following blacklisted import: " + path.Value, Node: is, Category: "imports", }) } } return failures } // Name returns the rule name. func (r *ImportsBlacklistRule) Name() string { return "imports-blacklist" }
nalind/buildah-1
tests/tools/vendor/github.com/mgechev/revive/rule/imports-blacklist.go
GO
apache-2.0
1,377
<?php
/**
 * Template handling helpers.
 *
 * [WeEngine System] Copyright (c) 2013 WE7.CC
 */
defined('IN_IA') or exit('Access Denied');

/**
 * Import globals and echo the rendered template directly.
 * @var int
 */
define('TEMPLATE_DISPLAY', 0);
/**
 * Import globals and return the rendered template as a string.
 * @var int
 */
define('TEMPLATE_FETCH', 1);
/**
 * Return the include path of the compiled template file.
 * @var int
 */
define('TEMPLATE_INCLUDEPATH', 2);
/**
 * Cache the output. @todo not implemented yet.
 * @var int
 */
define('TEMPLATE_CACHE', 3);

/**
 * Locate, (re)compile and render a template.
 *
 * @param string $filename template name relative to the theme directory
 * @param int $flag one of the TEMPLATE_* constants above
 * @return mixed rendered string for TEMPLATE_FETCH, compiled path for
 *               TEMPLATE_INCLUDEPATH, nothing for TEMPLATE_DISPLAY
 */
function template($filename, $flag = TEMPLATE_DISPLAY) {
	global $_W;
	$source = "{$_W['template']['source']}/{$_W['template']['current']}/{$filename}.html";
	// Fall back to the default theme when the current theme lacks the file.
	if (!is_file($source)) {
		$source = "{$_W['template']['source']}/default/{$filename}.html";
	}
	if (!is_file($source)) {
		exit("Error: template source '{$filename}' is not exist!");
	}
	$compile = "{$_W['template']['compile']}/{$_W['template']['current']}/{$filename}.tpl.php";
	// Recompile in development mode or whenever the source is newer than
	// the compiled copy.
	if (DEVELOPMENT || !is_file($compile) || filemtime($source) > filemtime($compile)) {
		template_compile($source, $compile);
	}
	switch ($flag) {
		case TEMPLATE_FETCH:
			extract($GLOBALS, EXTR_SKIP);
			// ob_get_clean() both fetches the contents and closes the buffer;
			// the previous ob_get_contents()/ob_clean() pair left the output
			// buffer open on every call, nesting a new buffer per render.
			ob_start();
			include $compile;
			return ob_get_clean();
		case TEMPLATE_INCLUDEPATH:
			return $compile;
		case TEMPLATE_CACHE:
			exit('暂未支持');
		case TEMPLATE_DISPLAY:
		default:
			extract($GLOBALS, EXTR_SKIP);
			include $compile;
			break;
	}
}

/**
 * Compile a template source file into executable PHP.
 *
 * @param string $from path of the template source
 * @param string $to path of the compiled output
 */
function template_compile($from, $to) {
	$path = dirname($to);
	if (!is_dir($path)) {
		mkdirs($path);
	}
	$content = template_parse(file_get_contents($from));
	file_put_contents($to, $content);
}

/**
 * Translate template syntax ({if}, {loop}, {$var}, ...) into PHP code.
 *
 * @param string $str raw template source
 * @return string compiled PHP source, prefixed with the IN_IA guard
 */
function template_parse($str) {
	$str = preg_replace('/<!--{(.+?)}-->/s', '{$1}', $str);
	$str = preg_replace('/{template\s+(.+?)}/', '<?php include template($1, TEMPLATE_INCLUDEPATH);?>', $str);
	$str = preg_replace('/{php\s+(.+?)}/', '<?php $1?>', $str);
	$str = preg_replace('/{if\s+(.+?)}/', '<?php if($1) { ?>', $str);
	$str = preg_replace('/{else}/', '<?php } else { ?>', $str);
	$str = preg_replace('/{else ?if\s+(.+?)}/', '<?php } else if($1) { ?>', $str);
	$str = preg_replace('/{\/if}/', '<?php } ?>', $str);
	$str = preg_replace('/{loop\s+(\S+)\s+(\S+)}/', '<?php if(is_array($1)) { foreach($1 as $2) { ?>', $str);
	$str = preg_replace('/{loop\s+(\S+)\s+(\S+)\s+(\S+)}/', '<?php if(is_array($1)) { foreach($1 as $2 => $3) { ?>', $str);
	$str = preg_replace('/{\/loop}/', '<?php } } ?>', $str);
	$str = preg_replace('/{(\$[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*)}/', '<?php echo $1;?>', $str);
	$str = preg_replace('/{(\$[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff\[\]\'\"\$]*)}/', '<?php echo $1;?>', $str);
	// The /e (eval) modifier was deprecated in PHP 5.5 and removed in PHP 7;
	// preg_replace_callback is the supported replacement.
	$str = preg_replace_callback('/<\?php([^\?]+)\?>/s', function ($matches) {
		return template_addquote('<?php' . $matches[1] . '?>');
	}, $str);
	$str = preg_replace('/{([A-Z_\x7f-\xff][A-Z0-9_\x7f-\xff]*)}/s', '<?php echo $1;?>', $str);
	$str = str_replace('{##', '{', $str);
	$str = str_replace('##}', '}', $str);
	$str = "<?php defined('IN_IA') or exit('Access Denied');?>" . $str;
	return $str;
}

/**
 * Quote bare array keys ([key] -> ['key']) inside a compiled PHP fragment.
 *
 * @param string $code compiled PHP fragment
 * @return string fragment with quoted array keys
 */
function template_addquote($code) {
	$code = preg_replace('/\[([a-zA-Z0-9_\-\.\x7f-\xff]+)\]/s', "['$1']", $code);
	return str_replace('\\\"', '\"', $code);
}
royalwang/saivi
tpl/User/default/common/wxq/source/function/template.func.php
PHP
apache-2.0
3,255
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.diff.impl.settings; import com.intellij.icons.AllIcons; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.ToggleAction; import com.intellij.openapi.editor.Editor; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collection; /** * The "gear" action allowing to configure merge tool visual preferences, such as displaying whitespaces, line numbers and soft wraps. * * @see DiffMergeSettings */ public class DiffMergeSettingsAction extends ActionGroup { @NotNull private final Collection<Editor> myEditors; @NotNull private final DiffMergeSettings mySettings; public DiffMergeSettingsAction(@NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) { super("Settings", null, AllIcons.General.GearPlain); setPopup(true); myEditors = editors; mySettings = settings; } @NotNull @Override public AnAction[] getChildren(@Nullable AnActionEvent e) { return new AnAction[] { new DiffMergeToggleAction("EditorToggleShowWhitespaces", DiffMergeEditorSetting.WHITESPACES, myEditors, mySettings), new DiffMergeToggleAction("EditorToggleShowLineNumbers", DiffMergeEditorSetting.LINE_NUMBERS, myEditors, mySettings), new DiffMergeToggleAction("EditorToggleShowIndentLines", DiffMergeEditorSetting.INDENT_LINES, myEditors, mySettings), new DiffMergeToggleAction("EditorToggleUseSoftWraps", DiffMergeEditorSetting.SOFT_WRAPS, myEditors, mySettings) }; } private static class DiffMergeToggleAction extends ToggleAction { @NotNull private final DiffMergeEditorSetting mySetting; @NotNull private final Collection<Editor> myEditors; @NotNull private final DiffMergeSettings mySettings; 
private DiffMergeToggleAction(@NotNull String actionId, @NotNull DiffMergeEditorSetting setting, @NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) { super(ActionsBundle.actionText(actionId), ActionsBundle.actionDescription(actionId), null); mySetting = setting; myEditors = editors; mySettings = settings; } @Override public boolean isSelected(@NotNull AnActionEvent e) { return getPreference(mySetting); } @Override public void setSelected(@NotNull AnActionEvent e, boolean state) { setPreference(mySetting, state); for (Editor editor : myEditors) { mySetting.apply(editor, state); } } private void setPreference(DiffMergeEditorSetting preference, boolean state) { mySettings.setPreference(preference, state); } private boolean getPreference(DiffMergeEditorSetting preference) { return mySettings.getPreference(preference); } } }
goodwinnk/intellij-community
platform/platform-impl/src/com/intellij/openapi/diff/impl/settings/DiffMergeSettingsAction.java
Java
apache-2.0
3,098
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.grid.node.local; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.junit.Test; import org.openqa.selenium.Capabilities; import org.openqa.selenium.ImmutableCapabilities; import org.openqa.selenium.events.local.GuavaEventBus; import org.openqa.selenium.grid.data.CreateSessionRequest; import org.openqa.selenium.grid.data.CreateSessionResponse; import org.openqa.selenium.grid.data.Session; import org.openqa.selenium.grid.node.Node; import org.openqa.selenium.grid.testing.TestSessionFactory; import org.openqa.selenium.json.Json; import org.openqa.selenium.remote.ErrorCodes; import org.openqa.selenium.remote.http.HttpRequest; import org.openqa.selenium.remote.tracing.DefaultTestTracer; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; import static java.nio.charset.StandardCharsets.UTF_8; import static org.assertj.core.api.Assertions.assertThat; import static org.openqa.selenium.json.Json.MAP_TYPE; import static org.openqa.selenium.remote.Dialect.OSS; import static org.openqa.selenium.remote.Dialect.W3C; import static org.openqa.selenium.remote.http.Contents.utf8String; import 
static org.openqa.selenium.remote.http.HttpMethod.POST; public class CreateSessionTest { private final Json json = new Json(); private final Capabilities stereotype = new ImmutableCapabilities("cheese", "brie"); @Test public void shouldAcceptAW3CPayload() throws URISyntaxException { String payload = json.toJson(ImmutableMap.of( "capabilities", ImmutableMap.of( "alwaysMatch", ImmutableMap.of("cheese", "brie")))); HttpRequest request = new HttpRequest(POST, "/session"); request.setContent(utf8String(payload)); URI uri = new URI("http://example.com"); Node node = LocalNode.builder( DefaultTestTracer.createTracer(), new GuavaEventBus(), uri, uri, null) .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps))) .build(); CreateSessionResponse sessionResponse = node.newSession( new CreateSessionRequest( ImmutableSet.of(W3C), stereotype, ImmutableMap.of())) .orElseThrow(() -> new AssertionError("Unable to create session")); Map<String, Object> all = json.toType( new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8), MAP_TYPE); // Ensure that there's no status field (as this is used by the protocol handshake to determine // whether the session is using the JWP or the W3C dialect. 
assertThat(all.containsKey("status")).isFalse(); // Now check the fields required by the spec Map<?, ?> value = (Map<?, ?>) all.get("value"); assertThat(value.get("sessionId")).isInstanceOf(String.class); assertThat(value.get("capabilities")).isInstanceOf(Map.class); } @Test public void shouldOnlyAcceptAJWPPayloadIfConfiguredTo() { // TODO: implement shouldOnlyAcceptAJWPPayloadIfConfiguredTo test } @Test public void ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured() { // TODO: implement ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured test } @Test public void ifOnlyJWPPayloadSentResponseShouldBeJWPOnlyIfJWPConfigured() throws URISyntaxException { String payload = json.toJson(ImmutableMap.of( "desiredCapabilities", ImmutableMap.of("cheese", "brie"))); HttpRequest request = new HttpRequest(POST, "/session"); request.setContent(utf8String(payload)); URI uri = new URI("http://example.com"); Node node = LocalNode.builder( DefaultTestTracer.createTracer(), new GuavaEventBus(), uri, uri, null) .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps))) .build(); CreateSessionResponse sessionResponse = node.newSession( new CreateSessionRequest( ImmutableSet.of(OSS), stereotype, ImmutableMap.of())) .orElseThrow(() -> new AssertionError("Unable to create session")); Map<String, Object> all = json.toType( new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8), MAP_TYPE); // The status field is used by local ends to determine whether or not the session is a JWP one. assertThat(all.get("status")).matches(obj -> ((Number) obj).intValue() == ErrorCodes.SUCCESS); // The session id is a top level field assertThat(all.get("sessionId")).isInstanceOf(String.class); // And the value should contain the capabilities. 
assertThat(all.get("value")).isInstanceOf(Map.class); } @Test public void shouldPreferUsingTheW3CProtocol() throws URISyntaxException { String payload = json.toJson(ImmutableMap.of( "desiredCapabilities", ImmutableMap.of( "cheese", "brie"), "capabilities", ImmutableMap.of( "alwaysMatch", ImmutableMap.of("cheese", "brie")))); HttpRequest request = new HttpRequest(POST, "/session"); request.setContent(utf8String(payload)); URI uri = new URI("http://example.com"); Node node = LocalNode.builder( DefaultTestTracer.createTracer(), new GuavaEventBus(), uri, uri, null) .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps))) .build(); CreateSessionResponse sessionResponse = node.newSession( new CreateSessionRequest( ImmutableSet.of(W3C), stereotype, ImmutableMap.of())) .orElseThrow(() -> new AssertionError("Unable to create session")); Map<String, Object> all = json.toType( new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8), MAP_TYPE); // Ensure that there's no status field (as this is used by the protocol handshake to determine // whether the session is using the JWP or the W3C dialect. assertThat(all.containsKey("status")).isFalse(); // Now check the fields required by the spec Map<?, ?> value = (Map<?, ?>) all.get("value"); assertThat(value.get("sessionId")).isInstanceOf(String.class); assertThat(value.get("capabilities")).isInstanceOf(Map.class); } @Test public void sessionDataShouldBeCorrectRegardlessOfPayloadProtocol() { // TODO: implement sessionDataShouldBeCorrectRegardlessOfPayloadProtocol test } @Test public void shouldSupportProtocolConversion() { // TODO: implement shouldSupportProtocolConversion test } }
asolntsev/selenium
java/server/test/org/openqa/selenium/grid/node/local/CreateSessionTest.java
Java
apache-2.0
7,428
package frc.team5333.lib;

import java.util.HashMap;

/**
 * A static class that contains all kinds of Launch data for the robot,
 * such as network ports, current state and more.
 *
 * @author Jaci
 */
public class RobotData {

    /**
     * A blackboard containing objects that are common throughout the
     * program, along with their String identifier.
     *
     * <p>NOTE(review): this is shared mutable global state and a plain
     * {@link HashMap} is not thread-safe — confirm that all access happens
     * from a single thread, or switch to a ConcurrentHashMap.
     */
    public static HashMap<String, Object> blackboard = new HashMap<String, Object>();

    /** Static utility holder — not instantiable. */
    private RobotData() {
    }
}
FRC-Team5333/2015-RecycleRush
FRC2015/src/main/java/frc/team5333/lib/RobotData.java
Java
apache-2.0
455
/**
 * Toggles the admin-comment editor between raw text ("text") and a server-rendered
 * preview ("links"). The preview HTML is fetched from ajax.php?action=preview.
 * NOTE(review): $(...) is the site's custom DOM wrapper (ghide/gshow/raw), not stock
 * jQuery — confirm against the site's global JS helpers.
 *
 * @param to either "text" (show the editable area) or "links" (show the preview)
 */
function ChangeTo(to) {
  if (to == "text") {
    // Show the editable comment, hide the rendered preview.
    $('#admincommentlinks').ghide();
    $('#admincomment').gshow();
    resize('admincomment');
    // Re-point every toggle button so the next click switches to the preview.
    var buttons = document.getElementsByName('admincommentbutton');
    for (var i = 0; i < buttons.length; i++) {
      buttons[i].setAttribute('onclick', "ChangeTo('links'); return false;");
    }
  } else if (to == "links") {
    // Ask the server to render the current form contents, then swap the views.
    ajax.post("ajax.php?action=preview", "form", function(response) {
      $('#admincommentlinks').raw().innerHTML = response;
      $('#admincomment').ghide();
      $('#admincommentlinks').gshow();
      // Re-point the toggle buttons back to the text view.
      var buttons = document.getElementsByName('admincommentbutton');
      for (var i = 0; i < buttons.length; i++) {
        buttons[i].setAttribute('onclick', "ChangeTo('text'); return false;");
      }
    })
  }
}

/**
 * Clears a checkbox if it is disabled, so a disabled option can never stay
 * silently checked.
 *
 * @param checkbox a raw DOM checkbox input element
 */
function UncheckIfDisabled(checkbox) {
  if (checkbox.disabled) {
    checkbox.checked = false;
  }
}

/**
 * Enforces the dependency rules between the paranoia (privacy) checkboxes on the
 * user settings page: a "list" checkbox is only enabled when its corresponding
 * "count" checkbox is checked, and settings that are deducible from other revealed
 * settings are force-checked and disabled.
 *
 * Naming convention (inferred from the selectors — verify against the settings
 * template): *_c = "show count", *_l / *_list = "show list".
 */
function AlterParanoia() {
  // Required Ratio is almost deducible from downloaded, the count of seeding and the count of snatched;
  // we "warn" the user by automatically checking the required-ratio box when they are
  // revealing that information elsewhere.
  // Bail out if the paranoia form is not present on this page.
  if (!$('input[name=p_ratio]').raw()) {
    return;
  }
  // Downloaded is revealed either directly, or indirectly via uploaded + ratio.
  var showDownload = $('input[name=p_downloaded]').raw().checked || ($('input[name=p_uploaded]').raw().checked && $('input[name=p_ratio]').raw().checked);
  if (($('input[name=p_seeding_c]').raw().checked) && ($('input[name=p_snatched_c]').raw().checked) && showDownload) {
    // Required ratio is deducible — force it visible and lock the checkbox.
    $('input[type=checkbox][name=p_requiredratio]').raw().checked = true;
    $('input[type=checkbox][name=p_requiredratio]').raw().disabled = true;
  } else {
    $('input[type=checkbox][name=p_requiredratio]').raw().disabled = false;
  }
  // Each "list" checkbox is only meaningful when its "count" checkbox is on.
  $('input[name=p_torrentcomments_l]').raw().disabled = !$('input[name=p_torrentcomments_c]').raw().checked;
  $('input[name=p_collagecontribs_l]').raw().disabled = !$('input[name=p_collagecontribs_c]').raw().checked;
  // Request lists additionally require both the count and the bounty to be revealed.
  $('input[name=p_requestsfilled_list]').raw().disabled = !($('input[name=p_requestsfilled_count]').raw().checked && $('input[name=p_requestsfilled_bounty]').raw().checked);
  $('input[name=p_requestsvoted_list]').raw().disabled = !($('input[name=p_requestsvoted_count]').raw().checked && $('input[name=p_requestsvoted_bounty]').raw().checked);
  $('input[name=p_uploads_l]').raw().disabled = !$('input[name=p_uploads_c]').raw().checked;
  $('input[name=p_uniquegroups_l]').raw().disabled = !$('input[name=p_uniquegroups_c]').raw().checked;
  $('input[name=p_perfectflacs_l]').raw().disabled = !$('input[name=p_perfectflacs_c]').raw().checked;
  $('input[name=p_seeding_l]').raw().disabled = !$('input[name=p_seeding_c]').raw().checked;
  $('input[name=p_leeching_l]').raw().disabled = !$('input[name=p_leeching_c]').raw().checked;
  $('input[name=p_snatched_l]').raw().disabled = !$('input[name=p_snatched_c]').raw().checked;
  // Anything we just disabled must also be unchecked.
  UncheckIfDisabled($('input[name=p_torrentcomments_l]').raw());
  UncheckIfDisabled($('input[name=p_collagecontribs_l]').raw());
  UncheckIfDisabled($('input[name=p_requestsfilled_list]').raw());
  UncheckIfDisabled($('input[name=p_requestsvoted_list]').raw());
  UncheckIfDisabled($('input[name=p_uploads_l]').raw());
  UncheckIfDisabled($('input[name=p_uniquegroups_l]').raw());
  UncheckIfDisabled($('input[name=p_perfectflacs_l]').raw());
  UncheckIfDisabled($('input[name=p_seeding_l]').raw());
  UncheckIfDisabled($('input[name=p_leeching_l]').raw());
  UncheckIfDisabled($('input[name=p_snatched_l]').raw());
  // Unique groups, "Perfect" FLACs and artists added are deducible from the list of uploads.
  if ($('input[name=p_uploads_l]').raw().checked) {
    // Upload list is public — force the derived settings on and lock them.
    $('input[name=p_uniquegroups_c]').raw().checked = true;
    $('input[name=p_uniquegroups_l]').raw().checked = true;
    $('input[name=p_uniquegroups_c]').raw().disabled = true;
    $('input[name=p_uniquegroups_l]').raw().disabled = true;
    $('input[name=p_perfectflacs_c]').raw().checked = true;
    $('input[name=p_perfectflacs_l]').raw().checked = true;
    $('input[name=p_perfectflacs_c]').raw().disabled = true;
    $('input[name=p_perfectflacs_l]').raw().disabled = true;
    $('input[type=checkbox][name=p_artistsadded]').raw().checked = true;
    $('input[type=checkbox][name=p_artistsadded]').raw().disabled = true;
  } else {
    // Upload list hidden — the derived settings become user-controllable again,
    // except that the unique-groups list still depends on its count checkbox above.
    $('input[name=p_uniquegroups_c]').raw().disabled = false;
    $('input[name=p_uniquegroups_l]').raw().checked = false;
    $('input[name=p_uniquegroups_l]').raw().disabled = true;
    $('input[name=p_perfectflacs_c]').raw().disabled = false;
    $('input[type=checkbox][name=p_artistsadded]').raw().disabled = false;
  }
  if ($('input[name=p_collagecontribs_l]').raw().checked) {
    // The collage list is deducible from the contribution list — force and lock it.
    $('input[name=p_collages_c]').raw().disabled = true;
    $('input[name=p_collages_l]').raw().disabled = true;
    $('input[name=p_collages_c]').raw().checked = true;
    $('input[name=p_collages_l]').raw().checked = true;
  } else {
    $('input[name=p_collages_c]').raw().disabled = false;
    $('input[name=p_collages_l]').raw().disabled = !$('input[name=p_collages_c]').raw().checked;
    UncheckIfDisabled($('input[name=p_collages_l]').raw());
  }
}

/**
 * Resets every paranoia control on the page to a uniform state.
 *
 * @param checkbox value for each p_* checkbox: true, false, or the special value 3,
 *        which checks only the "count" boxes and unchecks the "_list"/"_l" boxes
 * @param drops which option to select in each p_* dropdown:
 *        0 = first option, 1 = second-to-last, 2 = last
 */
function ParanoiaReset(checkbox, drops) {
  var selects = $('select');
  for (var i = 0; i < selects.results(); i++) {
    if (selects.raw(i).name.match(/^p_/)) {
      if (drops == 0) {
        selects.raw(i).selectedIndex = 0;
      } else if (drops == 1) {
        selects.raw(i).selectedIndex = selects.raw(i).options.length - 2;
      } else if (drops == 2) {
        selects.raw(i).selectedIndex = selects.raw(i).options.length - 1;
      }
      // NOTE(review): re-runs the full dependency pass per control — redundant
      // but harmless; kept as-is.
      AlterParanoia();
    }
  }
  var checkboxes = $(':checkbox');
  for (var i = 0; i < checkboxes.results(); i++) {
    // p_lastseen is deliberately excluded from bulk resets.
    if (checkboxes.raw(i).name.match(/^p_/) && (checkboxes.raw(i).name != 'p_lastseen')) {
      if (checkbox == 3) {
        // "Stats only" mode: reveal counts, hide listings.
        checkboxes.raw(i).checked = !(checkboxes.raw(i).name.match(/_list$/) || checkboxes.raw(i).name.match(/_l$/));
      } else {
        checkboxes.raw(i).checked = checkbox;
      }
      AlterParanoia();
    }
  }
}

/** Paranoia off: reveal everything (check all boxes). */
function ParanoiaResetOff() {
  ParanoiaReset(true, 0);
}

/** Stats-only paranoia: reveal counts but hide listings. */
function ParanoiaResetStats() {
  ParanoiaReset(3, 0);
  $('input[name=p_collages_l]').raw().checked = false;
}

/** Full paranoia: hide everything (uncheck all boxes). */
function ParanoiaResetOn() {
  ParanoiaReset(false, 0);
  $('input[name=p_collages_c]').raw().checked = false;
  $('input[name=p_collages_l]').raw().checked = false;
}
// Run the paranoia dependency pass once the DOM is ready so the controls start consistent.
addDOMLoadEvent(AlterParanoia);

/**
 * Shows/hides the "reduce warning" row depending on the selected warning duration.
 * The sentinel option value '---' means "adjust/reduce" and reveals the extra row.
 *
 * @param selector the warning-duration <select> element
 */
function ToggleWarningAdjust(selector) {
  if (selector.options[selector.selectedIndex].value == '---') {
    $('#ReduceWarningTR').gshow();
    $('#ReduceWarning').raw().disabled = false;
  } else {
    $('#ReduceWarningTR').ghide();
    $('#ReduceWarning').raw().disabled = true;
  }
}

addDOMLoadEvent(ToggleIdenticons);

/**
 * Shows the identicon option block only when the "disable avatars" dropdown is set
 * to one of the identicon modes (indexes 2 and 3 — presumably "show identicons";
 * verify against the settings template).
 */
function ToggleIdenticons() {
  var disableAvatars = $('#disableavatars');
  if (disableAvatars.size()) {
    var selected = disableAvatars[0].selectedIndex;
    if (selected == 2 || selected == 3) {
      $('#identicons').gshow();
    } else {
      $('#identicons').ghide();
    }
  }
}

/**
 * Submit handler for the user settings form: asks for confirmation before a
 * passkey reset, then delegates to the page's form validator.
 *
 * @returns {boolean} false to abort submission, otherwise the result of formVal()
 */
function userform_submit() {
  if ($('#resetpasskey').is(':checked')) {
    if (!confirm('Are you sure you want to reset your passkey?')) {
      return false;
    }
  }
  return formVal();
}

/**
 * Toggles the passkey display between the placeholder text 'View' and the
 * actual passkey value.
 *
 * @param key the user's passkey string
 */
function togglePassKey(key) {
  if ($('#passkey').raw().innerHTML == 'View') {
    $('#passkey').raw().innerHTML = key;
  } else {
    $('#passkey').raw().innerHTML = 'View';
  }
}

/**
 * Fetches a user's community statistics via AJAX and renders them into the
 * .user_commstats placeholders.
 *
 * @param userid numeric id of the user whose stats to load
 */
function commStats(userid) {
  $('.user_commstats').html('Loading...');
  ajax.get('ajax.php?action=community_stats&userid=' + userid, function(JSONresponse) {
    var response = JSON.parse(JSONresponse) || false;
    if (!response || response.status == 'failure') {
      $('.user_commstats').html('An error occurred');
      return;
    }
    displayCommStats(response.response);
  });
}

/**
 * Writes each returned stat into its matching #user_commstats_<name> element.
 * Fields whose value is exactly false are hidden by the user's paranoia and skipped.
 * NOTE(review): `x` is declared without var/let and leaks as a global — kept as-is.
 *
 * @param stats map of stat name -> value (or false when hidden)
 */
function displayCommStats(stats) {
  var baseid = '#user_commstats_';
  for (x in stats) {
    if (stats[x] === false) {
      continue;
    }
    switch (x) {
      case 'leeching':
        $(baseid + x).html(stats[x]);
        break;
      case 'seeding':
        $(baseid + x).html(stats[x]);
        break;
      case 'downloaded':
        $(baseid + x).html(stats[x]);
        break;
      case 'snatched':
        $(baseid + x).html(stats[x]);
        break;
      case 'usnatched':
        // Unseen counts are rendered parenthesized next to the totals.
        $(baseid + x).html('(' + stats[x] + ')');
        break;
      case 'udownloaded':
        $(baseid + x).html('(' + stats[x] + ')');
        break;
      case 'seedingperc':
        $(baseid + x).html('(' + stats[x] + '%)');
        break;
    }
  }
}

// "Generate random password" button: fills the change-password field with a
// 15-character string drawn from the charset below.
// NOTE(review): Math.random() is not cryptographically secure — consider
// crypto.getRandomValues for password generation.
$(document).ready(function() {
  $("#random_password").click(function() {
    var length = 15,
        charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_+-=<>?",
        password = "";
    for (var i = 0, n = charset.length; i < length; ++i) {
      password += charset.charAt(Math.floor(Math.random() * n));
    }
    $('#change_password').val(password);
  });
});
tisnats/tisnats.com
static/functions/user.js
JavaScript
apache-2.0
8,556
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Linq; using System.Reflection; using System.Reflection.Metadata; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeGen; using Microsoft.CodeAnalysis.Collections; using Microsoft.CodeAnalysis.CSharp.Emit; using Microsoft.CodeAnalysis.CSharp.Symbols; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Emit; using Microsoft.CodeAnalysis.Symbols; using Microsoft.CodeAnalysis.Text; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.CSharp { /// <summary> /// The compilation object is an immutable representation of a single invocation of the /// compiler. Although immutable, a compilation is also on-demand, and will realize and cache /// data as necessary. A compilation can produce a new compilation from existing compilation /// with the application of small deltas. In many cases, it is more efficient than creating a /// new compilation from scratch, as the new compilation can reuse information from the old /// compilation. /// </summary> public sealed partial class CSharpCompilation : Compilation { // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! // // Changes to the public interface of this class should remain synchronized with the VB // version. Do not make any changes to the public interface without making the corresponding // change to the VB version. // // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
internal static readonly ParallelOptions DefaultParallelOptions = new ParallelOptions(); private readonly CSharpCompilationOptions _options; private readonly Lazy<Imports> _globalImports; private readonly Lazy<AliasSymbol> _globalNamespaceAlias; // alias symbol used to resolve "global::". private readonly Lazy<ImplicitNamedTypeSymbol> _scriptClass; private readonly CSharpCompilation _previousSubmission; // All imports (using directives and extern aliases) in syntax trees in this compilation. // NOTE: We need to de-dup since the Imports objects that populate the list may be GC'd // and re-created. private ConcurrentSet<ImportInfo> _lazyImportInfos; // Cache the CLS diagnostics for the whole compilation so they aren't computed repeatedly. // NOTE: Presently, we do not cache the per-tree diagnostics. private ImmutableArray<Diagnostic> _lazyClsComplianceDiagnostics; private Conversions _conversions; internal Conversions Conversions { get { if (_conversions == null) { Interlocked.CompareExchange(ref _conversions, new BuckStopsHereBinder(this).Conversions, null); } return _conversions; } } /// <summary> /// Manages anonymous types declared in this compilation. Unifies types that are structurally equivalent. /// </summary> private readonly AnonymousTypeManager _anonymousTypeManager; private NamespaceSymbol _lazyGlobalNamespace; internal readonly BuiltInOperators builtInOperators; /// <summary> /// The <see cref="SourceAssemblySymbol"/> for this compilation. Do not access directly, use Assembly property /// instead. This field is lazily initialized by ReferenceManager, ReferenceManager.CacheLockObject must be locked /// while ReferenceManager "calculates" the value and assigns it, several threads must not perform duplicate /// "calculation" simultaneously. /// </summary> private SourceAssemblySymbol _lazyAssemblySymbol; /// <summary> /// Holds onto data related to reference binding. 
/// The manager is shared among multiple compilations that we expect to have the same result of reference binding. /// In most cases this can be determined without performing the binding. If the compilation however contains a circular /// metadata reference (a metadata reference that refers back to the compilation) we need to avoid sharing of the binding results. /// We do so by creating a new reference manager for such compilation. /// </summary> private ReferenceManager _referenceManager; private readonly SyntaxAndDeclarationManager _syntaxAndDeclarations; /// <summary> /// Contains the main method of this assembly, if there is one. /// </summary> private EntryPoint _lazyEntryPoint; /// <summary> /// The set of trees for which a <see cref="CompilationUnitCompletedEvent"/> has been added to the queue. /// </summary> private HashSet<SyntaxTree> _lazyCompilationUnitCompletedTrees; public override string Language { get { return LanguageNames.CSharp; } } public override bool IsCaseSensitive { get { return true; } } /// <summary> /// The options the compilation was created with. /// </summary> public new CSharpCompilationOptions Options { get { return _options; } } internal AnonymousTypeManager AnonymousTypeManager { get { return _anonymousTypeManager; } } internal override CommonAnonymousTypeManager CommonAnonymousTypeManager { get { return AnonymousTypeManager; } } /// <summary> /// True when the compiler is run in "strict" mode, in which it enforces the language specification /// in some cases even at the expense of full compatibility. Such differences typically arise when /// earlier versions of the compiler failed to enforce the full language specification. /// </summary> internal bool FeatureStrictEnabled => Feature("strict") != null; /// <summary> /// The language version that was used to parse the syntax trees of this compilation. 
/// </summary> public LanguageVersion LanguageVersion { get; } public override INamedTypeSymbol CreateErrorTypeSymbol(INamespaceOrTypeSymbol container, string name, int arity) { return new ExtendedErrorTypeSymbol((NamespaceOrTypeSymbol)container, name, arity, null); } #region Constructors and Factories private static readonly CSharpCompilationOptions s_defaultOptions = new CSharpCompilationOptions(OutputKind.ConsoleApplication); private static readonly CSharpCompilationOptions s_defaultSubmissionOptions = new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary); /// <summary> /// Creates a new compilation from scratch. Methods such as AddSyntaxTrees or AddReferences /// on the returned object will allow to continue building up the Compilation incrementally. /// </summary> /// <param name="assemblyName">Simple assembly name.</param> /// <param name="syntaxTrees">The syntax trees with the source code for the new compilation.</param> /// <param name="references">The references for the new compilation.</param> /// <param name="options">The compiler options to use.</param> /// <returns>A new compilation.</returns> public static CSharpCompilation Create( string assemblyName, IEnumerable<SyntaxTree> syntaxTrees = null, IEnumerable<MetadataReference> references = null, CSharpCompilationOptions options = null) { return Create( assemblyName, options ?? s_defaultOptions, syntaxTrees, references, previousSubmission: null, returnType: null, hostObjectType: null, isSubmission: false); } /// <summary> /// Creates a new compilation that can be used in scripting. /// </summary> public static CSharpCompilation CreateSubmission( string assemblyName, SyntaxTree syntaxTree = null, IEnumerable<MetadataReference> references = null, CSharpCompilationOptions options = null, Compilation previousSubmission = null, Type returnType = null, Type hostObjectType = null) { CheckSubmissionOptions(options); return Create( assemblyName, options ?? 
s_defaultSubmissionOptions, (syntaxTree != null) ? new[] { syntaxTree } : SpecializedCollections.EmptyEnumerable<SyntaxTree>(), references, (CSharpCompilation)previousSubmission, returnType, hostObjectType, isSubmission: true); } private static CSharpCompilation Create( string assemblyName, CSharpCompilationOptions options, IEnumerable<SyntaxTree> syntaxTrees, IEnumerable<MetadataReference> references, CSharpCompilation previousSubmission, Type returnType, Type hostObjectType, bool isSubmission) { Debug.Assert(options != null); CheckAssemblyName(assemblyName); var validatedReferences = ValidateReferences<CSharpCompilationReference>(references); ValidateSubmissionParameters(previousSubmission, returnType, ref hostObjectType); var compilation = new CSharpCompilation( assemblyName, options, validatedReferences, previousSubmission, returnType, hostObjectType, isSubmission, referenceManager: null, reuseReferenceManager: false, syntaxAndDeclarations: new SyntaxAndDeclarationManager( ImmutableArray<SyntaxTree>.Empty, options.ScriptClassName, options.SourceReferenceResolver, CSharp.MessageProvider.Instance, isSubmission, state: null)); if (syntaxTrees != null) { compilation = compilation.AddSyntaxTrees(syntaxTrees); } Debug.Assert((object)compilation._lazyAssemblySymbol == null); return compilation; } private CSharpCompilation( string assemblyName, CSharpCompilationOptions options, ImmutableArray<MetadataReference> references, CSharpCompilation previousSubmission, Type submissionReturnType, Type hostObjectType, bool isSubmission, ReferenceManager referenceManager, bool reuseReferenceManager, SyntaxAndDeclarationManager syntaxAndDeclarations, AsyncQueue<CompilationEvent> eventQueue = null) : base(assemblyName, references, SyntaxTreeCommonFeatures(syntaxAndDeclarations.ExternalSyntaxTrees), submissionReturnType, hostObjectType, isSubmission, eventQueue) { _wellKnownMemberSignatureComparer = new WellKnownMembersSignatureComparer(this); _options = options; 
this.builtInOperators = new BuiltInOperators(this); _scriptClass = new Lazy<ImplicitNamedTypeSymbol>(BindScriptClass); _globalImports = new Lazy<Imports>(BindGlobalUsings); _globalNamespaceAlias = new Lazy<AliasSymbol>(CreateGlobalNamespaceAlias); _anonymousTypeManager = new AnonymousTypeManager(this); this.LanguageVersion = CommonLanguageVersion(syntaxAndDeclarations.ExternalSyntaxTrees); if (isSubmission) { Debug.Assert(previousSubmission == null || previousSubmission.HostObjectType == hostObjectType); _previousSubmission = previousSubmission; } else { Debug.Assert(previousSubmission == null && submissionReturnType == null && hostObjectType == null); } if (reuseReferenceManager) { referenceManager.AssertCanReuseForCompilation(this); _referenceManager = referenceManager; } else { _referenceManager = new ReferenceManager( MakeSourceAssemblySimpleName(), this.Options.AssemblyIdentityComparer, observedMetadata: referenceManager?.ObservedMetadata); } _syntaxAndDeclarations = syntaxAndDeclarations; Debug.Assert((object)_lazyAssemblySymbol == null); if (EventQueue != null) EventQueue.Enqueue(new CompilationStartedEvent(this)); } internal override void ValidateDebugEntryPoint(IMethodSymbol debugEntryPoint, DiagnosticBag diagnostics) { Debug.Assert(debugEntryPoint != null); // Debug entry point has to be a method definition from this compilation. var methodSymbol = debugEntryPoint as MethodSymbol; if (methodSymbol?.DeclaringCompilation != this || !methodSymbol.IsDefinition) { diagnostics.Add(ErrorCode.ERR_DebugEntryPointNotSourceMethodDefinition, Location.None); } } private static LanguageVersion CommonLanguageVersion(ImmutableArray<SyntaxTree> syntaxTrees) { LanguageVersion? 
result = null; foreach (var tree in syntaxTrees) { var version = ((CSharpParseOptions)tree.Options).LanguageVersion; if (result == null) { result = version; } else if (result != version) { throw new ArgumentException(CodeAnalysisResources.InconsistentLanguageVersions, nameof(syntaxTrees)); } } return result ?? CSharpParseOptions.Default.LanguageVersion; } /// <summary> /// Create a duplicate of this compilation with different symbol instances. /// </summary> public new CSharpCompilation Clone() { return new CSharpCompilation( this.AssemblyName, _options, this.ExternalReferences, _previousSubmission, this.SubmissionReturnType, this.HostObjectType, this.IsSubmission, _referenceManager, reuseReferenceManager: true, syntaxAndDeclarations: _syntaxAndDeclarations); } private CSharpCompilation Update( ReferenceManager referenceManager, bool reuseReferenceManager, SyntaxAndDeclarationManager syntaxAndDeclarations) { return new CSharpCompilation( this.AssemblyName, _options, this.ExternalReferences, _previousSubmission, this.SubmissionReturnType, this.HostObjectType, this.IsSubmission, referenceManager, reuseReferenceManager, syntaxAndDeclarations); } /// <summary> /// Creates a new compilation with the specified name. /// </summary> public new CSharpCompilation WithAssemblyName(string assemblyName) { CheckAssemblyName(assemblyName); // Can't reuse references since the source assembly name changed and the referenced symbols might // have internals-visible-to relationship with this compilation or they might had a circular reference // to this compilation. return new CSharpCompilation( assemblyName, _options, this.ExternalReferences, _previousSubmission, this.SubmissionReturnType, this.HostObjectType, this.IsSubmission, _referenceManager, reuseReferenceManager: assemblyName == this.AssemblyName, syntaxAndDeclarations: _syntaxAndDeclarations); } /// <summary> /// Creates a new compilation with the specified references. 
/// </summary> /// <remarks> /// The new <see cref="CSharpCompilation"/> will query the given <see cref="MetadataReference"/> for the underlying /// metadata as soon as the are needed. /// /// The new compilation uses whatever metadata is currently being provided by the <see cref="MetadataReference"/>. /// E.g. if the current compilation references a metadata file that has changed since the creation of the compilation /// the new compilation is going to use the updated version, while the current compilation will be using the previous (it doesn't change). /// </remarks> public new CSharpCompilation WithReferences(IEnumerable<MetadataReference> references) { // References might have changed, don't reuse reference manager. // Don't even reuse observed metadata - let the manager query for the metadata again. return new CSharpCompilation( this.AssemblyName, _options, ValidateReferences<CSharpCompilationReference>(references), _previousSubmission, this.SubmissionReturnType, this.HostObjectType, this.IsSubmission, referenceManager: null, reuseReferenceManager: false, syntaxAndDeclarations: _syntaxAndDeclarations); } /// <summary> /// Creates a new compilation with the specified references. /// </summary> public new CSharpCompilation WithReferences(params MetadataReference[] references) { return this.WithReferences((IEnumerable<MetadataReference>)references); } /// <summary> /// Creates a new compilation with the specified compilation options. 
/// </summary> public CSharpCompilation WithOptions(CSharpCompilationOptions options) { var oldOptions = this.Options; bool reuseReferenceManager = oldOptions.CanReuseCompilationReferenceManager(options); bool reuseSyntaxAndDeclarationManager = oldOptions.ScriptClassName == options.ScriptClassName && oldOptions.SourceReferenceResolver == options.SourceReferenceResolver; return new CSharpCompilation( this.AssemblyName, options, this.ExternalReferences, _previousSubmission, this.SubmissionReturnType, this.HostObjectType, this.IsSubmission, _referenceManager, reuseReferenceManager, reuseSyntaxAndDeclarationManager ? _syntaxAndDeclarations : new SyntaxAndDeclarationManager( _syntaxAndDeclarations.ExternalSyntaxTrees, options.ScriptClassName, options.SourceReferenceResolver, _syntaxAndDeclarations.MessageProvider, _syntaxAndDeclarations.IsSubmission, state: null)); } /// <summary> /// Returns a new compilation with the given compilation set as the previous submission. /// </summary> internal CSharpCompilation WithPreviousSubmission(CSharpCompilation newPreviousSubmission) { if (!this.IsSubmission) { throw new InvalidOperationException(CSharpResources.CannotHavePreviousSubmission); } // Reference binding doesn't depend on previous submission so we can reuse it. return new CSharpCompilation( this.AssemblyName, _options, this.ExternalReferences, newPreviousSubmission, this.SubmissionReturnType, this.HostObjectType, this.IsSubmission, _referenceManager, reuseReferenceManager: true, syntaxAndDeclarations: _syntaxAndDeclarations); } /// <summary> /// Returns a new compilation with a given event queue. 
/// </summary>
internal override Compilation WithEventQueue(AsyncQueue<CompilationEvent> eventQueue)
{
    // Everything else is shared; only the queue that compilation events are posted to changes.
    return new CSharpCompilation(
        this.AssemblyName,
        _options,
        this.ExternalReferences,
        _previousSubmission,
        this.SubmissionReturnType,
        this.HostObjectType,
        this.IsSubmission,
        _referenceManager,
        reuseReferenceManager: true,
        syntaxAndDeclarations: _syntaxAndDeclarations,
        eventQueue: eventQueue);
}

#endregion

#region Submission

internal new CSharpCompilation PreviousSubmission
{
    get { return _previousSubmission; }
}

// TODO (tomat): consider moving this method to SemanticModel

/// <summary>
/// Returns the type of the submission return value.
/// </summary>
/// <returns>
/// The type of the last expression of the submission.
/// Null if the type of the last expression is unknown (null).
/// Void type if the type of the last expression statement is void or
/// the submission ends with a declaration or statement that is not an expression statement.
/// </returns>
/// <remarks>
/// Note that the return type is System.Void for both compilations "System.Console.WriteLine();" and "System.Console.WriteLine()",
/// and <paramref name="hasValue"/> is <c>False</c> for the former and <c>True</c> for the latter.
/// </remarks>
/// <param name="hasValue">True if the submission has value, i.e. if it ends with a statement that is an expression statement.</param>
/// <exception cref="InvalidOperationException">The compilation doesn't represent a submission (<see cref="Compilation.IsSubmission"/> return false).</exception>
internal new TypeSymbol GetSubmissionResultType(out bool hasValue)
{
    if (!IsSubmission)
    {
        throw new InvalidOperationException(CSharpResources.ThisCompilationNotInteractive);
    }

    hasValue = false;

    // A submission may be empty or comprised of a single script file.
    var tree = _syntaxAndDeclarations.ExternalSyntaxTrees.SingleOrDefault();
    if (tree == null || tree.Options.Kind != SourceCodeKind.Interactive)
    {
        return GetSpecialType(SpecialType.System_Void);
    }

    // Only a trailing global expression statement can produce a submission value.
    var lastStatement = (GlobalStatementSyntax)tree.GetCompilationUnitRoot().Members.LastOrDefault(decl => decl.Kind() == SyntaxKind.GlobalStatement);
    if (lastStatement == null || lastStatement.Statement.Kind() != SyntaxKind.ExpressionStatement)
    {
        return GetSpecialType(SpecialType.System_Void);
    }

    // A present semicolon means "statement", i.e. the value is discarded ("x();" vs "x()").
    var expressionStatement = (ExpressionStatementSyntax)lastStatement.Statement;
    if (!expressionStatement.SemicolonToken.IsMissing)
    {
        return GetSpecialType(SpecialType.System_Void);
    }

    var model = GetSemanticModel(tree);
    hasValue = true;
    var expression = expressionStatement.Expression;
    var info = model.GetTypeInfo(expression);
    return (TypeSymbol)info.ConvertedType;
}

#endregion

#region Syntax Trees (maintain an ordered list)

/// <summary>
/// The syntax trees (parsed from source code) that this compilation was created with.
/// </summary>
public new ImmutableArray<SyntaxTree> SyntaxTrees
{
    get { return _syntaxAndDeclarations.GetLazyState().SyntaxTrees; }
}

/// <summary>
/// Returns true if this compilation contains the specified tree. False otherwise.
/// </summary>
public new bool ContainsSyntaxTree(SyntaxTree syntaxTree)
{
    var cstree = syntaxTree as SyntaxTree;
    return cstree != null && _syntaxAndDeclarations.GetLazyState().RootNamespaces.ContainsKey(cstree);
}

/// <summary>
/// Creates a new compilation with additional syntax trees.
/// </summary>
public new CSharpCompilation AddSyntaxTrees(params SyntaxTree[] trees)
{
    return AddSyntaxTrees((IEnumerable<SyntaxTree>)trees);
}

/// <summary>
/// Creates a new compilation with additional syntax trees.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="trees"/> or an element of it is null.</exception>
/// <exception cref="ArgumentException">A tree has no compilation-unit root, is already present, or violates submission constraints.</exception>
public new CSharpCompilation AddSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    if (trees == null)
    {
        throw new ArgumentNullException(nameof(trees));
    }

    if (trees.IsEmpty())
    {
        return this;
    }

    // This HashSet is needed so that we don't allow adding the same tree twice
    // with a single call to AddSyntaxTrees. Rather than using a separate HashSet,
    // ReplaceSyntaxTrees can just check against ExternalSyntaxTrees, because we
    // only allow replacing a single tree at a time.
    var externalSyntaxTrees = PooledHashSet<SyntaxTree>.GetInstance();
    var syntaxAndDeclarations = _syntaxAndDeclarations;
    externalSyntaxTrees.AddAll(syntaxAndDeclarations.ExternalSyntaxTrees);
    bool reuseReferenceManager = true;
    // i is only used to build the argument name ("trees[i]") for exception messages
    // and to count the added trees for the submission check below.
    int i = 0;
    foreach (var tree in trees.Cast<CSharpSyntaxTree>())
    {
        if (tree == null)
        {
            throw new ArgumentNullException($"{nameof(trees)}[{i}]");
        }

        if (!tree.HasCompilationUnitRoot)
        {
            throw new ArgumentException(CSharpResources.TreeMustHaveARootNodeWith, $"{nameof(trees)}[{i}]");
        }

        if (externalSyntaxTrees.Contains(tree))
        {
            throw new ArgumentException(CSharpResources.SyntaxTreeAlreadyPresent, $"{nameof(trees)}[{i}]");
        }

        if (this.IsSubmission && tree.Options.Kind == SourceCodeKind.Regular)
        {
            throw new ArgumentException(CSharpResources.SubmissionCanOnlyInclude, $"{nameof(trees)}[{i}]");
        }

        externalSyntaxTrees.Add(tree);
        // A tree containing #r or #load forces the reference manager to be rebuilt.
        reuseReferenceManager &= !tree.HasReferenceOrLoadDirectives;

        i++;
    }
    externalSyntaxTrees.Free();

    if (this.IsSubmission && i > 1)
    {
        throw new ArgumentException(CSharpResources.SubmissionCanHaveAtMostOne, nameof(trees));
    }

    syntaxAndDeclarations = syntaxAndDeclarations.AddSyntaxTrees(trees);

    return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}

/// <summary>
/// Creates a new compilation without the specified syntax trees. Preserves metadata info for use with trees
/// added later.
/// </summary>
public new CSharpCompilation RemoveSyntaxTrees(params SyntaxTree[] trees)
{
    return RemoveSyntaxTrees((IEnumerable<SyntaxTree>)trees);
}

/// <summary>
/// Creates a new compilation without the specified syntax trees. Preserves metadata info for use with trees
/// added later.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="trees"/> is null.</exception>
/// <exception cref="ArgumentException">A tree is not part of this compilation, or came from a #load directive.</exception>
public new CSharpCompilation RemoveSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    if (trees == null)
    {
        throw new ArgumentNullException(nameof(trees));
    }

    if (trees.IsEmpty())
    {
        return this;
    }

    var removeSet = PooledHashSet<SyntaxTree>.GetInstance();
    // This set mirrors the externally-added trees so that each requested removal can be
    // validated (present, and not a #load'ed tree) before the declaration state is updated.
    var externalSyntaxTrees = PooledHashSet<SyntaxTree>.GetInstance();
    var syntaxAndDeclarations = _syntaxAndDeclarations;
    externalSyntaxTrees.AddAll(syntaxAndDeclarations.ExternalSyntaxTrees);
    bool reuseReferenceManager = true;
    // i only feeds the "trees[i]" argument name in exception messages.
    int i = 0;
    foreach (var tree in trees.Cast<CSharpSyntaxTree>())
    {
        if (!externalSyntaxTrees.Contains(tree))
        {
            // Check to make sure this is not a #load'ed tree.
            var loadedSyntaxTreeMap = syntaxAndDeclarations.GetLazyState().LoadedSyntaxTreeMap;
            if (SyntaxAndDeclarationManager.IsLoadedSyntaxTree(tree, loadedSyntaxTreeMap))
            {
                throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeFromLoadNoRemoveReplace, tree), $"{nameof(trees)}[{i}]");
            }

            throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, tree), $"{nameof(trees)}[{i}]");
        }

        removeSet.Add(tree);
        // Removing a tree with #r or #load invalidates the bound references.
        reuseReferenceManager &= !tree.HasReferenceOrLoadDirectives;

        i++;
    }
    externalSyntaxTrees.Free();

    syntaxAndDeclarations = syntaxAndDeclarations.RemoveSyntaxTrees(removeSet);
    removeSet.Free();

    return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}

/// <summary>
/// Creates a new compilation without any syntax trees.
/// Preserves metadata info
/// from this compilation for use with trees added later.
/// </summary>
public new CSharpCompilation RemoveAllSyntaxTrees()
{
    var syntaxAndDeclarations = _syntaxAndDeclarations;
    return Update(
        _referenceManager,
        reuseReferenceManager: !syntaxAndDeclarations.MayHaveReferenceDirectives(),
        syntaxAndDeclarations: syntaxAndDeclarations.WithExternalSyntaxTrees(ImmutableArray<SyntaxTree>.Empty));
}

/// <summary>
/// Creates a new compilation without the old tree but with the new tree.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="oldTree"/> is null.</exception>
/// <exception cref="ArgumentException">The old tree is not part of this compilation (or is #load'ed), or the new tree is invalid or already present.</exception>
public new CSharpCompilation ReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree)
{
    // this is just to force a cast exception
    oldTree = (CSharpSyntaxTree)oldTree;
    newTree = (CSharpSyntaxTree)newTree;

    if (oldTree == null)
    {
        throw new ArgumentNullException(nameof(oldTree));
    }

    if (newTree == null)
    {
        // Replacing with null degenerates to removal.
        return this.RemoveSyntaxTrees(oldTree);
    }
    else if (newTree == oldTree)
    {
        return this;
    }

    if (!newTree.HasCompilationUnitRoot)
    {
        throw new ArgumentException(CSharpResources.TreeMustHaveARootNodeWith, nameof(newTree));
    }

    var syntaxAndDeclarations = _syntaxAndDeclarations;
    var externalSyntaxTrees = syntaxAndDeclarations.ExternalSyntaxTrees;
    if (!externalSyntaxTrees.Contains(oldTree))
    {
        // Check to see if this is a #load'ed tree.
        var loadedSyntaxTreeMap = syntaxAndDeclarations.GetLazyState().LoadedSyntaxTreeMap;
        if (SyntaxAndDeclarationManager.IsLoadedSyntaxTree(oldTree, loadedSyntaxTreeMap))
        {
            throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeFromLoadNoRemoveReplace, oldTree), nameof(oldTree));
        }

        throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, oldTree), nameof(oldTree));
    }

    if (externalSyntaxTrees.Contains(newTree))
    {
        throw new ArgumentException(CSharpResources.SyntaxTreeAlreadyPresent, nameof(newTree));
    }

    // TODO(tomat): Consider comparing #r's of the old and the new tree. If they are exactly the same we could still reuse.
    // This could be a perf win when editing a script file in the IDE. The services create a new compilation every keystroke
    // that replaces the tree with a new one.
    var reuseReferenceManager = !oldTree.HasReferenceOrLoadDirectives() && !newTree.HasReferenceOrLoadDirectives();
    syntaxAndDeclarations = syntaxAndDeclarations.ReplaceSyntaxTree(oldTree, newTree);

    return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}

// Maps a tree to its stable position in the compilation's ordered tree list.
internal override int GetSyntaxTreeOrdinal(SyntaxTree tree)
{
    Debug.Assert(this.ContainsSyntaxTree(tree));
    return _syntaxAndDeclarations.GetLazyState().OrdinalMap[tree];
}

#endregion

#region References

internal override CommonReferenceManager CommonGetBoundReferenceManager()
{
    return GetBoundReferenceManager();
}

internal new ReferenceManager GetBoundReferenceManager()
{
    if ((object)_lazyAssemblySymbol == null)
    {
        _referenceManager.CreateSourceAssemblyForCompilation(this);
        Debug.Assert((object)_lazyAssemblySymbol != null);
    }

    // referenceManager can only be accessed after we initialized the lazyAssemblySymbol.
    // In fact, initialization of the assembly symbol might change the reference manager.
    return _referenceManager;
}

// for testing only:
internal bool ReferenceManagerEquals(CSharpCompilation other)
{
    return ReferenceEquals(_referenceManager, other._referenceManager);
}

public override ImmutableArray<MetadataReference> DirectiveReferences
{
    get
    {
        return GetBoundReferenceManager().DirectiveReferences;
    }
}

internal override IDictionary<string, MetadataReference> ReferenceDirectiveMap
{
    get
    {
        return GetBoundReferenceManager().ReferenceDirectiveMap;
    }
}

// for testing purposes
internal IEnumerable<string> ExternAliases
{
    get
    {
        return GetBoundReferenceManager().ExternAliases;
    }
}

/// <summary>
/// Gets the <see cref="AssemblySymbol"/> or <see cref="ModuleSymbol"/> for a metadata reference used to create this compilation.
/// </summary>
/// <returns><see cref="AssemblySymbol"/> or <see cref="ModuleSymbol"/> corresponding to the given reference or null if there is none.</returns>
/// <remarks>
/// Uses object identity when comparing two references.
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="reference"/> is null.</exception>
internal new Symbol GetAssemblyOrModuleSymbol(MetadataReference reference)
{
    if (reference == null)
    {
        throw new ArgumentNullException(nameof(reference));
    }

    if (reference.Properties.Kind == MetadataImageKind.Assembly)
    {
        return GetBoundReferenceManager().GetReferencedAssemblySymbol(reference);
    }
    else
    {
        Debug.Assert(reference.Properties.Kind == MetadataImageKind.Module);
        // A negative index means the module reference was not bound into the assembly.
        int index = GetBoundReferenceManager().GetReferencedModuleIndex(reference);
        return index < 0 ? null : this.Assembly.Modules[index];
    }
}

public override IEnumerable<AssemblyIdentity> ReferencedAssemblyNames
{
    get
    {
        return Assembly.Modules.SelectMany(module => module.GetReferencedAssemblies());
    }
}

/// <summary>
/// All reference directives used in this compilation.
/// </summary>
internal override IEnumerable<ReferenceDirective> ReferenceDirectives
{
    get { return this.Declarations.ReferenceDirectives; }
}

/// <summary>
/// Returns a metadata reference that a given #r resolves to.
/// </summary>
/// <param name="directive">#r directive.</param>
/// <returns>Metadata reference the specified directive resolves to.</returns>
public MetadataReference GetDirectiveReference(ReferenceDirectiveTriviaSyntax directive)
{
    return ReferenceDirectiveMap[directive.File.ValueText];
}

/// <summary>
/// Creates a new compilation with additional metadata references.
/// </summary>
public new CSharpCompilation AddReferences(params MetadataReference[] references)
{
    return (CSharpCompilation)base.AddReferences(references);
}

/// <summary>
/// Creates a new compilation with additional metadata references.
/// </summary>
public new CSharpCompilation AddReferences(IEnumerable<MetadataReference> references)
{
    return (CSharpCompilation)base.AddReferences(references);
}

/// <summary>
/// Creates a new compilation without the specified metadata references.
/// </summary>
public new CSharpCompilation RemoveReferences(params MetadataReference[] references)
{
    return (CSharpCompilation)base.RemoveReferences(references);
}

/// <summary>
/// Creates a new compilation without the specified metadata references.
/// </summary>
public new CSharpCompilation RemoveReferences(IEnumerable<MetadataReference> references)
{
    return (CSharpCompilation)base.RemoveReferences(references);
}

/// <summary>
/// Creates a new compilation without any metadata references
/// </summary>
public new CSharpCompilation RemoveAllReferences()
{
    return (CSharpCompilation)base.RemoveAllReferences();
}

/// <summary>
/// Creates a new compilation with an old metadata reference replaced with a new metadata reference.
/// </summary>
public new CSharpCompilation ReplaceReference(MetadataReference oldReference, MetadataReference newReference)
{
    return (CSharpCompilation)base.ReplaceReference(oldReference, newReference);
}

public override CompilationReference ToMetadataReference(ImmutableArray<string> aliases = default(ImmutableArray<string>), bool embedInteropTypes = false)
{
    return new CSharpCompilationReference(this, aliases, embedInteropTypes);
}

/// <summary>
/// Get all modules in this compilation, including the source module, added modules, and all
/// modules of referenced assemblies that do not come from an assembly with an extern alias.
/// Metadata imported from aliased assemblies is not visible at the source level except through
/// the use of an extern alias directive. So exclude them from this list which is used to construct
/// the global namespace.
/// </summary>
private void GetAllUnaliasedModules(ArrayBuilder<ModuleSymbol> modules)
{
    // NOTE: This includes referenced modules - they count as modules of the compilation assembly.
    modules.AddRange(Assembly.Modules);

    var referenceManager = GetBoundReferenceManager();

    for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
    {
        // Skip assemblies only reachable through an extern alias.
        if (referenceManager.DeclarationsAccessibleWithoutAlias(i))
        {
            modules.AddRange(referenceManager.ReferencedAssemblies[i].Modules);
        }
    }
}

/// <summary>
/// Return a list of assembly symbols than can be accessed without using an alias.
/// For example:
///   1) /r:A.dll /r:B.dll -> A, B
///   2) /r:Foo=A.dll /r:B.dll -> B
///   3) /r:Foo=A.dll /r:A.dll -> A
/// </summary>
internal void GetUnaliasedReferencedAssemblies(ArrayBuilder<AssemblySymbol> assemblies)
{
    var referenceManager = GetBoundReferenceManager();
    for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
    {
        if (referenceManager.DeclarationsAccessibleWithoutAlias(i))
        {
            assemblies.Add(referenceManager.ReferencedAssemblies[i]);
        }
    }
}

/// <summary>
/// Gets the <see cref="MetadataReference"/> that corresponds to the assembly symbol.
/// </summary>
public new MetadataReference GetMetadataReference(IAssemblySymbol assemblySymbol)
{
    return base.GetMetadataReference(assemblySymbol);
}

#endregion

#region Symbols

/// <summary>
/// The AssemblySymbol that represents the assembly being created.
/// </summary>
internal SourceAssemblySymbol SourceAssembly
{
    get
    {
        // Forces creation of the source assembly symbol as a side effect.
        GetBoundReferenceManager();
        return _lazyAssemblySymbol;
    }
}

/// <summary>
/// The AssemblySymbol that represents the assembly being created.
/// </summary>
internal new AssemblySymbol Assembly
{
    get
    {
        return SourceAssembly;
    }
}

/// <summary>
/// Get a ModuleSymbol that refers to the module being created by compiling all of the code.
/// By getting the GlobalNamespace property of that module, all of the namespaces and types
/// defined in source code can be obtained.
/// </summary>
internal new ModuleSymbol SourceModule
{
    get
    {
        return Assembly.Modules[0];
    }
}

/// <summary>
/// Gets the root namespace that contains all namespaces and types defined in source code or in
/// referenced metadata, merged into a single namespace hierarchy.
/// </summary>
internal new NamespaceSymbol GlobalNamespace
{
    get
    {
        if ((object)_lazyGlobalNamespace == null)
        {
            // Get the root namespace from each module, and merge them all together
            // Get all modules in this compilation, ones referenced directly by the compilation
            // as well as those referenced by all referenced assemblies.
            var modules = ArrayBuilder<ModuleSymbol>.GetInstance();
            GetAllUnaliasedModules(modules);

            var result = MergedNamespaceSymbol.Create(
                new NamespaceExtent(this),
                null,
                modules.SelectDistinct(m => m.GlobalNamespace));

            modules.Free();

            // Publish-once: the first thread to finish wins; losers discard their result.
            Interlocked.CompareExchange(ref _lazyGlobalNamespace, result, null);
        }

        return _lazyGlobalNamespace;
    }
}

/// <summary>
/// Given for the specified module or assembly namespace, gets the corresponding compilation
/// namespace (merged namespace representation for all namespace declarations and references
/// with contributions for the namespaceSymbol). Can return null if no corresponding
/// namespace can be bound in this compilation with the same name.
/// </summary>
internal new NamespaceSymbol GetCompilationNamespace(INamespaceSymbol namespaceSymbol)
{
    if (namespaceSymbol is NamespaceSymbol &&
        namespaceSymbol.NamespaceKind == NamespaceKind.Compilation &&
        namespaceSymbol.ContainingCompilation == this)
    {
        // Already a compilation namespace of this compilation; nothing to translate.
        return (NamespaceSymbol)namespaceSymbol;
    }

    var containingNamespace = namespaceSymbol.ContainingNamespace;
    if (containingNamespace == null)
    {
        return this.GlobalNamespace;
    }

    // Recursively translate the parent, then look up this namespace's name within it.
    var current = GetCompilationNamespace(containingNamespace);
    if ((object)current != null)
    {
        return current.GetNestedNamespace(namespaceSymbol.Name);
    }

    return null;
}

// Lazily-created cache of extern-alias name -> target namespace (misses cached as MissingNamespaceSymbol).
private ConcurrentDictionary<string, NamespaceSymbol> _externAliasTargets;

internal bool GetExternAliasTarget(string aliasName, out NamespaceSymbol @namespace)
{
    if (_externAliasTargets == null)
    {
        Interlocked.CompareExchange(ref _externAliasTargets, new ConcurrentDictionary<string, NamespaceSymbol>(), null);
    }
    else if (_externAliasTargets.TryGetValue(aliasName, out @namespace))
    {
        return !(@namespace is MissingNamespaceSymbol);
    }

    // Collect the global namespaces of every referenced assembly registered under this alias.
    ArrayBuilder<NamespaceSymbol> builder = null;
    var referenceManager = GetBoundReferenceManager();
    for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
    {
        if (referenceManager.AliasesOfReferencedAssemblies[i].Contains(aliasName))
        {
            builder = builder ?? ArrayBuilder<NamespaceSymbol>.GetInstance();
            builder.Add(referenceManager.ReferencedAssemblies[i].GlobalNamespace);
        }
    }

    bool foundNamespace = builder != null;

    // We want to cache failures as well as successes so that subsequent incorrect extern aliases with the
    // same alias will have the same target.
    @namespace = foundNamespace
        ? MergedNamespaceSymbol.Create(new NamespaceExtent(this), namespacesToMerge: builder.ToImmutableAndFree(), containingNamespace: null, nameOpt: null)
        : new MissingNamespaceSymbol(new MissingModuleSymbol(new MissingAssemblySymbol(new AssemblyIdentity(System.Guid.NewGuid().ToString())), ordinal: -1));

    // Use GetOrAdd in case another thread beat us to the punch (i.e. should return the same object for the same alias, every time).
    @namespace = _externAliasTargets.GetOrAdd(aliasName, @namespace);

    Debug.Assert(foundNamespace == !(@namespace is MissingNamespaceSymbol));

    return foundNamespace;
}

/// <summary>
/// A symbol representing the implicit Script class. This is null if the class is not
/// defined in the compilation.
/// </summary>
internal new NamedTypeSymbol ScriptClass
{
    get { return _scriptClass.Value; }
}

/// <summary>
/// Resolves a symbol that represents script container (Script class). Uses the
/// full name of the container class stored in <see cref="CompilationOptions.ScriptClassName"/> to find the symbol.
/// </summary>
/// <returns>The Script class symbol or null if it is not defined.</returns>
private ImplicitNamedTypeSymbol BindScriptClass()
{
    if (_options.ScriptClassName == null || !_options.ScriptClassName.IsValidClrTypeName())
    {
        return null;
    }

    var namespaceOrType = this.Assembly.GlobalNamespace.GetNamespaceOrTypeByQualifiedName(_options.ScriptClassName.Split('.')).AsSingleton();
    return namespaceOrType as ImplicitNamedTypeSymbol;
}

internal Imports GlobalImports
{
    get { return _globalImports.Value; }
}

internal IEnumerable<NamespaceOrTypeSymbol> GlobalUsings
{
    get
    {
        return GlobalImports.Usings.Select(u => u.NamespaceOrType);
    }
}

internal AliasSymbol GlobalNamespaceAlias
{
    get { return _globalNamespaceAlias.Value; }
}

/// <summary>
/// Get the symbol for the predefined type from the COR Library referenced by this compilation.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="specialType"/> is outside the valid range.</exception>
internal new NamedTypeSymbol GetSpecialType(SpecialType specialType)
{
    if (specialType <= SpecialType.None || specialType > SpecialType.Count)
    {
        throw new ArgumentOutOfRangeException(nameof(specialType));
    }

    var result = Assembly.GetSpecialType(specialType);
    Debug.Assert(result.SpecialType == specialType);
    return result;
}

/// <summary>
/// Get the symbol for the predefined type member from the COR Library referenced by this compilation.
/// </summary>
internal Symbol GetSpecialTypeMember(SpecialMember specialMember)
{
    return Assembly.GetSpecialTypeMember(specialMember);
}

// Maps a System.Type to the corresponding TypeSymbol; on failure, reports a diagnostic
// and returns an error type symbol instead of null so callers always get a usable symbol.
internal TypeSymbol GetTypeByReflectionType(Type type, DiagnosticBag diagnostics)
{
    var result = Assembly.GetTypeByReflectionType(type, includeReferences: true);
    if ((object)result == null)
    {
        var errorType = new ExtendedErrorTypeSymbol(this, type.Name, 0, CreateReflectionTypeNotFoundError(type));
        diagnostics.Add(errorType.ErrorInfo, NoLocation.Singleton);
        result = errorType;
    }

    return result;
}

private static CSDiagnosticInfo CreateReflectionTypeNotFoundError(Type type)
{
    // The type or namespace name '{0}' could not be found in the global namespace (are you missing an assembly reference?)
    return new CSDiagnosticInfo(
        ErrorCode.ERR_GlobalSingleTypeNameNotFound,
        new object[] { type.AssemblyQualifiedName },
        ImmutableArray<Symbol>.Empty,
        ImmutableArray<Location>.Empty
    );
}

// The type of host object model if available.
private TypeSymbol _lazyHostObjectTypeSymbol;

internal TypeSymbol GetHostObjectTypeSymbol()
{
    if (HostObjectType != null && (object)_lazyHostObjectTypeSymbol == null)
    {
        TypeSymbol symbol = Assembly.GetTypeByReflectionType(HostObjectType, includeReferences: true);

        if ((object)symbol == null)
        {
            // The host object type couldn't be bound; synthesize a missing-type symbol
            // carrying the "type not found" diagnostic so errors surface at use sites.
            MetadataTypeName mdName = MetadataTypeName.FromNamespaceAndTypeName(HostObjectType.Namespace ?? String.Empty,
                                                                                HostObjectType.Name,
                                                                                useCLSCompliantNameArityEncoding: true);

            symbol = new MissingMetadataTypeSymbol.TopLevelWithCustomErrorInfo(
                new MissingAssemblySymbol(AssemblyIdentity.FromAssemblyDefinition(HostObjectType.GetTypeInfo().Assembly)).Modules[0],
                ref mdName,
                CreateReflectionTypeNotFoundError(HostObjectType),
                SpecialType.None);
        }

        Interlocked.CompareExchange(ref _lazyHostObjectTypeSymbol, symbol, null);
    }

    return _lazyHostObjectTypeSymbol;
}

internal SynthesizedInteractiveInitializerMethod GetSubmissionInitializer()
{
    return (IsSubmission && (object)ScriptClass != null) ?
        ScriptClass.GetScriptInitializer() :
        null;
}

/// <summary>
/// Gets the type within the compilation's assembly and all referenced assemblies (other than
/// those that can only be referenced via an extern alias) using its canonical CLR metadata name.
/// </summary>
internal new NamedTypeSymbol GetTypeByMetadataName(string fullyQualifiedMetadataName)
{
    return this.Assembly.GetTypeByMetadataName(fullyQualifiedMetadataName, includeReferences: true, isWellKnownType: false);
}

/// <summary>
/// The TypeSymbol for the type 'dynamic' in this Compilation.
/// </summary>
internal new TypeSymbol DynamicType
{
    get { return AssemblySymbol.DynamicType; }
}

/// <summary>
/// The NamedTypeSymbol for the .NET System.Object type, which could have a TypeKind of
/// Error if there was no COR Library in this Compilation.
/// </summary>
internal new NamedTypeSymbol ObjectType
{
    get { return this.Assembly.ObjectType; }
}

internal bool DeclaresTheObjectClass
{
    get { return SourceAssembly.DeclaresTheObjectClass; }
}

internal new MethodSymbol GetEntryPoint(CancellationToken cancellationToken)
{
    EntryPoint entryPoint = GetEntryPointAndDiagnostics(cancellationToken);
    return entryPoint == null ?
null : entryPoint.MethodSymbol;
}

// Lazily computes and caches the entry point together with the diagnostics produced while
// finding it. Returns null when the output kind needs no entry point and there is no script class.
internal EntryPoint GetEntryPointAndDiagnostics(CancellationToken cancellationToken)
{
    if (!this.Options.OutputKind.IsApplication() && ((object)this.ScriptClass == null))
    {
        return null;
    }

    if (this.Options.MainTypeName != null && !this.Options.MainTypeName.IsValidClrTypeName())
    {
        // The invalid name was already reported via options validation.
        Debug.Assert(!this.Options.Errors.IsDefaultOrEmpty);
        return new EntryPoint(null, ImmutableArray<Diagnostic>.Empty);
    }

    if (_lazyEntryPoint == null)
    {
        ImmutableArray<Diagnostic> diagnostics;
        var entryPoint = FindEntryPoint(cancellationToken, out diagnostics);
        // Publish-once; a losing thread discards its computed result.
        Interlocked.CompareExchange(ref _lazyEntryPoint, new EntryPoint(entryPoint, diagnostics), null);
    }

    return _lazyEntryPoint;
}

// Locates the program entry point, reporting candidate/ambiguity diagnostics into
// sealedDiagnostics. Returns null when no viable entry point exists.
private MethodSymbol FindEntryPoint(CancellationToken cancellationToken, out ImmutableArray<Diagnostic> sealedDiagnostics)
{
    var diagnostics = DiagnosticBag.GetInstance();
    var entryPointCandidates = ArrayBuilder<MethodSymbol>.GetInstance();

    try
    {
        NamedTypeSymbol mainType;

        string mainTypeName = this.Options.MainTypeName;
        NamespaceSymbol globalNamespace = this.SourceModule.GlobalNamespace;

        if (mainTypeName != null)
        {
            // Global code is the entry point, ignore all other Mains.
            var scriptClass = this.ScriptClass;
            if (scriptClass != null)
            {
                // CONSIDER: we could use the symbol instead of just the name.
                diagnostics.Add(ErrorCode.WRN_MainIgnored, NoLocation.Singleton, mainTypeName);
                return scriptClass.GetScriptEntryPoint();
            }

            var mainTypeOrNamespace = globalNamespace.GetNamespaceOrTypeByQualifiedName(mainTypeName.Split('.')).OfMinimalArity();
            if ((object)mainTypeOrNamespace == null)
            {
                diagnostics.Add(ErrorCode.ERR_MainClassNotFound, NoLocation.Singleton, mainTypeName);
                return null;
            }

            mainType = mainTypeOrNamespace as NamedTypeSymbol;
            if ((object)mainType == null || mainType.IsGenericType || (mainType.TypeKind != TypeKind.Class && mainType.TypeKind != TypeKind.Struct))
            {
                diagnostics.Add(ErrorCode.ERR_MainClassNotClass, mainTypeOrNamespace.Locations.First(), mainTypeOrNamespace);
                return null;
            }

            EntryPointCandidateFinder.FindCandidatesInSingleType(mainType, entryPointCandidates, cancellationToken);
        }
        else
        {
            mainType = null;

            EntryPointCandidateFinder.FindCandidatesInNamespace(globalNamespace, entryPointCandidates, cancellationToken);

            // Global code is the entry point, ignore all other Mains.
            var scriptClass = this.ScriptClass;
            if (scriptClass != null)
            {
                foreach (var main in entryPointCandidates)
                {
                    diagnostics.Add(ErrorCode.WRN_MainIgnored, main.Locations.First(), main);
                }

                return scriptClass.GetScriptEntryPoint();
            }
        }

        DiagnosticBag warnings = DiagnosticBag.GetInstance();
        var viableEntryPoints = ArrayBuilder<MethodSymbol>.GetInstance();

        // Filter candidates down to viable entry points, collecting warnings for the rejects.
        foreach (var candidate in entryPointCandidates)
        {
            if (!candidate.HasEntryPointSignature())
            {
                // a single error for partial methods:
                warnings.Add(ErrorCode.WRN_InvalidMainSig, candidate.Locations.First(), candidate);
                continue;
            }

            if (candidate.IsGenericMethod || candidate.ContainingType.IsGenericType)
            {
                // a single error for partial methods:
                warnings.Add(ErrorCode.WRN_MainCantBeGeneric, candidate.Locations.First(), candidate);
                continue;
            }

            if (candidate.IsAsync)
            {
                diagnostics.Add(ErrorCode.ERR_MainCantBeAsync, candidate.Locations.First(), candidate);
            }

            viableEntryPoints.Add(candidate);
        }

        // Only surface the per-candidate warnings when they could explain a failure.
        if ((object)mainType == null || viableEntryPoints.Count == 0)
        {
            diagnostics.AddRange(warnings);
        }

        warnings.Free();

        MethodSymbol entryPoint = null;
        if (viableEntryPoints.Count == 0)
        {
            if ((object)mainType == null)
            {
                diagnostics.Add(ErrorCode.ERR_NoEntryPoint, NoLocation.Singleton);
            }
            else
            {
                diagnostics.Add(ErrorCode.ERR_NoMainInClass, mainType.Locations.First(), mainType);
            }
        }
        else if (viableEntryPoints.Count > 1)
        {
            // Sort for deterministic diagnostic ordering before reporting the ambiguity.
            viableEntryPoints.Sort(LexicalOrderSymbolComparer.Instance);
            var info = new CSDiagnosticInfo(
                 ErrorCode.ERR_MultipleEntryPoints,
                 args: SpecializedCollections.EmptyArray<object>(),
                 symbols: viableEntryPoints.OfType<Symbol>().AsImmutable(),
                 additionalLocations: viableEntryPoints.Select(m => m.Locations.First()).OfType<Location>().AsImmutable());

            diagnostics.Add(new CSDiagnostic(info, viableEntryPoints.First().Locations.First()));
        }
        else
        {
            entryPoint = viableEntryPoints[0];
        }

        viableEntryPoints.Free();
        return entryPoint;
    }
    finally
    {
        entryPointCandidates.Free();
        sealedDiagnostics = diagnostics.ToReadOnlyAndFree();
    }
}

// Pairs the resolved entry-point method (possibly null) with the diagnostics produced finding it.
internal class EntryPoint
{
    public readonly MethodSymbol MethodSymbol;
    public readonly ImmutableArray<Diagnostic> Diagnostics;

    public EntryPoint(MethodSymbol methodSymbol, ImmutableArray<Diagnostic> diagnostics)
    {
        this.MethodSymbol = methodSymbol;
        this.Diagnostics = diagnostics;
    }
}

internal bool MightContainNoPiaLocalTypes()
{
    return SourceAssembly.MightContainNoPiaLocalTypes();
}

// NOTE(cyrusn): There is a bit of a discoverability problem with this method and the same
// named method in SyntaxTreeSemanticModel. Technically, i believe these are the appropriate
// locations for these methods. This method has no dependencies on anything but the
// compilation, while the other method needs a bindings object to determine what bound node
// an expression syntax binds to. Perhaps when we document these methods we should explain
// where a user can find the other.
public Conversion ClassifyConversion(ITypeSymbol source, ITypeSymbol destination)
{
    // Note that it is possible for there to be both an implicit user-defined conversion
    // and an explicit built-in conversion from source to destination. In that scenario
    // this method returns the implicit conversion.

    if ((object)source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    if ((object)destination == null)
    {
        throw new ArgumentNullException(nameof(destination));
    }

    var cssource = source.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("source");
    var csdest = destination.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("destination");

    HashSet<DiagnosticInfo> useSiteDiagnostics = null;
    return Conversions.ClassifyConversion(cssource, csdest, ref useSiteDiagnostics);
}

/// <summary>
/// Returns a new ArrayTypeSymbol representing an array type tied to the base types of the
/// COR Library in this Compilation.
/// </summary> internal ArrayTypeSymbol CreateArrayTypeSymbol(TypeSymbol elementType, int rank = 1) { if ((object)elementType == null) { throw new ArgumentNullException(nameof(elementType)); } return ArrayTypeSymbol.CreateCSharpArray(this.Assembly, elementType, ImmutableArray<CustomModifier>.Empty, rank); } /// <summary> /// Returns a new PointerTypeSymbol representing a pointer type tied to a type in this Compilation. /// </summary> internal PointerTypeSymbol CreatePointerTypeSymbol(TypeSymbol elementType) { if ((object)elementType == null) { throw new ArgumentNullException(nameof(elementType)); } return new PointerTypeSymbol(elementType); } #endregion #region Binding /// <summary> /// Gets a new SyntaxTreeSemanticModel for the specified syntax tree. /// </summary> public new SemanticModel GetSemanticModel(SyntaxTree syntaxTree, bool ignoreAccessibility) { if (syntaxTree == null) { throw new ArgumentNullException(nameof(syntaxTree)); } if (!_syntaxAndDeclarations.GetLazyState().RootNamespaces.ContainsKey(syntaxTree)) { throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, syntaxTree), nameof(syntaxTree)); } return new SyntaxTreeSemanticModel(this, (SyntaxTree)syntaxTree, ignoreAccessibility); } // When building symbols from the declaration table (lazily), or inside a type, or when // compiling a method body, we may not have a BinderContext in hand for the enclosing // scopes. Therefore, we build them when needed (and cache them) using a ContextBuilder. // Since a ContextBuilder is only a cache, and the identity of the ContextBuilders and // BinderContexts have no semantic meaning, we can reuse them or rebuild them, whichever is // most convenient. We store them using weak references so that GC pressure will cause them // to be recycled. 
// Per-tree cache of BinderFactory instances, indexed by syntax-tree ordinal.
// Weak references: the factories are pure caches, so GC pressure may reclaim them.
private WeakReference<BinderFactory>[] _binderFactories;

// Returns the (possibly cached) BinderFactory for the given tree.
// Thread-safe: the cache array is published via CompareExchange, and slot updates race via AddNewFactory.
internal BinderFactory GetBinderFactory(SyntaxTree syntaxTree)
{
    var treeNum = GetSyntaxTreeOrdinal(syntaxTree);
    var binderFactories = _binderFactories;
    if (binderFactories == null)
    {
        // Lazily create the cache array; if another thread won the race, adopt its array.
        binderFactories = new WeakReference<BinderFactory>[this.SyntaxTrees.Length];
        binderFactories = Interlocked.CompareExchange(ref _binderFactories, binderFactories, null) ?? binderFactories;
    }

    BinderFactory previousFactory;
    var previousWeakReference = binderFactories[treeNum];
    if (previousWeakReference != null && previousWeakReference.TryGetTarget(out previousFactory))
    {
        return previousFactory;
    }

    return AddNewFactory(syntaxTree, ref binderFactories[treeNum]);
}

// CAS loop: publish a freshly built factory into the slot unless another thread's
// factory is already alive there, in which case reuse that one.
private BinderFactory AddNewFactory(SyntaxTree syntaxTree, ref WeakReference<BinderFactory> slot)
{
    var newFactory = new BinderFactory(this, syntaxTree);
    var newWeakReference = new WeakReference<BinderFactory>(newFactory);

    while (true)
    {
        BinderFactory previousFactory;
        WeakReference<BinderFactory> previousWeakReference = slot;
        if (previousWeakReference != null && previousWeakReference.TryGetTarget(out previousFactory))
        {
            return previousFactory;
        }

        if (Interlocked.CompareExchange(ref slot, newWeakReference, previousWeakReference) == previousWeakReference)
        {
            return newFactory;
        }
        // Lost the race; loop and re-examine the slot.
    }
}

internal Binder GetBinder(SyntaxReference reference)
{
    return GetBinderFactory(reference.SyntaxTree).GetBinder((CSharpSyntaxNode)reference.GetSyntax());
}

internal Binder GetBinder(CSharpSyntaxNode syntax)
{
    return GetBinderFactory(syntax.SyntaxTree).GetBinder(syntax);
}

/// <summary>
/// Returns imported symbols for the given declaration.
/// </summary>
internal Imports GetImports(SingleNamespaceDeclaration declaration)
{
    return GetBinderFactory(declaration.SyntaxReference.SyntaxTree).GetImportsBinder((CSharpSyntaxNode)declaration.SyntaxReference.GetSyntax()).GetImports();
}

// Merged imports of a submission's global namespace; Imports.Empty when there are none.
internal Imports GetSubmissionImports()
{
    return ((SourceNamespaceSymbol)SourceModule.GlobalNamespace).GetBoundImportsMerged().SingleOrDefault() ?? Imports.Empty;
}

// Only valid for submissions (interactive/script compilations).
internal InteractiveUsingsBinder GetInteractiveUsingsBinder()
{
    Debug.Assert(IsSubmission);

    // empty compilation:
    if ((object)ScriptClass == null)
    {
        Debug.Assert(_syntaxAndDeclarations.ExternalSyntaxTrees.Length == 0);
        return null;
    }

    return GetBinderFactory(_syntaxAndDeclarations.ExternalSyntaxTrees.Single()).GetInteractiveUsingsBinder();
}

private Imports BindGlobalUsings()
{
    return Imports.FromGlobalUsings(this);
}

private AliasSymbol CreateGlobalNamespaceAlias()
{
    return AliasSymbol.CreateGlobalNamespaceAlias(this.GlobalNamespace, new InContainerBinder(this.GlobalNamespace, new BuckStopsHereBinder(this)));
}

// Marks a tree as having all its diagnostics reported; fires CompilationUnitCompletedEvent
// once per tree and, when the last tree completes, signals the end of compilation events.
private void CompleteTree(SyntaxTree tree)
{
    bool completedCompilationUnit = false;
    bool completedCompilation = false;

    if (_lazyCompilationUnitCompletedTrees == null) Interlocked.CompareExchange(ref _lazyCompilationUnitCompletedTrees, new HashSet<SyntaxTree>(), null);
    lock (_lazyCompilationUnitCompletedTrees)
    {
        if (_lazyCompilationUnitCompletedTrees.Add(tree))
        {
            // that is, if the tree is not already complete
            completedCompilationUnit = true;
            if (_lazyCompilationUnitCompletedTrees.Count == this.SyntaxTrees.Length)
            {
                completedCompilation = true;
            }
        }
    }

    if (completedCompilationUnit)
    {
        EventQueue.Enqueue(new CompilationUnitCompletedEvent(this, tree));
    }

    if (completedCompilation)
    {
        EventQueue.Enqueue(new CompilationCompletedEvent(this));
        EventQueue.Complete(); // signal the end of compilation events
    }
}

// Reports HDN_UnusedUsingDirective / HDN_UnusedExternAlias hidden diagnostics for recorded
// import directives that were never used. filterTree restricts reporting to a single tree.
internal void ReportUnusedImports(DiagnosticBag diagnostics, CancellationToken cancellationToken, SyntaxTree filterTree = null)
{
    if (_lazyImportInfos != null)
    {
        foreach (ImportInfo info in _lazyImportInfos)
        {
cancellationToken.ThrowIfCancellationRequested();

SyntaxTree infoTree = info.Tree;
if (filterTree == null || filterTree == infoTree)
{
    TextSpan infoSpan = info.Span;
    if (!this.IsImportDirectiveUsed(infoTree, infoSpan.Start))
    {
        ErrorCode code = info.Kind == SyntaxKind.ExternAliasDirective ? ErrorCode.HDN_UnusedExternAlias : ErrorCode.HDN_UnusedUsingDirective;
        diagnostics.Add(code, infoTree.GetLocation(infoSpan));
    }
}
}
}

// By definition, a tree is complete when all of its compiler diagnostics have been reported.
// Since unused imports are the last thing we compute and report, a tree is complete when
// the unused imports have been reported.
if (EventQueue != null)
{
    if (filterTree != null)
    {
        CompleteTree(filterTree);
    }
    else
    {
        foreach (var tree in this.SyntaxTrees)
        {
            CompleteTree(tree);
        }
    }
}
}

internal void RecordImport(UsingDirectiveSyntax syntax)
{
    RecordImportInternal(syntax);
}

internal void RecordImport(ExternAliasDirectiveSyntax syntax)
{
    RecordImportInternal(syntax);
}

private void RecordImportInternal(CSharpSyntaxNode syntax)
{
    // Thread-safe lazy initialization of the shared import-info collection.
    LazyInitializer.EnsureInitialized(ref _lazyImportInfos).
        Add(new ImportInfo(syntax.SyntaxTree, syntax.Kind(), syntax.Span));
}

// Value-type record of one import directive (tree + directive kind + source span).
private struct ImportInfo : IEquatable<ImportInfo>
{
    public readonly SyntaxTree Tree;
    public readonly SyntaxKind Kind;
    public readonly TextSpan Span;

    public ImportInfo(SyntaxTree tree, SyntaxKind kind, TextSpan span)
    {
        this.Tree = tree;
        this.Kind = kind;
        this.Span = span;
    }

    public override bool Equals(object obj)
    {
        return (obj is ImportInfo) && Equals((ImportInfo)obj);
    }

    public bool Equals(ImportInfo other)
    {
        return other.Kind == this.Kind && other.Tree == this.Tree && other.Span == this.Span;
    }

    public override int GetHashCode()
    {
        // Hashes only Tree and Span.Start; consistent with Equals since equal values
        // necessarily share both tree and span.
        return Hash.Combine(Tree, Span.Start);
    }
}

#endregion

#region Diagnostics

internal override CommonMessageProvider MessageProvider
{
    get { return _syntaxAndDeclarations.MessageProvider; }
}

/// <summary>
/// The bag in which semantic analysis should deposit its diagnostics.
/// </summary>
internal DiagnosticBag DeclarationDiagnostics
{
    get
    {
        // We should only be placing diagnostics in this bag until
        // we are done gathering declaration diagnostics. Assert that is
        // the case. But since we have bugs (see https://github.com/dotnet/roslyn/issues/846)
        // we disable the assertion until they are fixed.
        Debug.Assert(!_declarationDiagnosticsFrozen || true);
        if (_lazyDeclarationDiagnostics == null)
        {
            var diagnostics = new DiagnosticBag();
            Interlocked.CompareExchange(ref _lazyDeclarationDiagnostics, diagnostics, null);
        }

        return _lazyDeclarationDiagnostics;
    }
}

// Freezes the declaration-diagnostics bag and returns its current contents
// (empty if the bag was never created).
private IEnumerable<Diagnostic> FreezeDeclarationDiagnostics()
{
    _declarationDiagnosticsFrozen = true;
    var result = _lazyDeclarationDiagnostics?.AsEnumerable() ?? Enumerable.Empty<Diagnostic>();
    return result;
}

private DiagnosticBag _lazyDeclarationDiagnostics;
private bool _declarationDiagnosticsFrozen;

/// <summary>
/// A bag in which diagnostics that should be reported after code gen can be deposited.
/// </summary>
internal DiagnosticBag AdditionalCodegenWarnings
{
    get { return _additionalCodegenWarnings; }
}

private readonly DiagnosticBag _additionalCodegenWarnings = new DiagnosticBag();

internal DeclarationTable Declarations
{
    get { return _syntaxAndDeclarations.GetLazyState().DeclarationTable; }
}

/// <summary>
/// Gets the diagnostics produced during the parsing stage of a compilation. There are no diagnostics for declarations or accessor or
/// method bodies, for example.
/// </summary>
public override ImmutableArray<Diagnostic> GetParseDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(CompilationStage.Parse, false, cancellationToken);
}

/// <summary>
/// Gets the diagnostics produced during symbol declaration headers. There are no diagnostics for accessor or
/// method bodies, for example.
/// </summary>
public override ImmutableArray<Diagnostic> GetDeclarationDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(CompilationStage.Declare, false, cancellationToken);
}

/// <summary>
/// Gets the diagnostics produced during the analysis of method bodies and field initializers.
/// </summary>
public override ImmutableArray<Diagnostic> GetMethodBodyDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(CompilationStage.Compile, false, cancellationToken);
}

/// <summary>
/// Gets the all the diagnostics for the compilation, including syntax, declaration, and binding. Does not
/// include any diagnostics that might be produced during emit.
/// </summary>
public override ImmutableArray<Diagnostic> GetDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(DefaultDiagnosticsStage, true, cancellationToken);
}

// Accumulates diagnostics for the requested stage (and, optionally, every earlier stage),
// then filters the result through the compiler options / pragma-warning directives.
internal ImmutableArray<Diagnostic> GetDiagnostics(CompilationStage stage, bool includeEarlierStages, CancellationToken cancellationToken)
{
    var builder = DiagnosticBag.GetInstance();

    // Parse-stage diagnostics: per-tree syntax errors plus #load directive diagnostics.
    if (stage == CompilationStage.Parse || (stage > CompilationStage.Parse && includeEarlierStages))
    {
        var syntaxTrees = this.SyntaxTrees;
        if (this.Options.ConcurrentBuild)
        {
            var parallelOptions = cancellationToken.CanBeCanceled ?
                new ParallelOptions() { CancellationToken = cancellationToken } :
                DefaultParallelOptions;

            Parallel.For(0, syntaxTrees.Length, parallelOptions, UICultureUtilities.WithCurrentUICulture<int>(i =>
            {
                var syntaxTree = syntaxTrees[i];
                AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree);
                builder.AddRange(syntaxTree.GetDiagnostics(cancellationToken));
            }));
        }
        else
        {
            foreach (var syntaxTree in syntaxTrees)
            {
                cancellationToken.ThrowIfCancellationRequested();
                AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree);
                cancellationToken.ThrowIfCancellationRequested();
                builder.AddRange(syntaxTree.GetDiagnostics(cancellationToken));
            }
        }
    }

    // Declare-stage diagnostics: option errors, reference resolution, and source declarations.
    if (stage == CompilationStage.Declare || stage > CompilationStage.Declare && includeEarlierStages)
    {
        builder.AddRange(Options.Errors);
        cancellationToken.ThrowIfCancellationRequested();

        // the set of diagnostics related to establishing references.
        builder.AddRange(GetBoundReferenceManager().Diagnostics);
        cancellationToken.ThrowIfCancellationRequested();

        builder.AddRange(GetSourceDeclarationDiagnostics(cancellationToken: cancellationToken));
    }

    cancellationToken.ThrowIfCancellationRequested();

    // Compile-stage diagnostics: full method-body binding/lowering (no IL emitted).
    if (stage == CompilationStage.Compile || stage > CompilationStage.Compile && includeEarlierStages)
    {
        var methodBodyDiagnostics = DiagnosticBag.GetInstance();
        GetDiagnosticsForAllMethodBodies(methodBodyDiagnostics, cancellationToken);
        builder.AddRangeAndFree(methodBodyDiagnostics);
    }

    // Before returning diagnostics, we filter warnings
    // to honor the compiler options (e.g., /nowarn, /warnaserror and /warn) and the pragmas.
    var result = DiagnosticBag.GetInstance();
    FilterAndAppendAndFreeDiagnostics(result, ref builder);
    return result.ToReadOnlyAndFree<Diagnostic>();
}

// Appends diagnostics produced by #load directives of the given tree.
// locationFilterOpt, when supplied, narrows the reported diagnostics (e.g. by location).
private static void AppendLoadDirectiveDiagnostics(DiagnosticBag builder, SyntaxAndDeclarationManager syntaxAndDeclarations, SyntaxTree syntaxTree, Func<IEnumerable<Diagnostic>, IEnumerable<Diagnostic>> locationFilterOpt = null)
{
    ImmutableArray<LoadDirective> loadDirectives;
    if (syntaxAndDeclarations.GetLazyState().LoadDirectiveMap.TryGetValue(syntaxTree, out loadDirectives))
    {
        Debug.Assert(!loadDirectives.IsEmpty);
        foreach (var directive in loadDirectives)
        {
            IEnumerable<Diagnostic> diagnostics = directive.Diagnostics;
            if (locationFilterOpt != null)
            {
                diagnostics = locationFilterOpt(diagnostics);
            }
            builder.AddRange(diagnostics);
        }
    }
}

// Do the steps in compilation to get the method body diagnostics, but don't actually generate
// IL or emit an assembly.
private void GetDiagnosticsForAllMethodBodies(DiagnosticBag diagnostics, CancellationToken cancellationToken)
{
    MethodCompiler.CompileMethodBodies(
        compilation: this,
        moduleBeingBuiltOpt: null,
        generateDebugInfo: false,
        hasDeclarationErrors: false,
        diagnostics: diagnostics,
        filterOpt: null,
        cancellationToken: cancellationToken);
    DocumentationCommentCompiler.WriteDocumentationCommentXml(this, null, null, diagnostics, cancellationToken);
    this.ReportUnusedImports(diagnostics, cancellationToken);
}

// True if the symbol is declared in the given tree/span, is the partial-definition
// counterpart of something implemented there, or is an implicit constructor whose
// containing type is included.
private static bool IsDefinedOrImplementedInSourceTree(Symbol symbol, SyntaxTree tree, TextSpan?
span)
{
    if (symbol.IsDefinedInSourceTree(tree, span))
    {
        return true;
    }

    if (symbol.IsPartialDefinition())
    {
        MethodSymbol implementationPart = ((MethodSymbol)symbol).PartialImplementationPart;
        if ((object)implementationPart != null)
        {
            return implementationPart.IsDefinedInSourceTree(tree, span);
        }
    }

    if (symbol.Kind == SymbolKind.Method && symbol.IsImplicitlyDeclared && ((MethodSymbol)symbol).MethodKind == MethodKind.Constructor)
    {
        // Include implicitly declared constructor if containing type is included
        return IsDefinedOrImplementedInSourceTree(symbol.ContainingType, tree, span);
    }

    return false;
}

// Compiles method bodies (without emitting IL) for the symbols defined in the given
// tree/span and returns the resulting diagnostics.
private ImmutableArray<Diagnostic> GetDiagnosticsForMethodBodiesInTree(SyntaxTree tree, TextSpan? span, CancellationToken cancellationToken)
{
    DiagnosticBag diagnostics = DiagnosticBag.GetInstance();
    MethodCompiler.CompileMethodBodies(
        compilation: this,
        moduleBeingBuiltOpt: null,
        generateDebugInfo: false,
        hasDeclarationErrors: false,
        diagnostics: diagnostics,
        filterOpt: s => IsDefinedOrImplementedInSourceTree(s, tree, span),
        cancellationToken: cancellationToken);
    DocumentationCommentCompiler.WriteDocumentationCommentXml(this, null, null, diagnostics, cancellationToken, tree, span);

    // Report unused directives only if computing diagnostics for the entire tree.
    // Otherwise we cannot determine if a particular directive is used outside of the given sub-span within the tree.
    if (!span.HasValue || span.Value == tree.GetRoot(cancellationToken).FullSpan)
    {
        ReportUnusedImports(diagnostics, cancellationToken, tree);
    }

    return diagnostics.ToReadOnlyAndFree();
}

/// <summary>
/// Filter out warnings based on the compiler options (/nowarn, /warn and /warnaserror) and the pragma warning directives.
/// 'incoming' is freed.
/// </summary>
/// <returns>True when there is no error or warning treated as an error.</returns>
internal override bool FilterAndAppendAndFreeDiagnostics(DiagnosticBag accumulator, ref DiagnosticBag incoming)
{
    bool result = FilterAndAppendDiagnostics(accumulator, incoming.AsEnumerableWithoutResolution());
    incoming.Free();
    incoming = null; // the bag is dead after this call; null the caller's reference.
    return result;
}

/// <summary>
/// Filter out warnings based on the compiler options (/nowarn, /warn and /warnaserror) and the pragma warning directives.
/// </summary>
/// <returns>True when there is no error.</returns>
private bool FilterAndAppendDiagnostics(DiagnosticBag accumulator, IEnumerable<Diagnostic> incoming)
{
    bool hasError = false;
    bool reportSuppressedDiagnostics = Options.ReportSuppressedDiagnostics;

    foreach (Diagnostic d in incoming)
    {
        var filtered = _options.FilterDiagnostic(d);
        if (filtered == null || (!reportSuppressedDiagnostics && filtered.IsSuppressed))
        {
            // Dropped entirely by options/pragmas (or suppressed and suppression reporting is off).
            continue;
        }
        else if (filtered.Severity == DiagnosticSeverity.Error)
        {
            hasError = true;
        }

        accumulator.Add(filtered);
    }

    return !hasError;
}

// Forces completion of source declarations (optionally narrowed to a tree/span),
// freezes the declaration diagnostics, and appends CLS-compliance diagnostics.
private ImmutableArray<Diagnostic> GetSourceDeclarationDiagnostics(SyntaxTree syntaxTree = null, TextSpan? filterSpanWithinTree = null, Func<IEnumerable<Diagnostic>, SyntaxTree, TextSpan?, IEnumerable<Diagnostic>> locationFilterOpt = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // global imports diagnostics (specified via compilation options):
    GlobalImports.Complete(cancellationToken);

    SourceLocation location = null;
    if (syntaxTree != null)
    {
        var root = syntaxTree.GetRoot(cancellationToken);
        location = filterSpanWithinTree.HasValue ?
            new SourceLocation(syntaxTree, filterSpanWithinTree.Value) :
            new SourceLocation(root);
    }

    Assembly.ForceComplete(location, cancellationToken);

    var result = this.FreezeDeclarationDiagnostics();

    if (locationFilterOpt != null)
    {
        Debug.Assert(syntaxTree != null);
        result = locationFilterOpt(result, syntaxTree, filterSpanWithinTree);
    }

    // NOTE: Concatenate the CLS diagnostics *after* filtering by tree/span, because they're already filtered.
    ImmutableArray<Diagnostic> clsDiagnostics = GetClsComplianceDiagnostics(syntaxTree, filterSpanWithinTree, cancellationToken);

    return result.AsImmutable().Concat(clsDiagnostics);
}

// CLS-compliance diagnostics: computed per-tree on demand, or computed once and cached
// (_lazyClsComplianceDiagnostics) for the whole compilation.
private ImmutableArray<Diagnostic> GetClsComplianceDiagnostics(SyntaxTree syntaxTree, TextSpan? filterSpanWithinTree, CancellationToken cancellationToken)
{
    if (syntaxTree != null)
    {
        var builder = DiagnosticBag.GetInstance();
        ClsComplianceChecker.CheckCompliance(this, builder, cancellationToken, syntaxTree, filterSpanWithinTree);
        return builder.ToReadOnlyAndFree();
    }

    if (_lazyClsComplianceDiagnostics.IsDefault)
    {
        var builder = DiagnosticBag.GetInstance();
        ClsComplianceChecker.CheckCompliance(this, builder, cancellationToken);
        ImmutableInterlocked.InterlockedInitialize(ref _lazyClsComplianceDiagnostics, builder.ToReadOnlyAndFree());
    }

    Debug.Assert(!_lazyClsComplianceDiagnostics.IsDefault);
    return _lazyClsComplianceDiagnostics;
}

// Lazily yields only the diagnostics whose location falls within the given tree/span.
private static IEnumerable<Diagnostic> FilterDiagnosticsByLocation(IEnumerable<Diagnostic> diagnostics, SyntaxTree tree, TextSpan? filterSpanWithinTree)
{
    foreach (var diagnostic in diagnostics)
    {
        if (diagnostic.ContainsLocation(tree, filterSpanWithinTree))
        {
            yield return diagnostic;
        }
    }
}

// Tree-scoped analogue of GetDiagnostics(stage, ...): every returned diagnostic is
// located within the given tree (and optional span).
internal ImmutableArray<Diagnostic> GetDiagnosticsForSyntaxTree(
    CompilationStage stage,
    SyntaxTree syntaxTree,
    TextSpan?
filterSpanWithinTree,
    bool includeEarlierStages,
    CancellationToken cancellationToken = default(CancellationToken))
{
    cancellationToken.ThrowIfCancellationRequested();
    var builder = DiagnosticBag.GetInstance();

    if (stage == CompilationStage.Parse || (stage > CompilationStage.Parse && includeEarlierStages))
    {
        AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree,
            diagnostics => FilterDiagnosticsByLocation(diagnostics, syntaxTree, filterSpanWithinTree));

        var syntaxDiagnostics = syntaxTree.GetDiagnostics();
        syntaxDiagnostics = FilterDiagnosticsByLocation(syntaxDiagnostics, syntaxTree, filterSpanWithinTree);
        builder.AddRange(syntaxDiagnostics);
    }

    cancellationToken.ThrowIfCancellationRequested();
    if (stage == CompilationStage.Declare || (stage > CompilationStage.Declare && includeEarlierStages))
    {
        var declarationDiagnostics = GetSourceDeclarationDiagnostics(syntaxTree, filterSpanWithinTree, FilterDiagnosticsByLocation, cancellationToken);
        Debug.Assert(declarationDiagnostics.All(d => d.ContainsLocation(syntaxTree, filterSpanWithinTree)));
        builder.AddRange(declarationDiagnostics);
    }

    cancellationToken.ThrowIfCancellationRequested();
    if (stage == CompilationStage.Compile || (stage > CompilationStage.Compile && includeEarlierStages))
    {
        //remove some errors that don't have locations in the tree, like "no suitable main method."
        //Members in trees other than the one being examined are not compiled. This includes field
        //initializers which can result in 'field is never initialized' warnings for fields in partial
        //types when the field is in a different source file than the one for which we're getting diagnostics.
        //For that reason the bag must be also filtered by tree.
        IEnumerable<Diagnostic> methodBodyDiagnostics = GetDiagnosticsForMethodBodiesInTree(syntaxTree, filterSpanWithinTree, cancellationToken);

        // TODO: Enable the below commented assert and remove the filtering code in the next line.
        // GetDiagnosticsForMethodBodiesInTree seems to be returning diagnostics with locations that don't satisfy the filter tree/span, this must be fixed.
        // Debug.Assert(methodBodyDiagnostics.All(d => DiagnosticContainsLocation(d, syntaxTree, filterSpanWithinTree)));
        methodBodyDiagnostics = FilterDiagnosticsByLocation(methodBodyDiagnostics, syntaxTree, filterSpanWithinTree);

        builder.AddRange(methodBodyDiagnostics);
    }

    // Before returning diagnostics, we filter warnings
    // to honor the compiler options (/nowarn, /warnaserror and /warn) and the pragmas.
    var result = DiagnosticBag.GetInstance();
    FilterAndAppendAndFreeDiagnostics(result, ref builder);
    return result.ToReadOnlyAndFree<Diagnostic>();
}

#endregion

#region Resources

// Writes the default Win32 VERSIONINFO resource, sourcing every field from the
// source assembly's attributes (with alink-compatible blank fallbacks).
protected override void AppendDefaultVersionResource(Stream resourceStream)
{
    var sourceAssembly = SourceAssembly;
    string fileVersion = sourceAssembly.FileVersion ?? sourceAssembly.Identity.Version.ToString();

    Win32ResourceConversions.AppendVersionToResourceStream(resourceStream,
        !this.Options.OutputKind.IsApplication(),
        fileVersion: fileVersion,
        originalFileName: this.SourceModule.Name,
        internalName: this.SourceModule.Name,
        productVersion: sourceAssembly.InformationalVersion ?? fileVersion,
        fileDescription: sourceAssembly.Title ?? " ", //alink would give this a blank if nothing was supplied.
        assemblyVersion: sourceAssembly.Identity.Version,
        legalCopyright: sourceAssembly.Copyright ?? " ", //alink would give this a blank if nothing was supplied.
        legalTrademarks: sourceAssembly.Trademark,
        productName: sourceAssembly.Product,
        comments: sourceAssembly.Description,
        companyName: sourceAssembly.Company);
}

#endregion

#region Emit

internal override byte LinkerMajorVersion => 0x30;

internal override bool IsDelaySigned
{
    get { return SourceAssembly.IsDelaySigned; }
}

internal override StrongNameKeys StrongNameKeys
{
    get { return SourceAssembly.StrongNameKeys; }
}

// Creates the PE module/assembly builder used by emit; returns null when the runtime
// metadata version cannot be determined (an error has been reported in that case).
internal override CommonPEModuleBuilder CreateModuleBuilder(
    EmitOptions emitOptions,
    IMethodSymbol debugEntryPoint,
    IEnumerable<ResourceDescription> manifestResources,
    CompilationTestData testData,
    DiagnosticBag diagnostics,
    CancellationToken cancellationToken)
{
    Debug.Assert(!IsSubmission || HasCodeToEmit());

    string runtimeMDVersion = GetRuntimeMetadataVersion(emitOptions, diagnostics);
    if (runtimeMDVersion == null)
    {
        return null;
    }

    var moduleProps = ConstructModuleSerializationProperties(emitOptions, runtimeMDVersion);

    if (manifestResources == null)
    {
        manifestResources = SpecializedCollections.EmptyEnumerable<ResourceDescription>();
    }

    PEModuleBuilder moduleBeingBuilt;
    if (_options.OutputKind.IsNetModule())
    {
        moduleBeingBuilt = new PENetModuleBuilder(
            (SourceModuleSymbol)SourceModule,
            emitOptions,
            moduleProps,
            manifestResources);
    }
    else
    {
        // Fall back to a DLL when the output kind is invalid so emit can still proceed.
        var kind = _options.OutputKind.IsValid() ? _options.OutputKind : OutputKind.DynamicallyLinkedLibrary;
        moduleBeingBuilt = new PEAssemblyBuilder(
            SourceAssembly,
            emitOptions,
            kind,
            moduleProps,
            manifestResources);
    }

    if (debugEntryPoint != null)
    {
        moduleBeingBuilt.SetDebugEntryPoint((MethodSymbol)debugEntryPoint, diagnostics);
    }

    // testData is only passed when running tests.
if (testData != null)
{
    moduleBeingBuilt.SetMethodTestData(testData.Methods);
    testData.Module = moduleBeingBuilt;
}

return moduleBeingBuilt;
}

// Core of emit: binds/compiles method bodies into the module builder.
// Returns false when declaration or method-body errors prevent emitting.
internal override bool CompileImpl(
    CommonPEModuleBuilder moduleBuilder,
    Stream win32Resources,
    Stream xmlDocStream,
    bool emittingPdb,
    DiagnosticBag diagnostics,
    Predicate<ISymbol> filterOpt,
    CancellationToken cancellationToken)
{
    // The diagnostics should include syntax and declaration errors. We insert these before calling Emitter.Emit, so that the emitter
    // does not attempt to emit if there are declaration errors (but we do insert all errors from method body binding...)
    bool hasDeclarationErrors = !FilterAndAppendDiagnostics(diagnostics, GetDiagnostics(CompilationStage.Declare, true, cancellationToken));

    // TODO (tomat): NoPIA:
    // EmbeddedSymbolManager.MarkAllDeferredSymbolsAsReferenced(this)

    var moduleBeingBuilt = (PEModuleBuilder)moduleBuilder;

    if (moduleBeingBuilt.EmitOptions.EmitMetadataOnly)
    {
        if (hasDeclarationErrors)
        {
            return false;
        }

        if (moduleBeingBuilt.SourceModule.HasBadAttributes)
        {
            // If there were errors but no declaration diagnostics, explicitly add a "Failed to emit module" error.
            diagnostics.Add(ErrorCode.ERR_ModuleEmitFailure, NoLocation.Singleton, ((Cci.INamedEntity)moduleBeingBuilt).Name);
            return false;
        }

        SynthesizedMetadataCompiler.ProcessSynthesizedMembers(this, moduleBeingBuilt, cancellationToken);
    }
    else
    {
        if (emittingPdb && !StartSourceChecksumCalculation(moduleBeingBuilt, diagnostics))
        {
            return false;
        }

        // Perform initial bind of method bodies in spite of earlier errors. This is the same
        // behavior as when calling GetDiagnostics()

        // Use a temporary bag so we don't have to refilter pre-existing diagnostics.
        DiagnosticBag methodBodyDiagnosticBag = DiagnosticBag.GetInstance();

        MethodCompiler.CompileMethodBodies(
            this,
            moduleBeingBuilt,
            emittingPdb,
            hasDeclarationErrors,
            diagnostics: methodBodyDiagnosticBag,
            filterOpt: filterOpt,
            cancellationToken: cancellationToken);

        SetupWin32Resources(moduleBeingBuilt, win32Resources, methodBodyDiagnosticBag);

        ReportManifestResourceDuplicates(
            moduleBeingBuilt.ManifestResources,
            SourceAssembly.Modules.Skip(1).Select((m) => m.Name), //all modules except the first one
            AddedModulesResourceNames(methodBodyDiagnosticBag),
            methodBodyDiagnosticBag);

        bool hasMethodBodyErrorOrWarningAsError = !FilterAndAppendAndFreeDiagnostics(diagnostics, ref methodBodyDiagnosticBag);

        if (hasDeclarationErrors || hasMethodBodyErrorOrWarningAsError)
        {
            return false;
        }
    }

    cancellationToken.ThrowIfCancellationRequested();

    // Use a temporary bag so we don't have to refilter pre-existing diagnostics.
    DiagnosticBag xmlDiagnostics = DiagnosticBag.GetInstance();

    string assemblyName = FileNameUtilities.ChangeExtension(moduleBeingBuilt.EmitOptions.OutputNameOverride, extension: null);
    DocumentationCommentCompiler.WriteDocumentationCommentXml(this, assemblyName, xmlDocStream, xmlDiagnostics, cancellationToken);

    if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref xmlDiagnostics))
    {
        return false;
    }

    // Use a temporary bag so we don't have to refilter pre-existing diagnostics.
    DiagnosticBag importDiagnostics = DiagnosticBag.GetInstance();
    this.ReportUnusedImports(importDiagnostics, cancellationToken);

    if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref importDiagnostics))
    {
        Debug.Assert(false, "Should never produce an error");
        return false;
    }

    return true;
}

// TODO: consider unifying with VB
// Registers debug documents for all trees (and #pragma checksum directives) ahead of PDB
// emission. Returns false when any tree lacks an encoding and so cannot be debugged.
private bool StartSourceChecksumCalculation(PEModuleBuilder moduleBeingBuilt, DiagnosticBag diagnostics)
{
    var syntaxTrees = this.SyntaxTrees;

    // Check that all syntax trees are debuggable:
    bool allTreesDebuggable = true;
    foreach (var tree in syntaxTrees)
    {
        if (!string.IsNullOrEmpty(tree.FilePath) && tree.GetText().Encoding == null)
        {
            diagnostics.Add(ErrorCode.ERR_EncodinglessSyntaxTree, tree.GetRoot().GetLocation());
            allTreesDebuggable = false;
        }
    }

    if (!allTreesDebuggable)
    {
        return false;
    }

    // Add debug documents for all trees with distinct paths.
    foreach (var tree in syntaxTrees)
    {
        if (!string.IsNullOrEmpty(tree.FilePath))
        {
            // compilation does not guarantee that all trees will have distinct paths.
            // Do not attempt adding a document for a particular path if we already added one.
            string normalizedPath = moduleBeingBuilt.NormalizeDebugDocumentPath(tree.FilePath, basePath: null);
            var existingDoc = moduleBeingBuilt.TryGetDebugDocumentForNormalizedPath(normalizedPath);
            if (existingDoc == null)
            {
                moduleBeingBuilt.AddDebugDocument(MakeDebugSourceDocumentForTree(normalizedPath, tree));
            }
        }
    }

    // Add debug documents for all pragmas.
    // If there are clashes with already processed directives, report warnings.
    // If there are clashes with debug documents that came from actual trees, ignore the pragma.
    foreach (var tree in syntaxTrees)
    {
        AddDebugSourceDocumentsForChecksumDirectives(moduleBeingBuilt, tree, diagnostics);
    }

    return true;
}

// Iterator over resource names embedded in added (non-first) modules; unreadable modules
// report ERR_BindToBogus and are skipped rather than aborting enumeration.
private IEnumerable<string> AddedModulesResourceNames(DiagnosticBag diagnostics)
{
    ImmutableArray<ModuleSymbol> modules = SourceAssembly.Modules;

    for (int i = 1; i < modules.Length; i++)
    {
        var m = (Symbols.Metadata.PE.PEModuleSymbol)modules[i];
        ImmutableArray<EmbeddedResource> resources;

        try
        {
            resources = m.Module.GetEmbeddedResourcesOrThrow();
        }
        catch (BadImageFormatException)
        {
            diagnostics.Add(new CSDiagnosticInfo(ErrorCode.ERR_BindToBogus, m), NoLocation.Singleton);
            continue;
        }

        foreach (var resource in resources)
        {
            yield return resource.Name;
        }
    }
}

// Edit-and-continue entry point; delegates to EmitHelpers.
internal override EmitDifferenceResult EmitDifference(
    EmitBaseline baseline,
    IEnumerable<SemanticEdit> edits,
    Func<ISymbol, bool> isAddedSymbol,
    Stream metadataStream,
    Stream ilStream,
    Stream pdbStream,
    ICollection<MethodDefinitionHandle> updatedMethods,
    CompilationTestData testData,
    CancellationToken cancellationToken)
{
    return EmitHelpers.EmitDifference(this, baseline, edits, isAddedSymbol, metadataStream, ilStream, pdbStream, updatedMethods, testData, cancellationToken);
}

// Determines the runtime metadata version; when unknown, reports WRN_NoRuntimeMetadataVersion
// and returns either empty string (warning filtered to non-error) or null (treated as error).
internal string GetRuntimeMetadataVersion(EmitOptions emitOptions, DiagnosticBag diagnostics)
{
    string runtimeMDVersion = GetRuntimeMetadataVersion(emitOptions);
    if (runtimeMDVersion != null)
    {
        return runtimeMDVersion;
    }

    DiagnosticBag runtimeMDVersionDiagnostics = DiagnosticBag.GetInstance();
    runtimeMDVersionDiagnostics.Add(ErrorCode.WRN_NoRuntimeMetadataVersion, NoLocation.Singleton);
    if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref runtimeMDVersionDiagnostics))
    {
        return null;
    }

    return string.Empty; //prevent emitter from crashing.
}

// Prefers the metadata version of the referenced COR library; falls back to the emit option.
private string GetRuntimeMetadataVersion(EmitOptions emitOptions)
{
    var corAssembly = Assembly.CorLibrary as Symbols.Metadata.PE.PEAssemblySymbol;

    if ((object)corAssembly != null)
    {
        return corAssembly.Assembly.ManifestModule.MetadataVersion;
    }

    return emitOptions.RuntimeMetadataVersion;
}

// Registers debug documents for well-formed #pragma checksum directives in the tree,
// warning (WRN_ConflictingChecksum) on conflicting duplicates.
private static void AddDebugSourceDocumentsForChecksumDirectives(
    PEModuleBuilder moduleBeingBuilt,
    SyntaxTree tree,
    DiagnosticBag diagnostics)
{
    var checksumDirectives = tree.GetRoot().GetDirectives(d => d.Kind() == SyntaxKind.PragmaChecksumDirectiveTrivia && !d.ContainsDiagnostics);

    foreach (var directive in checksumDirectives)
    {
        var checksumDirective = (PragmaChecksumDirectiveTriviaSyntax)directive;
        var path = checksumDirective.File.ValueText;

        var checksumText = checksumDirective.Bytes.ValueText;
        var normalizedPath = moduleBeingBuilt.NormalizeDebugDocumentPath(path, basePath: tree.FilePath);
        var existingDoc = moduleBeingBuilt.TryGetDebugDocumentForNormalizedPath(normalizedPath);

        // duplicate checksum pragmas are valid as long as values match
        // if we have seen this document already, check for matching values.
        if (existingDoc != null)
        {
            // pragma matches a file path on an actual tree.
            // Dev12 compiler just ignores the pragma in this case which means that
            // checksum of the actual tree always wins and no warning is given.
            // We will continue doing the same.
            if (existingDoc.IsComputedChecksum)
            {
                continue;
            }

            var checksumAndAlgorithm = existingDoc.ChecksumAndAlgorithm;
            if (ChecksumMatches(checksumText, checksumAndAlgorithm.Item1))
            {
                var guid = Guid.Parse(checksumDirective.Guid.ValueText);
                if (guid == checksumAndAlgorithm.Item2)
                {
                    // all parts match, nothing to do
                    continue;
                }
            }

            // did not match to an existing document
            // produce a warning and ignore the pragma
            diagnostics.Add(ErrorCode.WRN_ConflictingChecksum, new SourceLocation(checksumDirective), path);
        }
        else
        {
            var newDocument = new Cci.DebugSourceDocument(
                normalizedPath,
                Cci.DebugSourceDocument.CorSymLanguageTypeCSharp,
                MakeChecksumBytes(checksumDirective.Bytes.ValueText),
                Guid.Parse(checksumDirective.Guid.ValueText));

            moduleBeingBuilt.AddDebugDocument(newDocument);
        }
    }
}

// Compares a hex-string checksum (two chars per byte) against raw bytes.
private static bool ChecksumMatches(string bytesText, ImmutableArray<byte> bytes)
{
    if (bytesText.Length != bytes.Length * 2)
    {
        return false;
    }

    for (int i = 0, len = bytesText.Length / 2; i < len; i++)
    {
        // 1A in text becomes 0x1A
        var b = SyntaxFacts.HexValue(bytesText[i * 2]) * 16 + SyntaxFacts.HexValue(bytesText[i * 2 + 1]);

        if (b != bytes[i])
        {
            return false;
        }
    }

    return true;
}

// Decodes a hex-string checksum (two chars per byte) into raw bytes.
private static ImmutableArray<byte> MakeChecksumBytes(string bytesText)
{
    int length = bytesText.Length / 2;
    var builder = ArrayBuilder<byte>.GetInstance(length);

    for (int i = 0; i < length; i++)
    {
        // 1A in text becomes 0x1A
        var b = SyntaxFacts.HexValue(bytesText[i * 2]) * 16 + SyntaxFacts.HexValue(bytesText[i * 2 + 1]);
        builder.Add((byte)b);
    }

    return builder.ToImmutableAndFree();
}

// Debug document whose checksum is computed lazily from the tree itself.
private static Cci.DebugSourceDocument MakeDebugSourceDocumentForTree(string normalizedPath, SyntaxTree tree)
{
    return new Cci.DebugSourceDocument(normalizedPath, Cci.DebugSourceDocument.CorSymLanguageTypeCSharp, () => tree.GetChecksumAndAlgorithm());
}

// Attaches the supplied Win32 resources (COFF object or .RES format) to the module,
// reporting ERR_BadWin32Res for unrecognized content.
private void SetupWin32Resources(PEModuleBuilder moduleBeingBuilt, Stream win32Resources, DiagnosticBag diagnostics)
{
    if (win32Resources == null)
        return;

    switch (DetectWin32ResourceForm(win32Resources))
    {
        case Win32ResourceForm.COFF:
            moduleBeingBuilt.Win32ResourceSection = MakeWin32ResourcesFromCOFF(win32Resources, diagnostics);
            break;
        case Win32ResourceForm.RES:
            moduleBeingBuilt.Win32Resources = MakeWin32ResourceList(win32Resources, diagnostics);
            break;
        default:
            diagnostics.Add(ErrorCode.ERR_BadWin32Res, NoLocation.Singleton, "Unrecognized file format.");
            break;
    }
}

// True when any tree has at least one top-level member to compile.
internal override bool HasCodeToEmit()
{
    foreach (var syntaxTree in this.SyntaxTrees)
    {
        var unit = syntaxTree.GetCompilationUnitRoot();
        if (unit.Members.Count > 0)
        {
            return true;
        }
    }

    return false;
}

#endregion

#region Common Members

// The Common* members below bridge the language-agnostic Compilation API to the
// strongly-typed C# implementations; each simply delegates.
protected override Compilation CommonWithReferences(IEnumerable<MetadataReference> newReferences)
{
    return WithReferences(newReferences);
}

protected override Compilation CommonWithAssemblyName(string assemblyName)
{
    return WithAssemblyName(assemblyName);
}

protected override ITypeSymbol CommonGetSubmissionResultType(out bool hasValue)
{
    return GetSubmissionResultType(out hasValue);
}

protected override IAssemblySymbol CommonAssembly
{
    get { return this.Assembly; }
}

protected override INamespaceSymbol CommonGlobalNamespace
{
    get { return this.GlobalNamespace; }
}

protected override CompilationOptions CommonOptions
{
    get { return _options; }
}

protected override Compilation CommonPreviousSubmission
{
    get { return _previousSubmission; }
}

protected override SemanticModel CommonGetSemanticModel(SyntaxTree syntaxTree, bool ignoreAccessibility)
{
    return this.GetSemanticModel((SyntaxTree)syntaxTree, ignoreAccessibility);
}

protected override IEnumerable<SyntaxTree> CommonSyntaxTrees
{
    get { return this.SyntaxTrees; }
}

protected override Compilation CommonAddSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    return this.AddSyntaxTrees(trees);
}

protected override Compilation CommonRemoveSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    return this.RemoveSyntaxTrees(trees);
}

protected override Compilation CommonRemoveAllSyntaxTrees()
{
    return
this.RemoveAllSyntaxTrees(); } protected override Compilation CommonReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree) { return this.ReplaceSyntaxTree((SyntaxTree)oldTree, (SyntaxTree)newTree); } protected override Compilation CommonWithOptions(CompilationOptions options) { return this.WithOptions((CSharpCompilationOptions)options); } protected override Compilation CommonWithPreviousSubmission(Compilation newPreviousSubmission) { return this.WithPreviousSubmission((CSharpCompilation)newPreviousSubmission); } protected override bool CommonContainsSyntaxTree(SyntaxTree syntaxTree) { return this.ContainsSyntaxTree((SyntaxTree)syntaxTree); } protected override ISymbol CommonGetAssemblyOrModuleSymbol(MetadataReference reference) { return this.GetAssemblyOrModuleSymbol(reference); } protected override Compilation CommonClone() { return this.Clone(); } protected override IModuleSymbol CommonSourceModule { get { return this.SourceModule; } } protected override INamedTypeSymbol CommonGetSpecialType(SpecialType specialType) { return this.GetSpecialType(specialType); } protected override INamespaceSymbol CommonGetCompilationNamespace(INamespaceSymbol namespaceSymbol) { return this.GetCompilationNamespace(namespaceSymbol); } protected override INamedTypeSymbol CommonGetTypeByMetadataName(string metadataName) { return this.GetTypeByMetadataName(metadataName); } protected override INamedTypeSymbol CommonScriptClass { get { return this.ScriptClass; } } protected override IArrayTypeSymbol CommonCreateArrayTypeSymbol(ITypeSymbol elementType, int rank) { return CreateArrayTypeSymbol(elementType.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("elementType"), rank); } protected override IPointerTypeSymbol CommonCreatePointerTypeSymbol(ITypeSymbol elementType) { return CreatePointerTypeSymbol(elementType.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("elementType")); } protected override ITypeSymbol CommonDynamicType { get { return DynamicType; } } protected override 
INamedTypeSymbol CommonObjectType { get { return this.ObjectType; } } protected override IMethodSymbol CommonGetEntryPoint(CancellationToken cancellationToken) { return this.GetEntryPoint(cancellationToken); } internal override int CompareSourceLocations(Location loc1, Location loc2) { Debug.Assert(loc1.IsInSource); Debug.Assert(loc2.IsInSource); var comparison = CompareSyntaxTreeOrdering(loc1.SourceTree, loc2.SourceTree); if (comparison != 0) { return comparison; } return loc1.SourceSpan.Start - loc2.SourceSpan.Start; } /// <summary> /// Return true if there is a source declaration symbol name that meets given predicate. /// </summary> public override bool ContainsSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter = SymbolFilter.TypeAndMember, CancellationToken cancellationToken = default(CancellationToken)) { if (predicate == null) { throw new ArgumentNullException(nameof(predicate)); } if (filter == SymbolFilter.None) { throw new ArgumentException(CSharpResources.NoNoneSearchCriteria, nameof(filter)); } return this.Declarations.ContainsName(predicate, filter, cancellationToken); } /// <summary> /// Return source declaration symbols whose name meets given predicate. /// </summary> public override IEnumerable<ISymbol> GetSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter = SymbolFilter.TypeAndMember, CancellationToken cancellationToken = default(CancellationToken)) { if (predicate == null) { throw new ArgumentNullException(nameof(predicate)); } if (filter == SymbolFilter.None) { throw new ArgumentException(CSharpResources.NoNoneSearchCriteria, nameof(filter)); } return new SymbolSearcher(this).GetSymbolsWithName(predicate, filter, cancellationToken); } #endregion /// <summary> /// Returns if the compilation has all of the members necessary to emit metadata about /// dynamic types. 
/// </summary> /// <returns></returns> internal bool HasDynamicEmitAttributes() { return (object)GetWellKnownTypeMember(WellKnownMember.System_Runtime_CompilerServices_DynamicAttribute__ctor) != null && (object)GetWellKnownTypeMember(WellKnownMember.System_Runtime_CompilerServices_DynamicAttribute__ctorTransformFlags) != null; } internal override AnalyzerDriver AnalyzerForLanguage(ImmutableArray<DiagnosticAnalyzer> analyzers, AnalyzerManager analyzerManager) { return new AnalyzerDriver<SyntaxKind>(analyzers, n => n.Kind(), analyzerManager); } internal void SymbolDeclaredEvent(Symbol symbol) { if (EventQueue != null) EventQueue.Enqueue(new SymbolDeclaredCompilationEvent(this, symbol)); } /// <summary> /// Determine if enum arrays can be initialized using block initialization. /// </summary> /// <returns>True if it's safe to use block initialization for enum arrays.</returns> /// <remarks> /// In NetFx 4.0, block array initializers do not work on all combinations of {32/64 X Debug/Retail} when array elements are enums. /// This is fixed in 4.5 thus enabling block array initialization for a very common case. 
/// We look for the presence of <see cref="System.Runtime.GCLatencyMode.SustainedLowLatency"/> which was introduced in .Net 4.5 /// </remarks> internal bool EnableEnumArrayBlockInitialization { get { var sustainedLowLatency = GetWellKnownTypeMember(WellKnownMember.System_Runtime_GCLatencyMode__SustainedLowLatency); return sustainedLowLatency != null && sustainedLowLatency.ContainingAssembly == Assembly.CorLibrary; } } private class SymbolSearcher { private readonly Dictionary<Declaration, NamespaceOrTypeSymbol> _cache; private readonly CSharpCompilation _compilation; public SymbolSearcher(CSharpCompilation compilation) { _cache = new Dictionary<Declaration, NamespaceOrTypeSymbol>(); _compilation = compilation; } public IEnumerable<ISymbol> GetSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter, CancellationToken cancellationToken) { var result = new HashSet<ISymbol>(); var spine = new List<MergedNamespaceOrTypeDeclaration>(); AppendSymbolsWithName(spine, _compilation.Declarations.MergedRoot, predicate, filter, result, cancellationToken); return result; } private void AppendSymbolsWithName( List<MergedNamespaceOrTypeDeclaration> spine, MergedNamespaceOrTypeDeclaration current, Func<string, bool> predicate, SymbolFilter filter, HashSet<ISymbol> set, CancellationToken cancellationToken) { var includeNamespace = (filter & SymbolFilter.Namespace) == SymbolFilter.Namespace; var includeType = (filter & SymbolFilter.Type) == SymbolFilter.Type; var includeMember = (filter & SymbolFilter.Member) == SymbolFilter.Member; if (current.Kind == DeclarationKind.Namespace) { if (includeNamespace && predicate(current.Name)) { var container = GetSpineSymbol(spine); set.Add(GetSymbol(container, current)); } } else { if (includeType && predicate(current.Name)) { var container = GetSpineSymbol(spine); set.Add(GetSymbol(container, current)); } if (includeMember) { AppendMemberSymbolsWithName(spine, current, predicate, set, cancellationToken); } } spine.Add(current); foreach 
(var child in current.Children.OfType<MergedNamespaceOrTypeDeclaration>()) { if (includeMember || includeType) { AppendSymbolsWithName(spine, child, predicate, filter, set, cancellationToken); continue; } if (child.Kind == DeclarationKind.Namespace) { AppendSymbolsWithName(spine, child, predicate, filter, set, cancellationToken); } } // pop last one spine.RemoveAt(spine.Count - 1); } private void AppendMemberSymbolsWithName( List<MergedNamespaceOrTypeDeclaration> spine, MergedNamespaceOrTypeDeclaration current, Func<string, bool> predicate, HashSet<ISymbol> set, CancellationToken cancellationToken) { spine.Add(current); var container = GetSpineSymbol(spine); foreach (var member in container.GetMembers()) { if (!member.IsTypeOrTypeAlias() && (member.CanBeReferencedByName || member.IsExplicitInterfaceImplementation() || member.IsIndexer()) && predicate(member.Name)) { set.Add(member); } } spine.RemoveAt(spine.Count - 1); } private NamespaceOrTypeSymbol GetSpineSymbol(List<MergedNamespaceOrTypeDeclaration> spine) { if (spine.Count == 0) { return null; } var symbol = GetCachedSymbol(spine[spine.Count - 1]); if (symbol != null) { return symbol; } var current = _compilation.GlobalNamespace as NamespaceOrTypeSymbol; for (var i = 1; i < spine.Count; i++) { current = GetSymbol(current, spine[i]); } return current; } private NamespaceOrTypeSymbol GetCachedSymbol(MergedNamespaceOrTypeDeclaration declaration) { NamespaceOrTypeSymbol symbol; if (_cache.TryGetValue(declaration, out symbol)) { return symbol; } return null; } private NamespaceOrTypeSymbol GetSymbol(NamespaceOrTypeSymbol container, MergedNamespaceOrTypeDeclaration declaration) { if (container == null) { return _compilation.GlobalNamespace; } if (declaration.Kind == DeclarationKind.Namespace) { AddCache(container.GetMembers(declaration.Name).OfType<NamespaceOrTypeSymbol>()); } else { AddCache(container.GetTypeMembers(declaration.Name)); } return GetCachedSymbol(declaration); } private void 
AddCache(IEnumerable<NamespaceOrTypeSymbol> symbols) { foreach (var symbol in symbols) { var mergedNamespace = symbol as MergedNamespaceSymbol; if (mergedNamespace != null) { _cache[mergedNamespace.ConstituentNamespaces.OfType<SourceNamespaceSymbol>().First().MergedDeclaration] = symbol; continue; } var sourceNamespace = symbol as SourceNamespaceSymbol; if (sourceNamespace != null) { _cache[sourceNamespace.MergedDeclaration] = sourceNamespace; continue; } var sourceType = symbol as SourceMemberContainerTypeSymbol; if (sourceType != null) { _cache[sourceType.MergedDeclaration] = sourceType; } } } } } }
EricArndt/roslyn
src/Compilers/CSharp/Portable/Compilation/CSharpCompilation.cs
C#
apache-2.0
126,286
/******************************************************************************* * Copyright 2015 Software Evolution and Architecture Lab, University of Zurich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package CloudWave; public enum CloudWaveJNI { instance; public static final String CLOUDWAVE_LIB = "cloudwavejni"; CloudWaveJNI() {System.loadLibrary(CLOUDWAVE_LIB);} public static CloudWaveJNI getInstance(){return instance;} public void init() throws CloudWaveException{ int r = initJNI(); if (r<0) { System.err.println("initJNI returned " + r); throw new CloudWaveException(); } } public void free(){ freeJNI(); } protected IEventHandler eventHandler; public IEventHandler getEventHandler() { return eventHandler; } public void setEventHandler(IEventHandler eh) { synchronized(this){ eventHandler = eh;} } public void doEvent(String event){ synchronized(this) { if (eventHandler!=null) eventHandler.doEvent(event); } } protected synchronized static void callback(String event){ instance.doEvent(event); } //#: Init/Free public native int initJNI(); protected native int freeJNI(); //:# //#: Log protected native int initLog(); protected native int freeLog(); protected native int setLogId(String id); protected native String getLogId(); protected native int recordLog(int level, String message); protected native int recordLogL(int level, String message, long id); //:# //#: Metric protected native int 
initMetric(); protected native int freeMetric(); protected native int recordMetricL(int source, String name, String mdata, String munit, int type, long value); protected native int recordMetricD(int source, String name, String mdata, String munit, int type, double value); protected native int recordMetricS(int source, String name, String mdata, String munit, int type, String value); protected native int recordEventL(int source, String name, String mdata, String munit, int type, long value); protected native int recordEventD(int source, String name, String mdata, String munit, int type, double value); protected native int recordEventS(int source, String name, String mdata, String munit, int type, String value); //:# //#: Events protected native int initEvent(); protected native int freeEvent(); protected native int postEvent(String event_json); protected native long subscribe(String event_id); protected native int unsubscribe(long id); //:# }
harinigunabalan/PerformanceHat
cw-feedback-handler/src/main/java/CloudWave/CloudWaveJNI.java
Java
apache-2.0
3,270