code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
/** @file
  UDF/ECMA-167 file system driver.

  Copyright (C) 2014-2017 Paulo Alcantara <pcacjr@zytor.com>

  This program and the accompanying materials are licensed and made available
  under the terms and conditions of the BSD License which accompanies this
  distribution. The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.php

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR
  IMPLIED.
**/

#include "Udf.h"

//
// UDF filesystem driver's Global Variables.
//
EFI_DRIVER_BINDING_PROTOCOL gUdfDriverBinding = {
  UdfDriverBindingSupported,
  UdfDriverBindingStart,
  UdfDriverBindingStop,
  0x10,
  NULL,
  NULL
};

EFI_SIMPLE_FILE_SYSTEM_PROTOCOL gUdfSimpleFsTemplate = {
  EFI_SIMPLE_FILE_SYSTEM_PROTOCOL_REVISION,
  UdfOpenVolume
};

/**
  Test to see if this driver supports ControllerHandle. Any ControllerHandle
  that contains a BlockIo and DiskIo protocol can be supported.

  @param[in]  This                 Protocol instance pointer.
  @param[in]  ControllerHandle     Handle of device to test.
  @param[in]  RemainingDevicePath  Optional parameter use to pick a specific
                                   child device to start.

  @retval EFI_SUCCESS          This driver supports this device.
  @retval EFI_ALREADY_STARTED  This driver is already running on this device.
  @retval other                This driver does not support this device.

**/
EFI_STATUS
EFIAPI
UdfDriverBindingSupported (
  IN EFI_DRIVER_BINDING_PROTOCOL  *This,
  IN EFI_HANDLE                   ControllerHandle,
  IN EFI_DEVICE_PATH_PROTOCOL     *RemainingDevicePath
  )
{
  EFI_STATUS            Status;
  EFI_DISK_IO_PROTOCOL  *DiskIo;

  //
  // Open DiskIo protocol BY_DRIVER: succeeds only when no other driver holds
  // it exclusively, which is exactly the "can we start?" question.
  //
  Status = gBS->OpenProtocol (
                  ControllerHandle,
                  &gEfiDiskIoProtocolGuid,
                  (VOID **)&DiskIo,
                  This->DriverBindingHandle,
                  ControllerHandle,
                  EFI_OPEN_PROTOCOL_BY_DRIVER
                  );
  if (EFI_ERROR (Status)) {
    return Status;
  }

  //
  // Close DiskIo protocol on ControllerHandle; Supported() must not keep
  // protocols open.
  //
  gBS->CloseProtocol (
         ControllerHandle,
         &gEfiDiskIoProtocolGuid,
         This->DriverBindingHandle,
         ControllerHandle
         );

  //
  // Test whether ControllerHandle supports BlockIo protocol (no instance is
  // returned for TEST_PROTOCOL, so nothing to close).
  //
  Status = gBS->OpenProtocol (
                  ControllerHandle,
                  &gEfiBlockIoProtocolGuid,
                  NULL,
                  This->DriverBindingHandle,
                  ControllerHandle,
                  EFI_OPEN_PROTOCOL_TEST_PROTOCOL
                  );

  return Status;
}

/**
  Start this driver on ControllerHandle by opening Block IO and Disk IO
  protocols and installing a Simple File System protocol instance for the
  UDF volume found on the media.

  @param[in]  This                 Protocol instance pointer.
  @param[in]  ControllerHandle     Handle of device to bind driver to.
  @param[in]  RemainingDevicePath  Optional parameter use to pick a specific
                                   child device to start.

  @retval EFI_SUCCESS          This driver is added to ControllerHandle.
  @retval EFI_ALREADY_STARTED  This driver is already running on
                               ControllerHandle.
  @retval other                This driver does not support this device.

**/
EFI_STATUS
EFIAPI
UdfDriverBindingStart (
  IN EFI_DRIVER_BINDING_PROTOCOL  *This,
  IN EFI_HANDLE                   ControllerHandle,
  IN EFI_DEVICE_PATH_PROTOCOL     *RemainingDevicePath
  )
{
  EFI_TPL                     OldTpl;
  EFI_STATUS                  Status;
  EFI_BLOCK_IO_PROTOCOL       *BlockIo;
  EFI_DISK_IO_PROTOCOL        *DiskIo;
  PRIVATE_UDF_SIMPLE_FS_DATA  *PrivFsData;

  OldTpl = gBS->RaiseTPL (TPL_CALLBACK);

  //
  // NULL so the Exit path can tell whether the private data was allocated.
  //
  PrivFsData = NULL;

  //
  // Open BlockIo protocol on ControllerHandle. GET_PROTOCOL does not add an
  // open-protocol reference; Supported() already proved it is present.
  //
  Status = gBS->OpenProtocol (
                  ControllerHandle,
                  &gEfiBlockIoProtocolGuid,
                  (VOID **)&BlockIo,
                  This->DriverBindingHandle,
                  ControllerHandle,
                  EFI_OPEN_PROTOCOL_GET_PROTOCOL
                  );
  ASSERT_EFI_ERROR (Status);

  //
  // Open DiskIo protocol on ControllerHandle
  //
  Status = gBS->OpenProtocol (
                  ControllerHandle,
                  &gEfiDiskIoProtocolGuid,
                  (VOID **)&DiskIo,
                  This->DriverBindingHandle,
                  ControllerHandle,
                  EFI_OPEN_PROTOCOL_BY_DRIVER
                  );
  ASSERT_EFI_ERROR (Status);

  //
  // Check if ControllerHandle supports an UDF file system
  //
  Status = SupportUdfFileSystem (This, ControllerHandle);
  if (EFI_ERROR (Status)) {
    goto Exit;
  }

  //
  // Initialize private file system structure
  //
  PrivFsData =
    (PRIVATE_UDF_SIMPLE_FS_DATA *)
    AllocateZeroPool (sizeof (PRIVATE_UDF_SIMPLE_FS_DATA));
  if (PrivFsData == NULL) {
    Status = EFI_OUT_OF_RESOURCES;
    goto Exit;
  }

  //
  // Create new child handle
  //
  PrivFsData->Signature = PRIVATE_UDF_SIMPLE_FS_DATA_SIGNATURE;
  PrivFsData->BlockIo   = BlockIo;
  PrivFsData->DiskIo    = DiskIo;
  PrivFsData->Handle    = ControllerHandle;

  //
  // Set up SimpleFs protocol
  //
  CopyMem (
    (VOID *)&PrivFsData->SimpleFs,
    (VOID *)&gUdfSimpleFsTemplate,
    sizeof (EFI_SIMPLE_FILE_SYSTEM_PROTOCOL)
    );

  //
  // Install child handle
  //
  Status = gBS->InstallMultipleProtocolInterfaces (
                  &PrivFsData->Handle,
                  &gEfiSimpleFileSystemProtocolGuid,
                  &PrivFsData->SimpleFs,
                  NULL
                  );

Exit:
  if (EFI_ERROR (Status)) {
    //
    // Free the private data if it was allocated; the original code leaked it
    // when InstallMultipleProtocolInterfaces() failed.
    //
    if (PrivFsData != NULL) {
      FreePool ((VOID *)PrivFsData);
    }

    //
    // Close DiskIo protocol on ControllerHandle
    //
    gBS->CloseProtocol (
           ControllerHandle,
           &gEfiDiskIoProtocolGuid,
           This->DriverBindingHandle,
           ControllerHandle
           );
    //
    // Close BlockIo protocol on ControllerHandle
    //
    gBS->CloseProtocol (
           ControllerHandle,
           &gEfiBlockIoProtocolGuid,
           This->DriverBindingHandle,
           ControllerHandle
           );
  }

  gBS->RestoreTPL (OldTpl);

  return Status;
}

/**
  Stop this driver on ControllerHandle. Support stopping any child handles
  created by this driver.

  @param  This              Protocol instance pointer.
  @param  ControllerHandle  Handle of device to stop driver on.
  @param  NumberOfChildren  Number of Handles in ChildHandleBuffer. If number
                            of children is zero stop the entire bus driver.
  @param  ChildHandleBuffer List of Child Handles to Stop.

  @retval EFI_SUCCESS  This driver is removed ControllerHandle.
  @retval other        This driver was not removed from this device.

**/
EFI_STATUS
EFIAPI
UdfDriverBindingStop (
  IN  EFI_DRIVER_BINDING_PROTOCOL  *This,
  IN  EFI_HANDLE                   ControllerHandle,
  IN  UINTN                        NumberOfChildren,
  IN  EFI_HANDLE                   *ChildHandleBuffer
  )
{
  PRIVATE_UDF_SIMPLE_FS_DATA       *PrivFsData;
  EFI_STATUS                       Status;
  EFI_SIMPLE_FILE_SYSTEM_PROTOCOL  *SimpleFs;

  //
  // Open SimpleFs protocol on ControllerHandle
  //
  Status = gBS->OpenProtocol (
                  ControllerHandle,
                  &gEfiSimpleFileSystemProtocolGuid,
                  (VOID **)&SimpleFs,
                  This->DriverBindingHandle,
                  ControllerHandle,
                  EFI_OPEN_PROTOCOL_GET_PROTOCOL
                  );
  if (!EFI_ERROR (Status)) {
    PrivFsData = PRIVATE_UDF_SIMPLE_FS_DATA_FROM_THIS (SimpleFs);

    //
    // Uninstall child handle
    //
    Status = gBS->UninstallMultipleProtocolInterfaces (
                    PrivFsData->Handle,
                    &gEfiSimpleFileSystemProtocolGuid,
                    &PrivFsData->SimpleFs,
                    NULL
                    );

    FreePool ((VOID *)PrivFsData);
  }

  if (!EFI_ERROR (Status)) {
    //
    // Close DiskIo protocol on ControllerHandle
    //
    gBS->CloseProtocol (
           ControllerHandle,
           &gEfiDiskIoProtocolGuid,
           This->DriverBindingHandle,
           ControllerHandle
           );
    //
    // Close BlockIo protocol on ControllerHandle
    //
    gBS->CloseProtocol (
           ControllerHandle,
           &gEfiBlockIoProtocolGuid,
           This->DriverBindingHandle,
           ControllerHandle
           );
  }

  return Status;
}

/**
  The user Entry Point for UDF file system driver. The user code starts with
  this function.

  @param[in] ImageHandle  The firmware allocated handle for the EFI image.
  @param[in] SystemTable  A pointer to the EFI System Table.

  @retval EFI_SUCCESS  The entry point is executed successfully.
  @retval other        Some error occurs when executing this entry point.

**/
EFI_STATUS
EFIAPI
InitializeUdf (
  IN EFI_HANDLE        ImageHandle,
  IN EFI_SYSTEM_TABLE  *SystemTable
  )
{
  EFI_STATUS  Status;

  Status = EfiLibInstallDriverBindingComponentName2 (
             ImageHandle,
             SystemTable,
             &gUdfDriverBinding,
             ImageHandle,
             &gUdfComponentName,
             &gUdfComponentName2
             );
  ASSERT_EFI_ERROR (Status);

  return Status;
}
google/google-ctf
third_party/edk2/MdeModulePkg/Universal/Disk/UdfDxe/Udf.c
C
apache-2.0
9,008
<!-- Faceted datatable for the mapped-concepts view; the params binding wires the
     component's column/facet configuration to the `mappedConcepts` observable.
     NOTE(review): binding context members ($component.tableColumns, etc.) are
     defined by the owning component — verify against MappedConcepts component JS. -->
<faceted-datatable params="{columns: $component.tableColumns, options: $component.facetOptions, reference: mappedConcepts, rowCallback: $component.contextSensitiveLinkColor}"></faceted-datatable>
OHDSI/Calypso
js/modules/conceptsetbuilder/components/MappedConcepts.html
HTML
apache-2.0
196
/* Clock canvas: fixed 400x600 box, horizontally centered, no border. */
canvas {
  display: block;
  margin: 0 auto;
  width: 400px;
  height: 600px;
  border: none;
}
NicolasMilligan/CanvasClock
clock.css
CSS
apache-2.0
102
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor.intercept; import org.apache.camel.ContextTestSupport; import org.apache.camel.builder.RouteBuilder; import org.junit.Test; /** * Testing http://camel.apache.org/dsl.html */ public class InterceptFromSimpleLogTest extends ContextTestSupport { @Test public void testInterceptLog() throws Exception { getMockEndpoint("mock:result").expectedMessageCount(1); getMockEndpoint("mock:first").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { public void configure() throws Exception { // START SNIPPET: e1 // intercept all incomming routes and log it interceptFrom().to("log:received"); // and here we have a couple of routes from("direct:start").to("mock:first").to("seda:bar"); from("seda:bar").to("mock:result"); // END SNIPPET: e1 } }; } }
davidkarlsen/camel
core/camel-core/src/test/java/org/apache/camel/processor/intercept/InterceptFromSimpleLogTest.java
Java
apache-2.0
1,955
imgUrl1="data/afficheimg/20140121tpclwh.jpg"; imgtext1=""; imgLink1=escape("http://www.haoid.cn"); imgUrl2="data/afficheimg/20140121tiskvu.jpg"; imgtext2=""; imgLink2=escape("http://www.haoid.cn"); imgUrl3="data/afficheimg/20140121gmlyoj.jpg"; imgtext3=""; imgLink3=escape("http://www.haoid.cn"); var pics=imgUrl1+"|"+imgUrl2+"|"+imgUrl3; var links=imgLink1+"|"+imgLink2+"|"+imgLink3; var texts=imgtext1+"|"+imgtext2+"|"+imgtext3;
lvguocai/ec
data/flashdata/pinkfocus/data.js
JavaScript
apache-2.0
431
package org.apereo.cas.authentication.principal.cache;

import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.util.spring.ApplicationContextProvider;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import lombok.val;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Wrapper around an attribute repository where attributes cached for a configurable period
 * based on google guava's caching library.
 *
 * @author Misagh Moayyed
 * @since 4.2
 */
@ToString(callSuper = true)
@Slf4j
@Getter
@Setter
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true, of = {"timeUnit", "expiration"})
public class CachingPrincipalAttributesRepository extends AbstractPrincipalAttributesRepository {

    private static final long serialVersionUID = 6350244643948535906L;

    /**
     * The expiration time.
     */
    protected long expiration;

    /**
     * Expiration time unit.
     */
    protected String timeUnit;

    /**
     * Builds the repository from JSON; {@code expiryDuration} is interpreted
     * in units of {@code timeUnit}.
     *
     * @param timeUnit       name of the expiration time unit
     * @param expiryDuration expiration duration, in {@code timeUnit} units
     */
    @JsonCreator
    public CachingPrincipalAttributesRepository(@JsonProperty("timeUnit") final String timeUnit,
                                                @JsonProperty("expiration") final long expiryDuration) {
        this.timeUnit = timeUnit;
        this.expiration = expiryDuration;
    }

    @Override
    public Map<String, List<Object>> getAttributes(final Principal principal, final RegisteredService registeredService) {
        val mergeStrategy = determineMergingStrategy();
        LOGGER.trace("Determined merging strategy as [{}]", mergeStrategy);

        // Cache hit short-circuits everything else, including repository lookups.
        val cachedAttributes = getCachedPrincipalAttributes(principal, registeredService);
        if (cachedAttributes != null && !cachedAttributes.isEmpty()) {
            LOGGER.debug("Found [{}] cached attributes for principal [{}] that are [{}]", cachedAttributes.size(),
                principal.getId(), cachedAttributes);
            return cachedAttributes;
        }

        val principalAttributes = getPrincipalAttributes(principal);
        LOGGER.trace("Principal attributes extracted for [{}] are [{}]", principal.getId(), principalAttributes);

        // Only consult the person-directory repositories when ids are configured;
        // the merged (or plain) result is converted and written into the cache.
        if (areAttributeRepositoryIdsDefined()) {
            val personDirectoryAttributes = retrievePersonAttributesFromAttributeRepository(principal.getId());
            LOGGER.debug("Found [{}] attributes for principal [{}] from the attribute repository.",
                personDirectoryAttributes.size(), principal.getId());
            LOGGER.debug("Merging current principal attributes with that of the repository via strategy [{}]", mergeStrategy);
            val mergedAttributes = mergeStrategy.getAttributeMerger().mergeAttributes(principalAttributes, personDirectoryAttributes);
            return convertAttributesToPrincipalAttributesAndCache(principal, mergedAttributes, registeredService);
        }
        return convertAttributesToPrincipalAttributesAndCache(principal, principalAttributes, registeredService);
    }

    @Override
    protected void addPrincipalAttributes(final String id, final Map<String, List<Object>> attributes,
                                          final RegisteredService registeredService) {
        // Cache failures are logged, not propagated: caching is best-effort.
        try {
            val cache = getCacheInstanceFromApplicationContext();
            cache.putCachedAttributesFor(registeredService, this, id, attributes);
            LOGGER.trace("Cached attributes for [{}]", id);
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
    }

    /**
     * Gets cached principal attributes.
     *
     * @param principal         the principal
     * @param registeredService the registered service
     * @return the cached principal attributes, or an empty map on cache errors
     */
    @JsonIgnore
    protected Map<String, List<Object>> getCachedPrincipalAttributes(final Principal principal, final RegisteredService registeredService) {
        try {
            val cache = getCacheInstanceFromApplicationContext();
            return cache.getCachedAttributesFor(registeredService, this, principal);
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        return new HashMap<>(0);
    }

    /**
     * Gets cache instance from application context.
     *
     * @return the cache instance from application context
     */
    @JsonIgnore
    public PrincipalAttributesRepositoryCache getCacheInstanceFromApplicationContext() {
        // Bean lookup by name; presumably registered by CAS core configuration — verify bean id if refactoring.
        val ctx = ApplicationContextProvider.getApplicationContext();
        return ctx.getBean("principalAttributesRepositoryCache", PrincipalAttributesRepositoryCache.class);
    }
}
rrenomeron/cas
core/cas-server-core-authentication-attributes/src/main/java/org/apereo/cas/authentication/principal/cache/CachingPrincipalAttributesRepository.java
Java
apache-2.0
4,862
package org.apache.hadoop.hdfs.server.namenode; import javax.servlet.*; import javax.servlet.http.*; import javax.servlet.jsp.*; import java.io.IOException; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.hdfs.server.namenode.NamenodeJspHelper.XMLCorruptBlockInfo; import org.apache.hadoop.util.ServletUtil; import org.znerd.xmlenc.*; public final class corrupt_005freplicas_005fxml_jsp extends org.apache.jasper.runtime.HttpJspBase implements org.apache.jasper.runtime.JspSourceDependent { /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* This script outputs information about corrupt replicas on the system (as XML). The script takes two GET parameters: - numCorruptBlocks The number of corrupt blocks to return. Must be >= 0 && <= 100. Defaults to 10. - startingBlockId The block id (as a long) from which to begin iterating. Output does not include the starting block id (it begins at the following block id). If not given, iteration starts from beginning. 
Example output is below: <corrupt_block_info> <dfs_replication>1</dfs_replication> <num_missing_blocks>1</num_missing_blocks> <num_corrupt_replica_blocks>1</num_corrupt_replica_blocks> <corrupt_replica_block_ids> <block_id>-2207002825050436217</block_id> </corrupt_replica_block_ids> </corrupt_block_info> Notes: - corrupt_block_info/corrupt_replica_block_ids will 0 to numCorruptBlocks children - If an error exists, corrupt_block_info/error will exist and contain a human readable error message */ private static final long serialVersionUID = 1L; private static java.util.List _jspx_dependants; public Object getDependants() { return _jspx_dependants; } public void _jspService(HttpServletRequest request, HttpServletResponse response) throws java.io.IOException, ServletException { JspFactory _jspxFactory = null; PageContext pageContext = null; HttpSession session = null; ServletContext application = null; ServletConfig config = null; JspWriter out = null; Object page = this; JspWriter _jspx_out = null; PageContext _jspx_page_context = null; try { _jspxFactory = JspFactory.getDefaultFactory(); response.setContentType("application/xml"); pageContext = _jspxFactory.getPageContext(this, request, response, null, true, 8192, true); _jspx_page_context = pageContext; application = pageContext.getServletContext(); config = pageContext.getServletConfig(); session = pageContext.getSession(); out = pageContext.getOut(); _jspx_out = out; out.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); out.write('\n'); out.write('\n'); out.write('\n'); NameNode nn = (NameNode)application.getAttribute("name.node"); FSNamesystem fsn = nn.getNamesystem(); Integer numCorruptBlocks = 10; try { Long l = JspHelper.validateLong(request.getParameter("numCorruptBlocks")); if (l != null) { numCorruptBlocks = l.intValue(); } } catch(NumberFormatException e) { } Long startingBlockId = null; try { startingBlockId = JspHelper.validateLong(request.getParameter("startingBlockId")); } 
catch(NumberFormatException e) { } XMLCorruptBlockInfo cbi = new XMLCorruptBlockInfo(fsn, new Configuration(), numCorruptBlocks, startingBlockId); XMLOutputter doc = new XMLOutputter(out, "UTF-8"); cbi.toXML(doc); } catch (Throwable t) { if (!(t instanceof SkipPageException)){ out = _jspx_out; if (out != null && out.getBufferSize() != 0) out.clearBuffer(); if (_jspx_page_context != null) _jspx_page_context.handlePageException(t); } } finally { if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context); } } }
weilu/Hadoop-Resource-Aware-Scheduler
hdfs/build/src/org/apache/hadoop/hdfs/server/namenode/corrupt_005freplicas_005fxml_jsp.java
Java
apache-2.0
4,917
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;

namespace Core.SyntaxRewriters
{
    /// <summary>
    /// Emits the syntax needed for interface support. Only classes implement
    /// interfaces, while interfaces may inherit from other interfaces.
    /// </summary>
    public class InterfaceImplementer : CSharpSyntaxRewriter
    {
        public override SyntaxNode VisitClassDeclaration(ClassDeclarationSyntax node)
        {
            // TODO: determine the interfaces implemented and what they require
            SyntaxNode rewritten = base.VisitClassDeclaration(node);
            return rewritten;
        }
    }
}
DefectiveCube/IDE
IDE/Core/SyntaxRewriters/InterfaceImplementer.cs
C#
apache-2.0
758
package support; /** * Data bucket holding information about an airport. * * @version JDSL 2 */ public class AirportSpecs { private String code_, name_; private int gmtOff_; private int x_, y_; private String label_; // accessors /** * @return the 3-letter code of the airport (PVD, etc) */ public String code() { return code_; } /** * @return the full name of the airport (Providence T.F. Green, etc) */ public String name() { return name_; } /** The GMT offset is the number of minutes to be added to transform * from Greenwich Mean Time to local airport time. In the United States, * this number is negative. To transform local airport time to GMT, * you need to subtract the offset, which means subtracting a negative * number. */ public int GMToffset() { return gmtOff_; } /** * @return the x position (in pixels) of the airport on the map */ public int x() { return x_; } /** * @return the y position (in pixels) of the airport on the map */ public int y() { return y_; } /** * @return the time zone, in hhmm format (i.e. -500) */ public String timeZone() { return Integer.toString(gmtOff_/60) + Integer.toString(gmtOff_%60) ; } /** * @return The String with which the vertex representing this airport was labelled */ public String label() { return label_; } /** * Labels the AirportSpecs instance with the given Object. * This Object may be used to hold special scratch information. * * @param <i>label</i> The label with which to mark the AirportSpecs */ public void setLabel(String label) { label_ = label; } /** * Initializes an airport specification. 
* * @param <i>code</i> Airport code * @param <i>city</i> City in which it is located * @param <i>gmtOffset</i> is a GMT offset of this city * @param <i>x</i> x-coordinate on the map * @param <i>y</i> y-coordinate on the map */ public AirportSpecs(String code, String city, int gmtOffset, int x, int y) { code_ = code; name_ = city; gmtOff_ = gmtOffset; x_ = x; y_ = y; label_ = name_ + " (" + code_ + ")"; } /** * Gives a string in a format useful for debugging. (If you have an * AirportSpecs a, you can just <code>System.err.println(a)</code>.) * * @return A string corresponding to the airport. */ public String toString() { return code_ + " " + name_ + " at (" + x_ + "," + y_ + ") with GMT offset " + gmtOff_; } } // end class def
patrickfav/tuwien
master/swt workspace/ModelJUnit 2.0 beta1/modeljunit/jdsl/tutorial/lesson07/support/AirportSpecs.java
Java
apache-2.0
2,578
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.tomcat.jdbc.pool;

import java.util.Hashtable;

import javax.management.MBeanRegistration;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;

import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.jdbc.pool.jmx.JmxUtil;

/**
 * A DataSource that can be instantiated through IoC and implements the DataSource interface
 * since the DataSourceProxy is used as a generic proxy.
 * The DataSource simply wraps a {@link ConnectionPool} in order to provide a standard interface to the user.
 * @version 1.0
 */
public class DataSource extends DataSourceProxy implements javax.sql.DataSource,MBeanRegistration, org.apache.tomcat.jdbc.pool.jmx.ConnectionPoolMBean, javax.sql.ConnectionPoolDataSource {
    private static final Log log = LogFactory.getLog(DataSource.class);

    /**
     * Constructor for reflection only. A default set of pool properties will be created.
     */
    public DataSource() {
        super();
    }

    /**
     * Constructs a DataSource object wrapping a connection
     * @param poolProperties The pool properties
     */
    public DataSource(PoolConfiguration poolProperties) {
        super(poolProperties);
    }

    //===============================================================================
    // JMX Operations - Register the actual pool itself under the tomcat.jdbc domain
    //===============================================================================

    // ObjectName of the pool MBean; volatile because JMX callbacks may come
    // from a different thread than the one that registered it.
    protected volatile ObjectName oname = null;

    /**
     * Unregisters the underlying connection pool mbean.<br>
     * {@inheritDoc}
     */
    @Override
    public void postDeregister() {
        if (oname!=null) {
            unregisterJmx();
        }
    }

    /**
     * no-op<br>
     * {@inheritDoc}
     */
    @Override
    public void postRegister(Boolean registrationDone) {
        // NOOP
    }

    /**
     * no-op<br>
     * {@inheritDoc}
     */
    @Override
    public void preDeregister() throws Exception {
        // NOOP
    }

    /**
     * If the connection pool MBean exists, it will be registered during this operation.<br>
     * {@inheritDoc}
     */
    @Override
    public ObjectName preRegister(MBeanServer server, ObjectName name) throws Exception {
        // A bad generated name must not abort DataSource registration itself,
        // so the failure is logged and the original name returned unchanged.
        try {
            if ( isJmxEnabled() ) {
                this.oname = createObjectName(name);
                if (oname!=null) {
                    registerJmx();
                }
            }
        }catch (MalformedObjectNameException x) {
            log.error("Unable to create object name for JDBC pool.",x);
        }
        return name;
    }

    /**
     * Creates the ObjectName for the ConnectionPoolMBean object to be registered
     * @param original the ObjectName for the DataSource
     * @return the ObjectName for the ConnectionPoolMBean
     * @throws MalformedObjectNameException Invalid object name
     */
    public ObjectName createObjectName(ObjectName original) throws MalformedObjectNameException {
        String domain = ConnectionPool.POOL_JMX_DOMAIN;
        Hashtable<String,String> properties = original.getKeyPropertyList();
        String origDomain = original.getDomain();
        properties.put("type", "ConnectionPool");
        properties.put("class", this.getClass().getName());
        if (original.getKeyProperty("path")!=null || properties.get("context")!=null) {
            // this ensures that if the registration came from tomcat, we're not
            // losing the unique domain, but putting that in as an engine attribute
            properties.put("engine", origDomain);
        }
        ObjectName name = new ObjectName(domain,properties);
        return name;
    }

    /**
     * Registers the ConnectionPoolMBean under a unique name based on the ObjectName for the DataSource
     */
    protected void registerJmx() {
        if (pool.getJmxPool()!=null) {
            JmxUtil.registerJmx(oname, null, pool.getJmxPool());
        }
    }

    /**
     * Removes the ConnectionPoolMBean registered under {@code oname}.
     */
    protected void unregisterJmx() {
        JmxUtil.unregisterJmx(oname);
    }
}
apache/tomcat
modules/jdbc-pool/src/main/java/org/apache/tomcat/jdbc/pool/DataSource.java
Java
apache-2.0
4,950
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.agent.download; import io.fabric8.agent.download.impl.DownloadManagerHelper; import org.junit.Test; import static org.junit.Assert.assertEquals; public class DownloadManagerHelperTest { @Test public void testStripUrl() { String artifact = "mvn:my/artifact/1.0"; assertEquals(artifact, DownloadManagerHelper.stripUrl(artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("wrap:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("wrap:"+artifact+"$Bundle-Version=1.1")); assertEquals(artifact, DownloadManagerHelper.stripUrl("war:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("war:"+artifact+"?Webapp-Context=test")); assertEquals(artifact, DownloadManagerHelper.stripUrl("war:jar:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("webbundle:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("warref:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("war-i:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("spring:"+artifact)); assertEquals(artifact, DownloadManagerHelper.stripUrl("blueprint:"+artifact)); } }
janstey/fabric8
fabric/fabric-agent/src/test/java/io/fabric8/agent/download/DownloadManagerHelperTest.java
Java
apache-2.0
1,902
/*
 * Copyright 2005 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.common;

import org.drools.core.RuleBaseConfiguration;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.ContextEntry;
import org.drools.core.rule.MutableTypeConstraint;
import org.drools.core.rule.constraint.MvelConstraint;
import org.drools.core.spi.BetaNodeFieldConstraint;
import org.drools.core.spi.Tuple;
import org.drools.core.util.bitmask.BitMask;
import org.drools.core.util.index.IndexUtil;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;

import static org.drools.core.reteoo.PropertySpecificUtil.allSetButTraitBitMask;

/**
 * BetaConstraints implementation that wraps exactly one
 * {@link BetaNodeFieldConstraint} for a beta node, optionally backed by an
 * index when the configuration and node type allow it.
 */
public class SingleBetaConstraints
        implements
        BetaConstraints {

    private static final long serialVersionUID = 510l;

    // The single constraint this node evaluates.
    protected BetaNodeFieldConstraint constraint;

    // True when the constraint is served by an index (see initIndexes).
    private boolean indexed;

    // Transient: build-time flag, not part of the serialized form.
    private transient boolean disableIndex;

    public SingleBetaConstraints() {
    }

    public SingleBetaConstraints(final BetaNodeFieldConstraint[] constraint,
                                 final RuleBaseConfiguration conf) {
        // Only the first element is used; callers pass a single-element array.
        this(constraint[0], conf, false);
    }

    public SingleBetaConstraints(final BetaNodeFieldConstraint constraint,
                                 final RuleBaseConfiguration conf) {
        this(constraint, conf, false);
    }

    public SingleBetaConstraints(final BetaNodeFieldConstraint constraint,
                                 final RuleBaseConfiguration conf,
                                 final boolean disableIndex) {
        this.constraint = constraint;
        this.disableIndex = disableIndex;
    }

    public void init(BuildContext context, short betaNodeType) {
        RuleBaseConfiguration config = context.getKnowledgeBase().getConfiguration();

        // Indexing is off when explicitly disabled or when both left and
        // right beta memory indexing are turned off in the configuration.
        if ((disableIndex) || (!config.isIndexLeftBetaMemory() && !config.isIndexRightBetaMemory())) {
            this.indexed = false;
        } else {
            initIndexes(config.getCompositeKeyDepth(), betaNodeType);
        }
    }

    public void initIndexes(int depth, short betaNodeType) {
        indexed = depth >= 1 && IndexUtil.isIndexableForNode(betaNodeType, constraint);
    }

    // Custom externalization: only the constraint and the indexed flag are
    // persisted; disableIndex is transient build-time state.
    public void readExternal(ObjectInput in) throws IOException,
            ClassNotFoundException {
        constraint = (BetaNodeFieldConstraint) in.readObject();
        indexed = in.readBoolean();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(constraint);
        out.writeBoolean(indexed);
    }

    public SingleBetaConstraints cloneIfInUse() {
        // Clone only when the mutable constraint is already marked in-use;
        // setInUse() both tests and sets that flag.
        if (constraint instanceof MutableTypeConstraint && ((MutableTypeConstraint) constraint).setInUse()) {
            SingleBetaConstraints clone = new SingleBetaConstraints(constraint.cloneIfInUse(), null, disableIndex);
            clone.indexed = indexed;
            return clone;
        }
        return this;
    }

    public ContextEntry[] createContext() {
        return new ContextEntry[]{this.constraint.createContextEntry()};
    }

    /* (non-Javadoc)
     * @see org.kie.common.BetaNodeConstraints#updateFromTuple(org.kie.reteoo.ReteTuple)
     */
    public void updateFromTuple(final ContextEntry[] context,
                                final InternalWorkingMemory workingMemory,
                                final Tuple tuple) {
        context[0].updateFromTuple(workingMemory, tuple);
    }

    /* (non-Javadoc)
     * @see org.kie.common.BetaNodeConstraints#updateFromFactHandle(org.kie.common.InternalFactHandle)
     */
    public void updateFromFactHandle(final ContextEntry[] context,
                                     final InternalWorkingMemory workingMemory,
                                     final InternalFactHandle handle) {
        context[0].updateFromFactHandle(workingMemory, handle);
    }

    /* (non-Javadoc)
     * @see org.kie.common.BetaNodeConstraints#isAllowedCachedLeft(java.lang.Object)
     */
    public boolean isAllowedCachedLeft(final ContextEntry[] context,
                                       final InternalFactHandle handle) {
        // When indexed, the index lookup already guarantees the match.
        return this.indexed || this.constraint.isAllowedCachedLeft(context[0], handle);
    }

    /* (non-Javadoc)
     * @see org.kie.common.BetaNodeConstraints#isAllowedCachedRight(org.kie.reteoo.ReteTuple)
     */
    public boolean isAllowedCachedRight(final ContextEntry[] context,
                                        final Tuple tuple) {
        return this.constraint.isAllowedCachedRight(tuple, context[0]);
    }

    public boolean isIndexed() {
        return this.indexed;
    }

    public int getIndexCount() {
        return (this.indexed ? 1 : 0);
    }

    public boolean isEmpty() {
        return false;
    }

    public BetaMemory createBetaMemory(final RuleBaseConfiguration config,
                                       final short nodeType) {
        return IndexUtil.Factory.createBetaMemory(config, nodeType, constraint);
    }

    public int hashCode() {
        return this.constraint.hashCode();
    }

    public BetaNodeFieldConstraint getConstraint() {
        return this.constraint;
    }

    /* (non-Javadoc)
     * @see org.kie.common.BetaNodeConstraints#getConstraints()
     */
    public BetaNodeFieldConstraint[] getConstraints() {
        return new BetaNodeFieldConstraint[]{this.constraint};
    }

    /**
     * Determine if another object is equal to this.
     *
     * @param object The object to test.
     * @return <code>true</code> if <code>object</code> is equal to this,
     * otherwise <code>false</code>.
     */
    public boolean equals(final Object object) {
        if (this == object) {
            return true;
        }

        if (object == null || getClass() != object.getClass()) {
            return false;
        }

        final SingleBetaConstraints other = (SingleBetaConstraints) object;

        // Reference equality is an accepted fast path before deep equals.
        return this.constraint == other.constraint || this.constraint.equals(other.constraint);
    }

    public void resetFactHandle(ContextEntry[] context) {
        context[0].resetFactHandle();
    }

    public void resetTuple(ContextEntry[] context) {
        context[0].resetTuple();
    }

    public BetaConstraints getOriginalConstraint() {
        // Not applicable for the single-constraint variant.
        throw new UnsupportedOperationException();
    }

    public BitMask getListenedPropertyMask(List<String> settableProperties) {
        // Non-MVEL constraints cannot report a precise mask, so listen to all
        // properties except the trait bit.
        return constraint instanceof MvelConstraint ?
               ((MvelConstraint) constraint).getListenedPropertyMask(settableProperties) :
               allSetButTraitBitMask();
    }

    public boolean isLeftUpdateOptimizationAllowed() {
        return true;
    }

    public void registerEvaluationContext(BuildContext buildContext) {
        if (this.constraint instanceof MvelConstraint) {
            ((MvelConstraint) this.constraint).registerEvaluationContext(buildContext);
        }
    }
}
vinodkiran/drools
drools-core/src/main/java/org/drools/core/common/SingleBetaConstraints.java
Java
apache-2.0
7,700
# Licensed to the .NET Foundation under one or more agreements. # The .NET Foundation licenses this file to you under the Apache 2.0 License. # See the LICENSE file in the project root for more information. from iptest.assert_util import * add_clr_assemblies("loadorder_3") # namespace First { # public class Generic1<K, V> { # public static string Flag = typeof(Generic1<,>).FullName; # } # } import First AreEqual(First.Generic1[int, int].Flag, "First.Generic1`2") add_clr_assemblies("loadorder_3g") # namespace First { # public class Generic1<K, V> { # public static string Flag = typeof(Generic1<,>).FullName + "_Same"; # } # } AreEqual(First.Generic1[int, int].Flag, "First.Generic1`2_Same") from First import * AreEqual(Generic1[int, int].Flag, "First.Generic1`2_Same")
slozier/ironpython2
Tests/interop/net/loadorder/t3g1.py
Python
apache-2.0
833
(function () {
  "use strict";

  angular
    .module('mnWizard')
    .controller('mnWizardStep4Controller', mnWizardStep4Controller);

  /**
   * Wizard step 4: product registration form and anonymous
   * usage-statistics opt-in.
   */
  function mnWizardStep4Controller($scope, $state, mnWizardStep4Service, pools, mnPromiseHelper) {
    var vm = this;

    vm.isEnterprise = pools.isEnterprise;
    // Usage statistics are opt-out: default to sending them.
    vm.sendStats = true;
    vm.onSubmit = onSubmit;
    vm.register = {
      email: '',
      firstname: '',
      lastname: '',
      company: '',
      version: (pools.implementationVersion || 'unknown')
    };

    /**
     * Validates the form, optionally posts the registration details,
     * posts the stats opt-in choice, then advances to step 5.
     */
    function onSubmit() {
      // Enterprise edition requires an explicit license agreement tick.
      if (pools.isEnterprise) {
        vm.form.agree.$setValidity('required', !!vm.register.agree);
      }
      if (vm.form.$invalid || vm.viewLoading) {
        return;
      }
      // Registration is optional: only post when an email was supplied.
      // (Replaces the harder-to-read `expr && call()` statement idiom.)
      if (vm.register.email) {
        mnWizardStep4Service.postEmail(vm.register);
      }
      var promise = mnWizardStep4Service.postStats({sendStats: vm.sendStats});
      mnPromiseHelper(vm, promise)
        .showErrorsSensitiveSpinner()
        .catchErrors()
        .getPromise()
        .then(function () {
          $state.go('app.wizard.step5');
        });
    }
  }
})();
ceejatec/ns_server
priv/public/ui/app-classic/mn_wizard/step4/mn_wizard_step4_controller.js
JavaScript
apache-2.0
1,150
from __future__ import absolute_import

import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")

import copy
import logging

import django

from api.webview.models import HarvesterResponse, Document, Version

from scrapi import events
from scrapi.util import json_without_bytes
from scrapi.linter import RawDocument, NormalizedDocument
from scrapi.processing import DocumentTuple
from scrapi.processing.base import BaseProcessor, BaseHarvesterResponse, BaseDatabaseManager

django.setup()

logger = logging.getLogger(__name__)


class DatabaseManager(BaseDatabaseManager):
    '''All database management is performed by django'''

    def setup(self):
        return True

    def tear_down(self):
        pass

    def clear(self, force=False):
        pass

    def celery_setup(self, *args, **kwargs):
        pass


def paginated(query, page_size=10):
    # Yield rows page_size at a time so a large queryset is never fully
    # materialized in memory.
    for offset in range(0, query.count(), page_size):
        for doc in query[offset:offset + page_size]:
            yield doc


class PostgresProcessor(BaseProcessor):
    NAME = 'postgres'

    manager = DatabaseManager()

    def documents(self, *sources):
        """Yield a DocumentTuple for every stored document.

        When ``sources`` are given, only documents from those sources are
        yielded; malformed rows are logged and skipped.
        """
        q = Document.objects.all()
        querysets = (q.filter(source=source) for source in sources) if sources else [q]
        for query in querysets:
            for doc in paginated(query):
                try:
                    raw = RawDocument(doc.raw, clean=False, validate=False)
                except AttributeError as e:
                    # Skip the row entirely. (A dead `raw = None` assignment
                    # before this `continue` was removed.)
                    logger.info('{} -- Malformed rawdoc in database, skipping'.format(e))
                    continue
                normalized = NormalizedDocument(doc.normalized, validate=False, clean=False) if doc.normalized else None

                yield DocumentTuple(raw, normalized)

    def get(self, source, docID):
        """Return the DocumentTuple for (source, docID), or None if absent."""
        try:
            document = Document.objects.get(source=source, docID=docID)
        except Document.DoesNotExist:
            return None

        raw = RawDocument(document.raw, clean=False, validate=False)
        normalized = NormalizedDocument(document.normalized, validate=False, clean=False) if document.normalized else None

        return DocumentTuple(raw, normalized)

    def delete(self, source, docID):
        doc = Document.objects.get(source=source, docID=docID)
        doc.delete()

    def create(self, attributes):
        """Insert a new raw-only Document row built from ``attributes``."""
        attributes = json_without_bytes(attributes)
        # Manager.create() constructs AND saves the row in one step; the
        # redundant `.save()` previously chained here caused a second write.
        Document.objects.create(
            source=attributes['source'],
            docID=attributes['docID'],
            providerUpdatedDateTime=None,
            raw=attributes,
            normalized=None
        )

    @property
    def HarvesterResponseModel(self):
        return HarvesterResponseModel

    @events.logged(events.PROCESSING, 'raw.postgres')
    def process_raw(self, raw_doc):
        """Persist a raw harvest result, versioning any previous row."""
        document = self.version(raw=raw_doc)

        timestamps = raw_doc.get('timestamps')

        # Deep-copy so later mutation of raw_doc cannot alter the saved row.
        modified_doc = copy.deepcopy(raw_doc.attributes)

        document.raw = modified_doc
        document.timestamps = timestamps

        document.save()

    @events.logged(events.PROCESSING, 'normalized.postgres')
    def process_normalized(self, raw_doc, normalized):
        """Persist a normalized harvest result, versioning any previous row."""
        document = self.version(raw=raw_doc, normalized=normalized)

        timestamps = raw_doc.get('timestamps') or normalized.get('timestamps')

        document.raw = raw_doc.attributes
        document.timestamps = timestamps
        document.normalized = normalized.attributes
        document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']

        document.save()

    def _get_by_source_id(self, source, docID):
        # Documents are keyed by a composite of (source, docID).
        try:
            return Document.objects.get(key=Document._make_key(source, docID))
        except Document.DoesNotExist:
            return None

    def version(self, raw=None, normalized=None):
        """Return the Document row to write into.

        If a row already exists, snapshot it as a Version (only when the raw
        or normalized payload actually changed) and return it; otherwise
        create a fresh row.
        """
        old_doc = self._get_by_source_id(raw['source'], raw['docID'])
        if old_doc:
            raw_changed = raw and self.different(raw.attributes, old_doc.raw)
            norm_changed = normalized and self.different(normalized.attributes, old_doc.normalized)
            version = Version(
                key=old_doc,
                source=old_doc.source,
                docID=old_doc.docID,
                providerUpdatedDateTime=old_doc.providerUpdatedDateTime,
                raw=old_doc.raw,
                normalized=old_doc.normalized,
                status=old_doc.status,
                timestamps=old_doc.timestamps
            )
            if raw_changed or norm_changed:
                version.save()
            return old_doc
        else:
            return Document.objects.create(source=raw['source'], docID=raw['docID'])

    def get_versions(self, source, docID):
        """Yield all historical versions oldest-first, then the current row."""
        doc = self._get_by_source_id(source, docID)
        for version in doc.version_set.all().order_by('id'):
            yield DocumentTuple(
                RawDocument(version.raw, clean=False, validate=False),
                NormalizedDocument(version.normalized, clean=False, validate=False)
            )
        yield DocumentTuple(
            RawDocument(doc.raw, clean=False, validate=False),
            NormalizedDocument(doc.normalized, clean=False, validate=False)
        )


class HarvesterResponseModel(BaseHarvesterResponse):
    """Thin adapter wrapping the django HarvesterResponse model."""

    response = None

    def __init__(self, *args, **kwargs):
        # Either wrap an existing model instance (positional) or build a new
        # one keyed on lowercased method+url (keyword form).
        if kwargs:
            key = kwargs['method'].lower() + kwargs['url'].lower()
            self.response = HarvesterResponse(key=key, *args, **kwargs)
        else:
            self.response = args[0]

    @property
    def method(self):
        return str(self.response.method)

    @property
    def url(self):
        return str(self.response.url)

    @property
    def ok(self):
        return bool(self.response.ok)

    @property
    def content(self):
        # Normalize the DB-backed value: memoryview/bytes stay binary,
        # anything else is coerced to str.
        if isinstance(self.response.content, memoryview):
            return self.response.content.tobytes()
        if isinstance(self.response.content, bytes):
            return self.response.content
        return str(self.response.content)

    @property
    def encoding(self):
        return str(self.response.encoding)

    @property
    def headers_str(self):
        return str(self.response.headers_str)

    @property
    def status_code(self):
        return int(self.response.status_code)

    @property
    def time_made(self):
        return str(self.response.time_made)

    def save(self, *args, **kwargs):
        self.response.save()
        return self

    def update(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self.response, k, v)
        return self.save()

    @classmethod
    def get(cls, url=None, method=None):
        key = method.lower() + url.lower()
        try:
            return cls(HarvesterResponse.objects.get(key=key))
        except HarvesterResponse.DoesNotExist:
            raise cls.DoesNotExist
erinspace/scrapi
scrapi/processing/postgres.py
Python
apache-2.0
6,903
package org.traccar.protocol;

import org.junit.Test;

import org.traccar.ProtocolTest;

public class R12wProtocolDecoderTest extends ProtocolTest {

    @Test
    public void testDecode() throws Exception {

        var decoder = new R12wProtocolDecoder(null);

        // Handshake frame captured from a real device (not a position report):
        // the decoder must consume it without emitting a Position, which is
        // exactly what verifyNull asserts.
        verifyNull(decoder, text(
                "$HX,0001,860721009104316,e92c,933402042499509,55792760080,12345678,01,a8d940a9,#,50,"));

    }

}
orcoliver/traccar
src/test/java/org/traccar/protocol/R12wProtocolDecoderTest.java
Java
apache-2.0
412
# Package initializer: re-export every public name from the sibling
# ``module`` so callers can import them directly from this package.
from module import *
Skaper/RMCStudio
images/emotion/eyes/type1/ico/__init__.py
Python
apache-2.0
21
/************************************************************** OLAP demo queries Works for MySQL (with kludge for CUBE) SQLite and Postgres don't support ROLLUP or CUBE **************************************************************/ /************************************************************** Full star join **************************************************************/ select * from Sales F, Store S, Item I, Customer C where F.storeID = S.storeID and F.itemID = I.itemID and F.custID = C.custID; /************************************************************** Star join with selections and projections All inexpensive Tshirts sold in California to young people **************************************************************/ select S.city, I.color, C.cName, F.price from Sales F, Store S, Item I, Customer C where F.storeID = S.storeID and F.itemID = I.itemID and F.custID = C.custID and S.state = 'CA' and I.category = 'Tshirt' and C.age < 22 and F.price < 25; /************************************************************** Grouping and aggregation over fact table Total sales by store and customer **************************************************************/ select storeID, custID, sum(price) from Sales group by storeID, custID; /************************************************************** Drill-down Total sales by store, item, and customer **************************************************************/ select storeID, itemID, custID, sum(price) from Sales group by storeID, itemID, custID; /************************************************************** "Slice" Total sales by store, item, and customer for Washington stores only **************************************************************/ select F.storeID, itemID, custID, sum(price) from Sales F, Store S where F.storeID = S.storeID and state = 'WA' group by F.storeID, itemID, custID; /************************************************************** "Dice" Total sales by store, item, and customer for Washington stores 
and red items only **************************************************************/ select F.storeID, I.itemID, custID, sum(price) from Sales F, Store S, Item I where F.storeID = S.storeID and F.itemID = I.itemID and state = 'WA' and color = 'red' group by F.storeID, I.itemID, custID; /************************************************************** Roll-up Total sales by item **************************************************************/ /*** Back to detailed query, then roll-up ***/ select storeID, itemID, custID, sum(price) from Sales group by storeID, itemID, custID; select itemID, sum(price) from Sales group by itemID; /************************************************************** Grouping and aggregation using non-dimension attributes Total sales by state and category **************************************************************/ select state, category, sum(price) from Sales F, Store S, Item I where F.storeID = S.storeID and F.itemID = I.itemID group by state, category; /************************************************************** Drill-down Total sales by state, county, and category **************************************************************/ select state, county, category, sum(price) from Sales F, Store S, Item I where F.storeID = S.storeID and F.itemID = I.itemID group by state, county, category; /************************************************************** Drill-down even further Total sales by state, county, category, and gender **************************************************************/ select state, county, category, gender, sum(price) from Sales F, Store S, Item I, Customer C where F.storeID = S.storeID and F.itemID = I.itemID and F.custID = C.custID group by state, county, category, gender; /************************************************************** Roll-up Total sales by state and gender **************************************************************/ select state, gender, sum(price) from Sales F, Store S, Customer C where F.storeID = 
S.storeID and F.custID = C.custID group by state, gender; /************************************************************** WITH CUBE Adds faces, edges, and corners of data cube **************************************************************/ /*** Not supported ***/ select storeID, itemID, custID, sum(price) from Sales group by storeID, itemID, custID with cube; /*** This query gives same result ***/ select storeID, itemID, custID, sum(price) from Sales group by storeID, itemID, custID with rollup union select storeID, itemID, custID, sum(price) from Sales group by itemID, custID, storeID with rollup union select storeID, itemID, custID, sum(price) from Sales group by custID, storeID, itemID with rollup; /*** Double-check triple-NULL ***/ select sum(price) from Sales; /************************************************************** CUBE as materialized view **************************************************************/ create table Cube as select storeID, itemID, custID, sum(price) as p from Sales group by storeID, itemID, custID with rollup union select storeID, itemID, custID, sum(price) as p from Sales group by itemID, custID, storeID with rollup union select storeID, itemID, custID, sum(price) as p from Sales group by custID, storeID, itemID with rollup; /************************************************************** Query over CUBE view Total sales of blue items in California **************************************************************/ /*** First without final sum, then with ***/ select C.* from Cube C, Store S, Item I where C.storeID = S.storeID and C.itemID = I.itemID and state = 'CA' and color = 'blue' and custID is null; select sum(p) from Cube C, Store S, Item I where C.storeID = S.storeID and C.itemID = I.itemID and state = 'CA' and color = 'blue' and custID is null; /*** Now on non-NULL portion of cube ***/ select C.* from Cube C, Store S, Item I where C.storeID = S.storeID and C.itemID = I.itemID and state = 'CA' and color = 'blue' and custID is not 
null; select sum(p) from Cube C, Store S, Item I where C.storeID = S.storeID and C.itemID = I.itemID and state = 'CA' and color = 'blue' and custID is not null; /*** On original Sales table ***/ select F.* from Sales F, Store S, Item I where F.storeID = S.storeID and F.itemID = I.itemID and state = 'CA' and color = 'blue' and F.custID is not null; select sum(price) from Sales F, Store S, Item I where F.storeID = S.storeID and F.itemID = I.itemID and state = 'CA' and color = 'blue' and F.custID is not null; /************************************************************** WITH CUBE on subset of grouping attributes **************************************************************/ /*** Not supported ***/ select storeID, itemID, custID, sum(price) from Sales F group by storeID, itemID, custID with cube(storeID, custID); /*** This query gives same result ***/ select * from (select storeID, itemID, custID, sum(price) from Sales F group by itemID, storeID, custID with rollup) X where X.itemID is not null union select * from (select storeID, itemID, custID, sum(price) from Sales F group by itemID, custID, storeID with rollup) X where X.itemID is not null and X.custID is not null; /************************************************************** WITH ROLLUP **************************************************************/ select storeID, itemID, custID, sum(price) from Sales F group by storeID, itemID, custID with rollup; /************************************************************** WITH ROLLUP on hierarchical grouping attributes Total sales by state, county, city **************************************************************/ select state, county, city, sum(price) from Sales F, Store S where F.storeID = S.storeID group by state, county, city; select state, county, city, sum(price) from Sales F, Store S where F.storeID = S.storeID group by state, county, city with rollup;
hemmerling/sql-stanford2013
src/16_olap/OLAPqueries.sql
SQL
apache-2.0
8,013
package eduni.simdiag; import java.util.List; import java.util.ArrayList; import java.applet.Applet; /** * Generates sin/cos graph from equation. * The output format is a stream of * GraphEventObjects which can be read and displayed by * a GraphDiagram. * This class may be useful as an example of how to * generate a graph from user simulations: * see the <a href="../../eduni/simdiag/GraphEqn.java">source code</a> */ public class GraphEqn implements Runnable { transient Thread thread; /** Creates a graph generator (Sine, Cos) */ public GraphEqn() { } /** Called after output has been wired */ public void startRunning() { thread = new Thread(this); thread.start(); } /** Generates the graph */ public void run() { double delta = 0.1; // while (true) { forwardGraph( new GraphClearObject(this) ); forwardGraph( new GraphSetAxes(this,"X","Y") ); for (double x = 0.0; x<20; x+=0.1) { forwardGraph( new GraphData(this,"sin",x,Math.sin(x+delta)) ); forwardGraph( new GraphData(this,"cos",x,Math.cos(x+delta)) ); forwardGraph( new GraphDisplay(this) ); try { thread.sleep(100); } catch (Exception e) {} } forwardGraph( new GraphDisplay(this) ); delta += 0.1; // try { thread.sleep(1000); } catch (Exception e) {} // } } /** Javabeans graph event output: List of * event listeners. */ private List graphListeners = new ArrayList(); /** Adds a graph listener */ public synchronized void addGraphListener(GraphListener l) { graphListeners.add(l); } /** Removes a graph listener */ public synchronized void removeGraphListener(GraphListener l) { graphListeners.remove(l); } /** Forwards a graph event to anyone listening. */ public void forwardGraph(GraphEventObject e) { List l; // GraphEventObject weo = new GraphEventObject(this,e); synchronized(this) { l = (List)((ArrayList)graphListeners).clone(); } for (int i=0; i<l.size(); i++) { GraphListener wl = (GraphListener) l.get(i); wl.handleGraph(e); } } }
mzuhri/salam
salam/salam-external-lib/src/main/java/eduni/simdiag/GraphEqn.java
Java
apache-2.0
2,102
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_71) on Tue Jan 06 21:14:27 PST 2015 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>Uses of Interface org.glyptodon.guacamole.net.event.listener.TunnelConnectListener (guacamole-ext 0.9.4 API)</title> <meta name="date" content="2015-01-06"> <link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.glyptodon.guacamole.net.event.listener.TunnelConnectListener (guacamole-ext 0.9.4 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/glyptodon/guacamole/net/event/listener/TunnelConnectListener.html" title="interface in org.glyptodon.guacamole.net.event.listener">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/glyptodon/guacamole/net/event/listener/class-use/TunnelConnectListener.html" 
target="_top">Frames</a></li> <li><a href="TunnelConnectListener.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.glyptodon.guacamole.net.event.listener.TunnelConnectListener" class="title">Uses of Interface<br>org.glyptodon.guacamole.net.event.listener.TunnelConnectListener</h2> </div> <div class="classUseContainer">No usage of org.glyptodon.guacamole.net.event.listener.TunnelConnectListener</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/glyptodon/guacamole/net/event/listener/TunnelConnectListener.html" title="interface in org.glyptodon.guacamole.net.event.listener">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../../../index.html?org/glyptodon/guacamole/net/event/listener/class-use/TunnelConnectListener.html" target="_top">Frames</a></li> <li><a href="TunnelConnectListener.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2015. All rights reserved.</small></p> <!-- Google Analytics --> <script type="text/javascript"> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-75289145-1', 'auto'); ga('send', 'pageview'); </script> <!-- End Google Analytics --> </body> </html>
mike-jumper/incubator-guacamole-website
doc/0.9.4/guacamole-ext/org/glyptodon/guacamole/net/event/listener/class-use/TunnelConnectListener.html
HTML
apache-2.0
5,237
package com.seven.designbox.designpatterns.weatherInfo;

import android.os.Parcel;
import android.os.Parcelable;

/**
 * Plain data holder for a weather report, grouped into nested value objects.
 *
 * NOTE(review): only condition.code, condition.temp and condition.dotCode
 * survive a Parcel round trip -- every other field (imageUrl, wind,
 * atmosphere, forecast, ...) is dropped by writeToParcel(). Confirm whether
 * this partial parceling is intentional.
 */
public class Weather implements Parcelable {

    public String imageUrl;
    public Condition condition = new Condition();
    public Wind wind = new Wind();
    public Atmosphere atmosphere = new Atmosphere();
    public Forecast forecast = new Forecast();
    public Location location = new Location();
    public Astronomy astronomy = new Astronomy();
    public Units units = new Units();
    public String lastUpdate;

    // Rebuilds a Weather from a Parcel; read order must mirror the write
    // order in writeToParcel() below.
    public static final Creator<Weather> CREATOR = new Creator<Weather>() {
        @Override
        public Weather createFromParcel(Parcel source) {
            Weather weather = new Weather();
            weather.condition.code = source.readInt();
            weather.condition.temp = source.readInt();
            weather.condition.dotCode = source.readInt();
            return weather;
        }

        @Override
        public Weather[] newArray(int size) {
            return new Weather[size];
        }
    };

    // Current observed conditions.
    public class Condition {
        public String description;
        public int code;
        public String date;
        public int temp;
        // add by seven
        public int dotCode;
    }

    // Forecast temperatures and summary.
    public class Forecast {
        public int tempMin;
        public int tempMax;
        public String description;
        public int code;
    }

    // Atmospheric readings.
    public static class Atmosphere {
        public int humidity;
        public float visibility;
        public float pressure;
        public int rising;
    }

    public class Wind {
        public int chill;
        public int direction;
        public int speed;
    }

    // Measurement units for the numeric fields above.
    public class Units {
        public String speed;
        public String distance;
        public String pressure;
        public String temperature;
    }

    public class Location {
        public String name;
        public String region;
        public String country;
    }

    public class Astronomy {
        public String sunRise;
        public String sunSet;
    }

    @Override
    public int describeContents() {
        // No special contents (e.g. file descriptors) in this Parcelable.
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Must stay in sync with CREATOR.createFromParcel() above.
        dest.writeInt(condition.code);
        dest.writeInt(condition.temp);
        dest.writeInt(condition.dotCode);
    }
}
yihongyuelan/DesignBox
app/src/main/java/com/seven/designbox/designpatterns/weatherInfo/Weather.java
Java
apache-2.0
2,619
<?php /** * This example gets all companies. To create companies, run * CreateCompanies.php. * * Tags: CompanyService.getCompaniesByStatement * * PHP version 5 * * Copyright 2014, Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @package GoogleApiAdsDfp * @subpackage v201411 * @category WebServices * @copyright 2014, Google Inc. All Rights Reserved. * @license http://www.apache.org/licenses/LICENSE-2.0 Apache License, * Version 2.0 * @author Vincent Tsao */ error_reporting(E_STRICT | E_ALL); // You can set the include path to src directory or reference // DfpUser.php directly via require_once. // $path = '/path/to/dfp_api_php_lib/src'; $path = dirname(__FILE__) . '/../../../../src'; set_include_path(get_include_path() . PATH_SEPARATOR . $path); require_once 'Google/Api/Ads/Dfp/Lib/DfpUser.php'; require_once 'Google/Api/Ads/Dfp/Util/StatementBuilder.php'; require_once dirname(__FILE__) . '/../../../Common/ExampleUtils.php'; try { // Get DfpUser from credentials in "../auth.ini" // relative to the DfpUser.php file's directory. $user = new DfpUser(); // Log SOAP XML request and response. $user->LogDefaults(); // Get the CompanyService. $companyService = $user->GetService('CompanyService', 'v201411'); // Create a statement to select all companies. $statementBuilder = new StatementBuilder(); $statementBuilder->OrderBy('id ASC') ->Limit(StatementBuilder::SUGGESTED_PAGE_LIMIT); // Default for total result set size. 
$totalResultSetSize = 0; do { // Get companies by statement. $page = $companyService->getCompaniesByStatement( $statementBuilder->ToStatement()); // Display results. if (isset($page->results)) { $totalResultSetSize = $page->totalResultSetSize; $i = $page->startIndex; foreach ($page->results as $company) { printf("%d) Company with ID %d, name '%s', and type %s was found.\n", $i++, $company->id, $company->name, $company->type); } } $statementBuilder->IncreaseOffsetBy(StatementBuilder::SUGGESTED_PAGE_LIMIT); } while ($statementBuilder->GetOffset() < $totalResultSetSize); printf("Number of results found: %d\n", $totalResultSetSize); } catch (OAuth2Exception $e) { ExampleUtils::CheckForOAuth2Errors($e); } catch (ValidationException $e) { ExampleUtils::CheckForOAuth2Errors($e); } catch (Exception $e) { printf("%s\n", $e->getMessage()); }
classano/googleads
examples/Dfp/v201411/CompanyService/GetAllCompanies.php
PHP
apache-2.0
2,994
--- title: "数据类型" nav-parent_id: python_datastream_api nav-pos: 10 --- <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> 在 Apache Flink 的 Python DataStream API 中,一种数据类型描述 DataStream 生态系统中数据的类型。 数据类型可用于声明算子输入和输出的类型,并告知系统如何对数据进行序列化。 * This will be replaced by the TOC {:toc} ## Pickle 序列化 如果类型没有被定义,数据将使用 Pickle 进行序列化和反序列化。 例如,以下程序没有指定数据类型。 {% highlight python %} from pyflink.datastream import StreamExecutionEnvironment def processing(): env = StreamExecutionEnvironment.get_execution_environment() env.set_parallelism(1) env.from_collection(collection=[(1, 'aaa'), (2, 'bbb')]) \ .map(lambda record: (record[0]+1, record[1].upper())) \ .print() # note: print to stdout on the worker machine env.execute() if __name__ == '__main__': processing() {% endhighlight %} 但是,在下列情况下需要指定类型: - 将 Python 数据发送给 Java。 - 提高序列化和反序列化的性能。 ### 发送 Python 数据给 Java 由于 Java 算子或函数不能识别 Python 数据,因此需要提供数据类型来将 Python 类型转换为 Java 类型以进行处理。 例如,如果你想要使用 Java 实现的 StreamingFileSink 输出数据,则需要提供数据类型。 {% highlight python %} from pyflink.common.serialization import SimpleStringEncoder from pyflink.common.typeinfo import Types from pyflink.datastream import StreamExecutionEnvironment from pyflink.datastream.connectors import StreamingFileSink def streaming_file_sink(): env = 
StreamExecutionEnvironment.get_execution_environment() env.set_parallelism(1) env.from_collection(collection=[(1, 'aaa'), (2, 'bbb')]) \ .map(lambda record: (record[0]+1, record[1].upper()), result_type=Types.ROW([Types.INT(), Types.STRING()])) \ .add_sink(StreamingFileSink .for_row_format('/tmp/output', SimpleStringEncoder()) .build()) env.execute() if __name__ == '__main__': streaming_file_sink() {% endhighlight %} ### 提高序列化和反序列化的性能 尽管可以通过 Pickle 序列化和反序列化数据,但是如果提供了确定的类型,性能会更好。 当在 pipeline 中传递数据时,显式类型允许 PyFlink 使用更高效的序列化器。 ## 支持的数据类型 你可以使用 `pyflink.common.typeinfo.Types` 在 Python DataStream API 中指定类型. 下面列出了现在支持的类型以及如何定义它们: | PyFlink 类型 | 使用 | 对应 Python 类型 | |:-----------------|:-----------------------|:-----------------------| | `BOOLEAN` | `Types.BOOLEAN()` | `bool` | | `SHORT` | `Types.SHORT()` | `int` | | `INT` | `Types.INT()` | `int` | | `LONG` | `Types.LONG()` | `int` | | `FLOAT` | `Types.FLOAT()` | `float` | | `DOUBLE` | `Types.DOUBLE()` | `float` | | `CHAR` | `Types.CHAR()` | `str` | | `BIG_INT` | `Types.BIG_INT()` | `bytes` | | `BIG_DEC` | `Types.BIG_DEC()` | `decimal.Decimal` | | `STRING` | `Types.STRING()` | `str` | | `BYTE` | `Types.BYTE()` | `int` | | `TUPLE` | `Types.TUPLE()` | `tuple` | | `PRIMITIVE_ARRAY` | `Types.PRIMITIVE_ARRAY()` | `list` | | `ROW` | `Types.ROW()` | `dict` |
aljoscha/flink
docs/dev/python/datastream-api-users-guide/data_types.zh.md
Markdown
apache-2.0
4,099
cdi-alternative: Demostrates CDI Alternatives ====================================================== Author: Nevin Zhu Level: Intermediate Technologies: CDI, Servlet, JSP Summary: Demonstrates the use of CDI Alternatives where the bean is selected during deployment Target Product: EAP Product Versions: EAP 6.1, EAP 6.2 Source: <https://github.com/jboss-developer/jboss-eap-quickstarts/> What is it? ----------- When more than one version of a bean is implemented for different purposes, the ability to switch between the versions during the development phase by injecting one qualifier or another is shown in this demo. Instead of having to change the source code of the application, one can make the choice at deployment time by using alternatives. Alternatives are commonly used for purposes like the following: 1. To handle client-specific business logic that is determined at runtime. 2. To specify beans that are valid for a particular deployment scenario, for example, when country-specific sales tax laws require country-specific sales tax business logic. 3. To create dummy or mock versions of beans to be used for testing. Any java class which has a no-args constructor and is in an archive with a beans.xml is available for lookup and injection. For EL resolution, it must contain @Named System requirements ------------------- The application this project produces is designed to be run on Red Hat JBoss Enterprise Application Platform 6.1 or later. All you need to build this project is Java 6.0 (Java SDK 1.6) or later, Maven 3.0 or later. Configure Maven --------------- If you have not yet done so, you must [Configure Maven](../README.md#configure-maven) before testing the quickstarts. Start the JBoss Server ------------------------- 1. Open a command line and navigate to the root of the JBoss server directory. 2. 
The following shows the command line to start the server: For Linux: JBOSS_HOME/bin/standalone.sh For Windows: JBOSS_HOME\bin\standalone.bat Build and Deploy the Quickstart ------------------------- _NOTE: The following build command assumes you have configured your Maven user settings. If you have not, you must include Maven setting arguments on the command line. See [Build and Deploy the Quickstarts](../README.md#build-and-deploy-the-quickstarts) for complete instructions and additional options._ 1. Make sure you have started the JBoss Server as described above. 2. Open a command line and navigate to the root directory of this quickstart. 3. Type this command to build and deploy the archive: mvn clean install jboss-as:deploy 4. This will deploy `target/jboss-cdi-alternative.ear` to the running instance of the server. Access the application --------------------- The application will be running at the following URL: <http://localhost:8080/jboss-cdi-alternative>. You can specify alternative versions of the bean in the WEB-INF/beans.xml file by doing one of the following: 1. you can remove the '<alternatives>' tag 2. you can change the class name. In this quickstart, in order to switch back to the default implementation, comment the '<alternatives>' block in the WEB-INF/beans.xml file and redeploy the quickstart. Undeploy the Archive -------------------- 1. Make sure you have started the JBoss Server as described above. 2. Open a command line and navigate to the root directory of this quickstart. 3. When you are finished testing, type this command to undeploy the archive: mvn jboss-as:undeploy Run the Quickstart in JBoss Developer Studio or Eclipse ------------------------------------- You can also start the server and deploy the quickstarts from Eclipse using JBoss tools. 
For more information, see [Use JBoss Developer Studio or Eclipse to Run the Quickstarts](../README.md#use-jboss-developer-studio-or-eclipse-to-run-the-quickstarts) Debug the Application ------------------------------------ If you want to debug the source code or look at the Javadocs of any library in the project, run either of the following commands to pull them into your local repository. The IDE should then detect them. mvn dependency:sources mvn dependency:resolve -Dclassifier=javadoc
wfink/jboss-as-quickstart
cdi-alternative/README.md
Markdown
apache-2.0
4,225
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; const models = require('./index'); /** * Application Upgrade Rollback Completed event. * * @extends models['ApplicationEvent'] */ class ApplicationUpgradeRollbackCompletedEvent extends models['ApplicationEvent'] { /** * Create a ApplicationUpgradeRollbackCompletedEvent. * @property {string} applicationTypeName Application type name. * @property {string} applicationTypeVersion Application type version. * @property {string} failureReason Describes reason of failure. * @property {number} overallUpgradeElapsedTimeInMs Overall upgrade time in * milli-seconds. */ constructor() { super(); } /** * Defines the metadata of ApplicationUpgradeRollbackCompletedEvent * * @returns {object} metadata of ApplicationUpgradeRollbackCompletedEvent * */ mapper() { return { required: false, serializedName: 'ApplicationUpgradeRollbackCompleted', type: { name: 'Composite', polymorphicDiscriminator: { serializedName: 'Kind', clientName: 'kind' }, uberParent: 'FabricEvent', className: 'ApplicationUpgradeRollbackCompletedEvent', modelProperties: { eventInstanceId: { required: true, serializedName: 'EventInstanceId', type: { name: 'String' } }, category: { required: false, serializedName: 'Category', type: { name: 'String' } }, timeStamp: { required: true, serializedName: 'TimeStamp', type: { name: 'DateTime' } }, hasCorrelatedEvents: { required: false, serializedName: 'HasCorrelatedEvents', type: { name: 'Boolean' } }, kind: { required: true, serializedName: 'Kind', isPolymorphicDiscriminator: true, type: { name: 'String' } }, applicationId: { required: true, serializedName: 'ApplicationId', type: { name: 'String' } }, applicationTypeName: { required: true, serializedName: 
'ApplicationTypeName', type: { name: 'String' } }, applicationTypeVersion: { required: true, serializedName: 'ApplicationTypeVersion', type: { name: 'String' } }, failureReason: { required: true, serializedName: 'FailureReason', type: { name: 'String' } }, overallUpgradeElapsedTimeInMs: { required: true, serializedName: 'OverallUpgradeElapsedTimeInMs', type: { name: 'Number' } } } } }; } } module.exports = ApplicationUpgradeRollbackCompletedEvent;
xingwu1/azure-sdk-for-node
lib/services/serviceFabric/lib/models/applicationUpgradeRollbackCompletedEvent.js
JavaScript
apache-2.0
3,459
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; const msRest = require('ms-rest'); const msRestAzure = require('ms-rest-azure'); const WebResource = msRest.WebResource; /** * Retrieve the job stream identified by job stream id. * * @param {string} resourceGroupName Name of an Azure Resource group. * * @param {string} automationAccountName The name of the automation account. * * @param {string} jobName The job name. * * @param {string} jobStreamId The job stream id. * * @param {object} [options] Optional Parameters. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {function} callback - The callback. * * @returns {function} callback(err, result, request, response) * * {Error} err - The Error object if an error occurred, null otherwise. * * {object} [result] - The deserialized result object if an error did not occur. * See {@link JobStream} for more information. * * {object} [request] - The HTTP Request object if an error did not occur. * * {stream} [response] - The HTTP Response stream if an error did not occur. */ function _get(resourceGroupName, automationAccountName, jobName, jobStreamId, options, callback) { /* jshint validthis: true */ let client = this.client; if(!callback && typeof options === 'function') { callback = options; options = null; } if (!callback) { throw new Error('callback cannot be null.'); } let clientRequestId = (options && options.clientRequestId !== undefined) ? 
options.clientRequestId : undefined; let apiVersion = '2017-05-15-preview'; // Validate try { if (this.client.subscriptionId === null || this.client.subscriptionId === undefined || typeof this.client.subscriptionId.valueOf() !== 'string') { throw new Error('this.client.subscriptionId cannot be null or undefined and it must be of type string.'); } if (resourceGroupName === null || resourceGroupName === undefined || typeof resourceGroupName.valueOf() !== 'string') { throw new Error('resourceGroupName cannot be null or undefined and it must be of type string.'); } if (resourceGroupName !== null && resourceGroupName !== undefined) { if (resourceGroupName.length > 90) { throw new Error('"resourceGroupName" should satisfy the constraint - "MaxLength": 90'); } if (resourceGroupName.length < 1) { throw new Error('"resourceGroupName" should satisfy the constraint - "MinLength": 1'); } if (resourceGroupName.match(/^[-\w\._]+$/) === null) { throw new Error('"resourceGroupName" should satisfy the constraint - "Pattern": /^[-\w\._]+$/'); } } if (automationAccountName === null || automationAccountName === undefined || typeof automationAccountName.valueOf() !== 'string') { throw new Error('automationAccountName cannot be null or undefined and it must be of type string.'); } if (jobName === null || jobName === undefined || typeof jobName.valueOf() !== 'string') { throw new Error('jobName cannot be null or undefined and it must be of type string.'); } if (jobStreamId === null || jobStreamId === undefined || typeof jobStreamId.valueOf() !== 'string') { throw new Error('jobStreamId cannot be null or undefined and it must be of type string.'); } if (clientRequestId !== null && clientRequestId !== undefined && typeof clientRequestId.valueOf() !== 'string') { throw new Error('clientRequestId must be of type string.'); } if (this.client.acceptLanguage !== null && this.client.acceptLanguage !== undefined && typeof this.client.acceptLanguage.valueOf() !== 'string') { throw new 
Error('this.client.acceptLanguage must be of type string.'); } } catch (error) { return callback(error); } // Construct URL let baseUrl = this.client.baseUri; let requestUrl = baseUrl + (baseUrl.endsWith('/') ? '' : '/') + 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/jobs/{jobName}/streams/{jobStreamId}'; requestUrl = requestUrl.replace('{subscriptionId}', encodeURIComponent(this.client.subscriptionId)); requestUrl = requestUrl.replace('{resourceGroupName}', encodeURIComponent(resourceGroupName)); requestUrl = requestUrl.replace('{automationAccountName}', encodeURIComponent(automationAccountName)); requestUrl = requestUrl.replace('{jobName}', encodeURIComponent(jobName)); requestUrl = requestUrl.replace('{jobStreamId}', encodeURIComponent(jobStreamId)); let queryParameters = []; queryParameters.push('api-version=' + encodeURIComponent(apiVersion)); if (queryParameters.length > 0) { requestUrl += '?' 
+ queryParameters.join('&'); } // Create HTTP transport objects let httpRequest = new WebResource(); httpRequest.method = 'GET'; httpRequest.url = requestUrl; httpRequest.headers = {}; // Set Headers httpRequest.headers['Content-Type'] = 'application/json; charset=utf-8'; if (this.client.generateClientRequestId) { httpRequest.headers['x-ms-client-request-id'] = msRestAzure.generateUuid(); } if (clientRequestId !== undefined && clientRequestId !== null) { httpRequest.headers['clientRequestId'] = clientRequestId; } if (this.client.acceptLanguage !== undefined && this.client.acceptLanguage !== null) { httpRequest.headers['accept-language'] = this.client.acceptLanguage; } if(options) { for(let headerName in options['customHeaders']) { if (options['customHeaders'].hasOwnProperty(headerName)) { httpRequest.headers[headerName] = options['customHeaders'][headerName]; } } } httpRequest.body = null; // Send Request return client.pipeline(httpRequest, (err, response, responseBody) => { if (err) { return callback(err); } let statusCode = response.statusCode; if (statusCode !== 200) { let error = new Error(responseBody); error.statusCode = response.statusCode; error.request = msRest.stripRequest(httpRequest); error.response = msRest.stripResponse(response); if (responseBody === '') responseBody = null; let parsedErrorResponse; try { parsedErrorResponse = JSON.parse(responseBody); if (parsedErrorResponse) { let internalError = null; if (parsedErrorResponse.error) internalError = parsedErrorResponse.error; error.code = internalError ? internalError.code : parsedErrorResponse.code; error.message = internalError ? 
internalError.message : parsedErrorResponse.message; } if (parsedErrorResponse !== null && parsedErrorResponse !== undefined) { let resultMapper = new client.models['ErrorResponse']().mapper(); error.body = client.deserialize(resultMapper, parsedErrorResponse, 'error.body'); } } catch (defaultError) { error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody ` + `- "${responseBody}" for the default response.`; return callback(error); } return callback(error); } // Create Result let result = null; if (responseBody === '') responseBody = null; // Deserialize Response if (statusCode === 200) { let parsedResponse = null; try { parsedResponse = JSON.parse(responseBody); result = JSON.parse(responseBody); if (parsedResponse !== null && parsedResponse !== undefined) { let resultMapper = new client.models['JobStream']().mapper(); result = client.deserialize(resultMapper, parsedResponse, 'result'); } } catch (error) { let deserializationError = new Error(`Error ${error} occurred in deserializing the responseBody - ${responseBody}`); deserializationError.request = msRest.stripRequest(httpRequest); deserializationError.response = msRest.stripResponse(response); return callback(deserializationError); } } return callback(null, result, httpRequest, response); }); } /** * Retrieve a list of jobs streams identified by job name. * * @param {string} resourceGroupName Name of an Azure Resource group. * * @param {string} automationAccountName The name of the automation account. * * @param {string} jobName The job name. * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {function} callback - The callback. 
* * @returns {function} callback(err, result, request, response) * * {Error} err - The Error object if an error occurred, null otherwise. * * {object} [result] - The deserialized result object if an error did not occur. * See {@link JobStreamListResult} for more information. * * {object} [request] - The HTTP Request object if an error did not occur. * * {stream} [response] - The HTTP Response stream if an error did not occur. */ function _listByJob(resourceGroupName, automationAccountName, jobName, options, callback) { /* jshint validthis: true */ let client = this.client; if(!callback && typeof options === 'function') { callback = options; options = null; } if (!callback) { throw new Error('callback cannot be null.'); } let filter = (options && options.filter !== undefined) ? options.filter : undefined; let clientRequestId = (options && options.clientRequestId !== undefined) ? options.clientRequestId : undefined; let apiVersion = '2017-05-15-preview'; // Validate try { if (resourceGroupName === null || resourceGroupName === undefined || typeof resourceGroupName.valueOf() !== 'string') { throw new Error('resourceGroupName cannot be null or undefined and it must be of type string.'); } if (resourceGroupName !== null && resourceGroupName !== undefined) { if (resourceGroupName.length > 90) { throw new Error('"resourceGroupName" should satisfy the constraint - "MaxLength": 90'); } if (resourceGroupName.length < 1) { throw new Error('"resourceGroupName" should satisfy the constraint - "MinLength": 1'); } if (resourceGroupName.match(/^[-\w\._]+$/) === null) { throw new Error('"resourceGroupName" should satisfy the constraint - "Pattern": /^[-\w\._]+$/'); } } if (automationAccountName === null || automationAccountName === undefined || typeof automationAccountName.valueOf() !== 'string') { throw new Error('automationAccountName cannot be null or undefined and it must be of type string.'); } if (jobName === null || jobName === undefined || typeof jobName.valueOf() !== 
'string') { throw new Error('jobName cannot be null or undefined and it must be of type string.'); } if (filter !== null && filter !== undefined && typeof filter.valueOf() !== 'string') { throw new Error('filter must be of type string.'); } if (this.client.subscriptionId === null || this.client.subscriptionId === undefined || typeof this.client.subscriptionId.valueOf() !== 'string') { throw new Error('this.client.subscriptionId cannot be null or undefined and it must be of type string.'); } if (clientRequestId !== null && clientRequestId !== undefined && typeof clientRequestId.valueOf() !== 'string') { throw new Error('clientRequestId must be of type string.'); } if (this.client.acceptLanguage !== null && this.client.acceptLanguage !== undefined && typeof this.client.acceptLanguage.valueOf() !== 'string') { throw new Error('this.client.acceptLanguage must be of type string.'); } } catch (error) { return callback(error); } // Construct URL let baseUrl = this.client.baseUri; let requestUrl = baseUrl + (baseUrl.endsWith('/') ? '' : '/') + 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/jobs/{jobName}/streams'; requestUrl = requestUrl.replace('{resourceGroupName}', encodeURIComponent(resourceGroupName)); requestUrl = requestUrl.replace('{automationAccountName}', encodeURIComponent(automationAccountName)); requestUrl = requestUrl.replace('{jobName}', encodeURIComponent(jobName)); requestUrl = requestUrl.replace('{subscriptionId}', encodeURIComponent(this.client.subscriptionId)); let queryParameters = []; if (filter !== null && filter !== undefined) { queryParameters.push('$filter=' + encodeURIComponent(filter)); } queryParameters.push('api-version=' + encodeURIComponent(apiVersion)); if (queryParameters.length > 0) { requestUrl += '?' 
+ queryParameters.join('&'); } // Create HTTP transport objects let httpRequest = new WebResource(); httpRequest.method = 'GET'; httpRequest.url = requestUrl; httpRequest.headers = {}; // Set Headers httpRequest.headers['Content-Type'] = 'application/json; charset=utf-8'; if (this.client.generateClientRequestId) { httpRequest.headers['x-ms-client-request-id'] = msRestAzure.generateUuid(); } if (clientRequestId !== undefined && clientRequestId !== null) { httpRequest.headers['clientRequestId'] = clientRequestId; } if (this.client.acceptLanguage !== undefined && this.client.acceptLanguage !== null) { httpRequest.headers['accept-language'] = this.client.acceptLanguage; } if(options) { for(let headerName in options['customHeaders']) { if (options['customHeaders'].hasOwnProperty(headerName)) { httpRequest.headers[headerName] = options['customHeaders'][headerName]; } } } httpRequest.body = null; // Send Request return client.pipeline(httpRequest, (err, response, responseBody) => { if (err) { return callback(err); } let statusCode = response.statusCode; if (statusCode !== 200) { let error = new Error(responseBody); error.statusCode = response.statusCode; error.request = msRest.stripRequest(httpRequest); error.response = msRest.stripResponse(response); if (responseBody === '') responseBody = null; let parsedErrorResponse; try { parsedErrorResponse = JSON.parse(responseBody); if (parsedErrorResponse) { let internalError = null; if (parsedErrorResponse.error) internalError = parsedErrorResponse.error; error.code = internalError ? internalError.code : parsedErrorResponse.code; error.message = internalError ? 
internalError.message : parsedErrorResponse.message; } if (parsedErrorResponse !== null && parsedErrorResponse !== undefined) { let resultMapper = new client.models['ErrorResponse']().mapper(); error.body = client.deserialize(resultMapper, parsedErrorResponse, 'error.body'); } } catch (defaultError) { error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody ` + `- "${responseBody}" for the default response.`; return callback(error); } return callback(error); } // Create Result let result = null; if (responseBody === '') responseBody = null; // Deserialize Response if (statusCode === 200) { let parsedResponse = null; try { parsedResponse = JSON.parse(responseBody); result = JSON.parse(responseBody); if (parsedResponse !== null && parsedResponse !== undefined) { let resultMapper = new client.models['JobStreamListResult']().mapper(); result = client.deserialize(resultMapper, parsedResponse, 'result'); } } catch (error) { let deserializationError = new Error(`Error ${error} occurred in deserializing the responseBody - ${responseBody}`); deserializationError.request = msRest.stripRequest(httpRequest); deserializationError.response = msRest.stripResponse(response); return callback(deserializationError); } } return callback(null, result, httpRequest, response); }); } /** * Retrieve a list of jobs streams identified by job name. * * @param {string} nextPageLink The NextLink from the previous successful call * to List operation. * * @param {object} [options] Optional Parameters. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {function} callback - The callback. * * @returns {function} callback(err, result, request, response) * * {Error} err - The Error object if an error occurred, null otherwise. * * {object} [result] - The deserialized result object if an error did not occur. 
* See {@link JobStreamListResult} for more information. * * {object} [request] - The HTTP Request object if an error did not occur. * * {stream} [response] - The HTTP Response stream if an error did not occur. */ function _listByJobNext(nextPageLink, options, callback) { /* jshint validthis: true */ let client = this.client; if(!callback && typeof options === 'function') { callback = options; options = null; } if (!callback) { throw new Error('callback cannot be null.'); } let clientRequestId = (options && options.clientRequestId !== undefined) ? options.clientRequestId : undefined; // Validate try { if (nextPageLink === null || nextPageLink === undefined || typeof nextPageLink.valueOf() !== 'string') { throw new Error('nextPageLink cannot be null or undefined and it must be of type string.'); } if (clientRequestId !== null && clientRequestId !== undefined && typeof clientRequestId.valueOf() !== 'string') { throw new Error('clientRequestId must be of type string.'); } if (this.client.acceptLanguage !== null && this.client.acceptLanguage !== undefined && typeof this.client.acceptLanguage.valueOf() !== 'string') { throw new Error('this.client.acceptLanguage must be of type string.'); } } catch (error) { return callback(error); } // Construct URL let requestUrl = '{nextLink}'; requestUrl = requestUrl.replace('{nextLink}', nextPageLink); // Create HTTP transport objects let httpRequest = new WebResource(); httpRequest.method = 'GET'; httpRequest.url = requestUrl; httpRequest.headers = {}; // Set Headers httpRequest.headers['Content-Type'] = 'application/json; charset=utf-8'; if (this.client.generateClientRequestId) { httpRequest.headers['x-ms-client-request-id'] = msRestAzure.generateUuid(); } if (clientRequestId !== undefined && clientRequestId !== null) { httpRequest.headers['clientRequestId'] = clientRequestId; } if (this.client.acceptLanguage !== undefined && this.client.acceptLanguage !== null) { httpRequest.headers['accept-language'] = this.client.acceptLanguage; 
} if(options) { for(let headerName in options['customHeaders']) { if (options['customHeaders'].hasOwnProperty(headerName)) { httpRequest.headers[headerName] = options['customHeaders'][headerName]; } } } httpRequest.body = null; // Send Request return client.pipeline(httpRequest, (err, response, responseBody) => { if (err) { return callback(err); } let statusCode = response.statusCode; if (statusCode !== 200) { let error = new Error(responseBody); error.statusCode = response.statusCode; error.request = msRest.stripRequest(httpRequest); error.response = msRest.stripResponse(response); if (responseBody === '') responseBody = null; let parsedErrorResponse; try { parsedErrorResponse = JSON.parse(responseBody); if (parsedErrorResponse) { let internalError = null; if (parsedErrorResponse.error) internalError = parsedErrorResponse.error; error.code = internalError ? internalError.code : parsedErrorResponse.code; error.message = internalError ? internalError.message : parsedErrorResponse.message; } if (parsedErrorResponse !== null && parsedErrorResponse !== undefined) { let resultMapper = new client.models['ErrorResponse']().mapper(); error.body = client.deserialize(resultMapper, parsedErrorResponse, 'error.body'); } } catch (defaultError) { error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody ` + `- "${responseBody}" for the default response.`; return callback(error); } return callback(error); } // Create Result let result = null; if (responseBody === '') responseBody = null; // Deserialize Response if (statusCode === 200) { let parsedResponse = null; try { parsedResponse = JSON.parse(responseBody); result = JSON.parse(responseBody); if (parsedResponse !== null && parsedResponse !== undefined) { let resultMapper = new client.models['JobStreamListResult']().mapper(); result = client.deserialize(resultMapper, parsedResponse, 'result'); } } catch (error) { let deserializationError = new Error(`Error ${error} occurred in deserializing the 
responseBody - ${responseBody}`); deserializationError.request = msRest.stripRequest(httpRequest); deserializationError.response = msRest.stripResponse(response); return callback(deserializationError); } } return callback(null, result, httpRequest, response); }); } /** Class representing a JobStreamOperations. */ class JobStreamOperations { /** * Create a JobStreamOperations. * @param {AutomationClient} client Reference to the service client. */ constructor(client) { this.client = client; this._get = _get; this._listByJob = _listByJob; this._listByJobNext = _listByJobNext; } /** * Retrieve the job stream identified by job stream id. * * @param {string} resourceGroupName Name of an Azure Resource group. * * @param {string} automationAccountName The name of the automation account. * * @param {string} jobName The job name. * * @param {string} jobStreamId The job stream id. * * @param {object} [options] Optional Parameters. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<JobStream>} - The deserialized result object. * * @reject {Error} - The error object. */ getWithHttpOperationResponse(resourceGroupName, automationAccountName, jobName, jobStreamId, options) { let client = this.client; let self = this; return new Promise((resolve, reject) => { self._get(resourceGroupName, automationAccountName, jobName, jobStreamId, options, (err, result, request, response) => { let httpOperationResponse = new msRest.HttpOperationResponse(request, response); httpOperationResponse.body = result; if (err) { reject(err); } else { resolve(httpOperationResponse); } return; }); }); } /** * Retrieve the job stream identified by job stream id. * * @param {string} resourceGroupName Name of an Azure Resource group. 
* * @param {string} automationAccountName The name of the automation account. * * @param {string} jobName The job name. * * @param {string} jobStreamId The job stream id. * * @param {object} [options] Optional Parameters. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {function} [optionalCallback] - The optional callback. * * @returns {function|Promise} If a callback was passed as the last parameter * then it returns the callback else returns a Promise. * * {Promise} A promise is returned * * @resolve {JobStream} - The deserialized result object. * * @reject {Error} - The error object. * * {function} optionalCallback(err, result, request, response) * * {Error} err - The Error object if an error occurred, null otherwise. * * {object} [result] - The deserialized result object if an error did not occur. * See {@link JobStream} for more information. * * {object} [request] - The HTTP Request object if an error did not occur. * * {stream} [response] - The HTTP Response stream if an error did not occur. */ get(resourceGroupName, automationAccountName, jobName, jobStreamId, options, optionalCallback) { let client = this.client; let self = this; if (!optionalCallback && typeof options === 'function') { optionalCallback = options; options = null; } if (!optionalCallback) { return new Promise((resolve, reject) => { self._get(resourceGroupName, automationAccountName, jobName, jobStreamId, options, (err, result, request, response) => { if (err) { reject(err); } else { resolve(result); } return; }); }); } else { return self._get(resourceGroupName, automationAccountName, jobName, jobStreamId, options, optionalCallback); } } /** * Retrieve a list of jobs streams identified by job name. * * @param {string} resourceGroupName Name of an Azure Resource group. * * @param {string} automationAccountName The name of the automation account. 
* * @param {string} jobName The job name. * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<JobStreamListResult>} - The deserialized result object. * * @reject {Error} - The error object. */ listByJobWithHttpOperationResponse(resourceGroupName, automationAccountName, jobName, options) { let client = this.client; let self = this; return new Promise((resolve, reject) => { self._listByJob(resourceGroupName, automationAccountName, jobName, options, (err, result, request, response) => { let httpOperationResponse = new msRest.HttpOperationResponse(request, response); httpOperationResponse.body = result; if (err) { reject(err); } else { resolve(httpOperationResponse); } return; }); }); } /** * Retrieve a list of jobs streams identified by job name. * * @param {string} resourceGroupName Name of an Azure Resource group. * * @param {string} automationAccountName The name of the automation account. * * @param {string} jobName The job name. * * @param {object} [options] Optional Parameters. * * @param {string} [options.filter] The filter to apply on the operation. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {function} [optionalCallback] - The optional callback. * * @returns {function|Promise} If a callback was passed as the last parameter * then it returns the callback else returns a Promise. * * {Promise} A promise is returned * * @resolve {JobStreamListResult} - The deserialized result object. * * @reject {Error} - The error object. 
* * {function} optionalCallback(err, result, request, response) * * {Error} err - The Error object if an error occurred, null otherwise. * * {object} [result] - The deserialized result object if an error did not occur. * See {@link JobStreamListResult} for more information. * * {object} [request] - The HTTP Request object if an error did not occur. * * {stream} [response] - The HTTP Response stream if an error did not occur. */ listByJob(resourceGroupName, automationAccountName, jobName, options, optionalCallback) { let client = this.client; let self = this; if (!optionalCallback && typeof options === 'function') { optionalCallback = options; options = null; } if (!optionalCallback) { return new Promise((resolve, reject) => { self._listByJob(resourceGroupName, automationAccountName, jobName, options, (err, result, request, response) => { if (err) { reject(err); } else { resolve(result); } return; }); }); } else { return self._listByJob(resourceGroupName, automationAccountName, jobName, options, optionalCallback); } } /** * Retrieve a list of jobs streams identified by job name. * * @param {string} nextPageLink The NextLink from the previous successful call * to List operation. * * @param {object} [options] Optional Parameters. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @returns {Promise} A promise is returned * * @resolve {HttpOperationResponse<JobStreamListResult>} - The deserialized result object. * * @reject {Error} - The error object. 
*/ listByJobNextWithHttpOperationResponse(nextPageLink, options) { let client = this.client; let self = this; return new Promise((resolve, reject) => { self._listByJobNext(nextPageLink, options, (err, result, request, response) => { let httpOperationResponse = new msRest.HttpOperationResponse(request, response); httpOperationResponse.body = result; if (err) { reject(err); } else { resolve(httpOperationResponse); } return; }); }); } /** * Retrieve a list of jobs streams identified by job name. * * @param {string} nextPageLink The NextLink from the previous successful call * to List operation. * * @param {object} [options] Optional Parameters. * * @param {string} [options.clientRequestId] Identifies this specific client * request. * * @param {object} [options.customHeaders] Headers that will be added to the * request * * @param {function} [optionalCallback] - The optional callback. * * @returns {function|Promise} If a callback was passed as the last parameter * then it returns the callback else returns a Promise. * * {Promise} A promise is returned * * @resolve {JobStreamListResult} - The deserialized result object. * * @reject {Error} - The error object. * * {function} optionalCallback(err, result, request, response) * * {Error} err - The Error object if an error occurred, null otherwise. * * {object} [result] - The deserialized result object if an error did not occur. * See {@link JobStreamListResult} for more information. * * {object} [request] - The HTTP Request object if an error did not occur. * * {stream} [response] - The HTTP Response stream if an error did not occur. 
*/ listByJobNext(nextPageLink, options, optionalCallback) { let client = this.client; let self = this; if (!optionalCallback && typeof options === 'function') { optionalCallback = options; options = null; } if (!optionalCallback) { return new Promise((resolve, reject) => { self._listByJobNext(nextPageLink, options, (err, result, request, response) => { if (err) { reject(err); } else { resolve(result); } return; }); }); } else { return self._listByJobNext(nextPageLink, options, optionalCallback); } } } module.exports = JobStreamOperations;
xingwu1/azure-sdk-for-node
lib/services/automationManagement/lib/operations/jobStreamOperations.js
JavaScript
apache-2.0
33,385
/*=========================================================================
 *
 *  Copyright NumFOCUS
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/
#ifndef itkImageRegionConstIteratorWithIndex_h
#define itkImageRegionConstIteratorWithIndex_h

#include "itkImageConstIteratorWithIndex.h"

namespace itk
{
/** \class ImageRegionConstIteratorWithIndex
 * \brief A multi-dimensional
 * iterator templated over image type that walks an image region and is
 * specialized to keep track of its index location.
 *
 * The "WithIndex" family of iterators was designed for algorithms that use both
 * the values and locations of image pixels in calculations. Unlike
 * ImageRegionIterator, which calculates an index only when requested,
 * ImageRegionIteratorWithIndex maintains its index location as a member
 * variable that is updated during increment and decrement operations.
 * Iteration speed is penalized, but index queries become more efficient.
 *
 * ImageRegionConstIteratorWithIndex is a multi-dimensional iterator,
 * requiring more information be specified before the iterator can be
 * used than conventional iterators. Whereas the std::vector::iterator
 * from the STL only needs to be passed a pointer to establish the
 * iterator, the multi-dimensional image iterator needs a pointer, the
 * size of the buffer, the size of the region, the start index of the
 * buffer, and the start index of the region.
 * To gain access to this
 * information, ImageRegionConstIteratorWithIndex holds a reference to the
 * image over which it is traversing.
 *
 * ImageRegionConstIteratorWithIndex assumes a particular layout of
 * the image data. The data is arranged in a 1D array as if it were
 * [][][][slice][row][col] with Index[0] = col, Index[1] = row,
 * Index[2] = slice, etc.
 *
 * operator++ provides a simple syntax for walking around a region of
 * a multidimensional image. operator++ iterates across a row, constraining
 * the movement to within a region of image. When the iterator reaches
 * the boundary of the region along a row, the iterator automatically
 * wraps to the next row, starting at the first pixel in the row that is
 * part of the region. This allows for simple processing loops of the form:
 * \code
     IteratorType it( image, image->GetRequestedRegion() );
     it.Begin();
     while( ! it.IsAtEnd() )
     {
       it.Set( 100.0 + it.Get() );
       ++it;
     }
   \endcode
 *
 * It also can be used for walking in the reverse direction like
 * \code
     IteratorType it( image, image->GetRequestedRegion() );
     it.End();
     while( !it.IsAtBegin() )
     {
       it.Set( 100.0 );
       --it;
     }
   \endcode
 *
 * \par MORE INFORMATION
 *
 * For a complete description of the ITK Image Iterators and their API, please
 * see the Iterators chapter in the ITK Software Guide. The ITK Software Guide
 * is available in print and as a free .pdf download from https://www.itk.org.
 *
 * \ingroup ImageIterators
 *
 * \sa ImageConstIterator \sa ConditionalConstIterator
 * \sa ConstNeighborhoodIterator \sa ConstShapedNeighborhoodIterator
 * \sa ConstSliceIterator \sa CorrespondenceDataStructureIterator
 * \sa FloodFilledFunctionConditionalConstIterator
 * \sa FloodFilledImageFunctionConditionalConstIterator
 * \sa FloodFilledImageFunctionConditionalIterator
 * \sa FloodFilledSpatialFunctionConditionalConstIterator
 * \sa FloodFilledSpatialFunctionConditionalIterator
 * \sa ImageConstIterator \sa ImageConstIteratorWithIndex
 * \sa ImageIterator \sa ImageIteratorWithIndex
 * \sa ImageLinearConstIteratorWithIndex \sa ImageLinearIteratorWithIndex
 * \sa ImageRandomConstIteratorWithIndex \sa ImageRandomIteratorWithIndex
 * \sa ImageRegionConstIterator \sa ImageRegionConstIteratorWithIndex
 * \sa ImageRegionExclusionConstIteratorWithIndex
 * \sa ImageRegionExclusionIteratorWithIndex
 * \sa ImageRegionIterator \sa ImageRegionIteratorWithIndex
 * \sa ImageRegionReverseConstIterator \sa ImageRegionReverseIterator
 * \sa ImageReverseConstIterator \sa ImageReverseIterator
 * \sa ImageSliceConstIteratorWithIndex \sa ImageSliceIteratorWithIndex
 * \sa NeighborhoodIterator \sa PathConstIterator \sa PathIterator
 * \sa ShapedNeighborhoodIterator \sa SliceIterator
 * \sa ImageConstIteratorWithIndex
 * \ingroup ITKCommon
 *
 *
 * \sphinx
 * \sphinxexample{Core/Common/IterateRegionWithAccessToIndexWithoutWriteAccess,Iterate Region In Image With Access To Index Without Write Access}
 * \endsphinx
 */
template <typename TImage>
class ITK_TEMPLATE_EXPORT ImageRegionConstIteratorWithIndex : public ImageConstIteratorWithIndex<TImage>
{
public:
  /** Standard class type aliases. */
  using Self = ImageRegionConstIteratorWithIndex;
  using Superclass = ImageConstIteratorWithIndex<TImage>;

  /**
   * Index type alias support. While these were already typedef'ed in the superclass
   * they need to be redone here for this subclass to compile properly with gcc.
   */
  /** Types inherited from the Superclass */
  using IndexType = typename Superclass::IndexType;
  using SizeType = typename Superclass::SizeType;
  using OffsetType = typename Superclass::OffsetType;
  using RegionType = typename Superclass::RegionType;
  using ImageType = typename Superclass::ImageType;
  using PixelContainer = typename Superclass::PixelContainer;
  using PixelContainerPointer = typename Superclass::PixelContainerPointer;
  using InternalPixelType = typename Superclass::InternalPixelType;
  using PixelType = typename Superclass::PixelType;
  using AccessorType = typename Superclass::AccessorType;

  /** Default constructor. Needed since we provide a cast constructor. */
  ImageRegionConstIteratorWithIndex()
    : ImageConstIteratorWithIndex<TImage>()
  {}

  /** Constructor establishes an iterator to walk a particular image and a
   * particular region of that image. */
  ImageRegionConstIteratorWithIndex(const TImage * ptr, const RegionType & region)
    : ImageConstIteratorWithIndex<TImage>(ptr, region)
  {}

  /** Constructor that can be used to cast from an ImageIterator to an
   * ImageRegionConstIteratorWithIndex. Many routines return an ImageIterator but for a
   * particular task, you may want an ImageRegionConstIteratorWithIndex. Rather than
   * provide overloaded APIs that return different types of Iterators, itk
   * returns ImageIterators and uses constructors to cast from an
   * ImageIterator to a ImageRegionConstIteratorWithIndex. */
  ImageRegionConstIteratorWithIndex(const ImageConstIteratorWithIndex<TImage> & it)
  {
    // Reuse the base-class assignment operator so all iterator state
    // (image reference, region, position, index) is copied consistently.
    this->ImageConstIteratorWithIndex<TImage>::operator=(it);
  }

  /** Increment (prefix) the fastest moving dimension of the iterator's index.
   * This operator will constrain the iterator within the region (i.e. the
   * iterator will automatically wrap from the end of the row of the region
   * to the beginning of the next row of the region) up until the iterator
   * tries to move past the last pixel of the region. Here, the iterator
   * will be set to be one pixel past the end of the region.
   * \sa operator-- */
  Self &
  operator++();

  /** Decrement (prefix) the fastest moving dimension of the iterator's index.
   * This operator will constrain the iterator within the region (i.e. the
   * iterator will automatically wrap from the beginning of the row of the
   * region to the end of the previous row of the region) up until the iterator
   * tries to move past the first pixel of the region. Here, the iterator
   * will be set to be one pixel past the beginning of the region.
   * \sa operator++ */
  Self &
  operator--();
};
} // end namespace itk

#ifndef ITK_MANUAL_INSTANTIATION
#  include "itkImageRegionConstIteratorWithIndex.hxx"
#endif

#endif
malaterre/ITK
Modules/Core/Common/include/itkImageRegionConstIteratorWithIndex.h
C
apache-2.0
8,294
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2015.01.16 at 12:56:36 PM IST // package org.akomantoso.schema.v3.csd11; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAnyAttribute; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlMixed; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.namespace.QName; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;group ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11}inlineCM"/> * &lt;element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11}ul"/> * &lt;element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11}ol"/> * &lt;element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11}p"/> * &lt;/choice> * &lt;attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11}coreopt"/> * &lt;attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11}optvalue"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "li") public class Li { @XmlElementRefs({ @XmlElementRef(name = "b", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "session", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Session.class, required = false), @XmlElementRef(name = "embeddedText", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = EmbeddedText.class, required = false), @XmlElementRef(name = "eol", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "outcome", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "docType", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "mmod", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "docTitle", namespace = 
"http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "date", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Date.class, required = false), @XmlElementRef(name = "docPurpose", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "sub", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "mref", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "docNumber", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "relatedDocument", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = RelatedDocument.class, required = false), @XmlElementRef(name = "time", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Time.class, required = false), @XmlElementRef(name = "docDate", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = DocDate.class, required = false), @XmlElementRef(name = "mod", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "fillIn", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = FillIn.class, required = false), @XmlElementRef(name = "organization", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "location", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "opinion", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Opinion.class, required = 
false), @XmlElementRef(name = "concept", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "omissis", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "docIntroducer", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "inline", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = ElementInline.class, required = false), @XmlElementRef(name = "eop", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "placeholder", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Placeholder.class, required = false), @XmlElementRef(name = "i", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "vote", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Vote.class, required = false), @XmlElementRef(name = "docJurisdiction", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "br", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "neutralCitation", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "object", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "decoration", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "entity", namespace = 
"http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Entity.class, required = false), @XmlElementRef(name = "span", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "sup", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "a", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = A.class, required = false), @XmlElementRef(name = "docStatus", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "signature", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "affectedDocument", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = AffectedDocument.class, required = false), @XmlElementRef(name = "docAuthority", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "abbr", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "judge", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Judge.class, required = false), @XmlElementRef(name = "subFlow", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "role", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "docCommittee", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = DocCommittee.class, required = false), @XmlElementRef(name = "docStage", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, 
required = false), @XmlElementRef(name = "process", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "quantity", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Quantity.class, required = false), @XmlElementRef(name = "img", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Img.class, required = false), @XmlElementRef(name = "term", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "lawyer", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Lawyer.class, required = false), @XmlElementRef(name = "def", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "rmod", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Rmod.class, required = false), @XmlElementRef(name = "legislature", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Legislature.class, required = false), @XmlElementRef(name = "remark", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Remark.class, required = false), @XmlElementRef(name = "ul", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "ref", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Ref.class, required = false), @XmlElementRef(name = "recordedTime", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = RecordedTime.class, required = false), @XmlElementRef(name = "noteRef", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = NoteRef.class, required = false), @XmlElementRef(name = "ol", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = 
JAXBElement.class, required = false), @XmlElementRef(name = "del", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "marker", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Marker.class, required = false), @XmlElementRef(name = "embeddedStructure", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = EmbeddedStructure.class, required = false), @XmlElementRef(name = "docketNumber", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "authorialNote", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = AuthorialNote.class, required = false), @XmlElementRef(name = "shortTitle", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "p", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "rref", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Rref.class, required = false), @XmlElementRef(name = "party", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Party.class, required = false), @XmlElementRef(name = "argument", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "change", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "u", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "event", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "person", namespace = 
"http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = Person.class, required = false), @XmlElementRef(name = "ins", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false), @XmlElementRef(name = "docProponent", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = DocProponent.class, required = false), @XmlElementRef(name = "courtType", namespace = "http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD11", type = JAXBElement.class, required = false) }) @XmlMixed protected List<Serializable> content; @XmlAttribute(name = "status") protected StatusType status; @XmlAttribute(name = "period") @XmlSchemaType(name = "anyURI") protected String period; @XmlAttribute(name = "class") protected String clazz; @XmlAttribute(name = "style") protected String style; @XmlAttribute(name = "title") protected String titleAttr; @XmlAttribute(name = "refersTo") protected List<String> refersTo; @XmlAttribute(name = "eId") protected String eId; @XmlAttribute(name = "wId") protected String wId; @XmlAttribute(name = "GUID") protected String guid; @XmlAttribute(name = "alternativeTo") @XmlSchemaType(name = "anyURI") protected String alternativeTo; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") protected String lang; @XmlAttribute(name = "space", namespace = "http://www.w3.org/XML/1998/namespace") protected String space; @XmlAttribute(name = "value") protected String value; @XmlAnyAttribute private Map<QName, String> otherAttributes = new HashMap<QName, String>(); /** * Gets the value of the content property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the content property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getContent().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Session } * {@link EmbeddedText } * {@link JAXBElement }{@code <}{@link EolType }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ModType }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Date } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link RelatedDocument } * {@link Time } * {@link DocDate } * {@link JAXBElement }{@code <}{@link ModType }{@code >} * {@link FillIn } * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link Opinion } * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link ElementInline } * {@link JAXBElement }{@code <}{@link EolType }{@code >} * {@link Placeholder } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Vote } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link Markeropt }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Entity } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link 
ComplexTypeInline }{@code >} * {@link A } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link AffectedDocument } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Judge } * {@link JAXBElement }{@code <}{@link SubFlowStructure }{@code >} * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link DocCommittee } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link Quantity } * {@link Img } * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link Lawyer } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Rmod } * {@link Legislature } * {@link Remark } * {@link JAXBElement }{@code <}{@link ListItems }{@code >} * {@link Ref } * {@link RecordedTime } * {@link NoteRef } * {@link JAXBElement }{@code <}{@link ListItems }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Marker } * {@link EmbeddedStructure } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link AuthorialNote } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link Rref } * {@link Party } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link JAXBElement }{@code <}{@link Inlinereqreq }{@code >} * {@link Person } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * {@link String } * {@link DocProponent } * {@link JAXBElement }{@code <}{@link ComplexTypeInline }{@code >} * * */ public List<Serializable> getContent() { if (content == null) { content = new ArrayList<Serializable>(); } 
return this.content; } /** * Gets the value of the status property. * * @return * possible object is * {@link StatusType } * */ public StatusType getStatus() { return status; } /** * Sets the value of the status property. * * @param value * allowed object is * {@link StatusType } * */ public void setStatus(StatusType value) { this.status = value; } /** * Gets the value of the period property. * * @return * possible object is * {@link String } * */ public String getPeriod() { return period; } /** * Sets the value of the period property. * * @param value * allowed object is * {@link String } * */ public void setPeriod(String value) { this.period = value; } /** * Gets the value of the clazz property. * * @return * possible object is * {@link String } * */ public String getClazz() { return clazz; } /** * Sets the value of the clazz property. * * @param value * allowed object is * {@link String } * */ public void setClazz(String value) { this.clazz = value; } /** * Gets the value of the style property. * * @return * possible object is * {@link String } * */ public String getStyle() { return style; } /** * Sets the value of the style property. * * @param value * allowed object is * {@link String } * */ public void setStyle(String value) { this.style = value; } /** * Gets the value of the titleAttr property. * * @return * possible object is * {@link String } * */ public String getTitleAttr() { return titleAttr; } /** * Sets the value of the titleAttr property. * * @param value * allowed object is * {@link String } * */ public void setTitleAttr(String value) { this.titleAttr = value; } /** * Gets the value of the refersTo property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the refersTo property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getRefersTo().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getRefersTo() { if (refersTo == null) { refersTo = new ArrayList<String>(); } return this.refersTo; } /** * Gets the value of the eId property. * * @return * possible object is * {@link String } * */ public String getEId() { return eId; } /** * Sets the value of the eId property. * * @param value * allowed object is * {@link String } * */ public void setEId(String value) { this.eId = value; } /** * Gets the value of the wId property. * * @return * possible object is * {@link String } * */ public String getWId() { return wId; } /** * Sets the value of the wId property. * * @param value * allowed object is * {@link String } * */ public void setWId(String value) { this.wId = value; } /** * Gets the value of the guid property. * * @return * possible object is * {@link String } * */ public String getGUID() { return guid; } /** * Sets the value of the guid property. * * @param value * allowed object is * {@link String } * */ public void setGUID(String value) { this.guid = value; } /** * Gets the value of the alternativeTo property. * * @return * possible object is * {@link String } * */ public String getAlternativeTo() { return alternativeTo; } /** * Sets the value of the alternativeTo property. * * @param value * allowed object is * {@link String } * */ public void setAlternativeTo(String value) { this.alternativeTo = value; } /** * Gets the value of the lang property. * * @return * possible object is * {@link String } * */ public String getLang() { return lang; } /** * Sets the value of the lang property. * * @param value * allowed object is * {@link String } * */ public void setLang(String value) { this.lang = value; } /** * Gets the value of the space property. 
* * @return * possible object is * {@link String } * */ public String getSpace() { return space; } /** * Sets the value of the space property. * * @param value * allowed object is * {@link String } * */ public void setSpace(String value) { this.space = value; } /** * Gets the value of the value property. * * @return * possible object is * {@link String } * */ public String getValue() { return value; } /** * Sets the value of the value property. * * @param value * allowed object is * {@link String } * */ public void setValue(String value) { this.value = value; } /** * Gets a map that contains attributes that aren't bound to any typed property on this class. * * <p> * the map is keyed by the name of the attribute and * the value is the string value of the attribute. * * the map returned by this method is live, and you can add new attribute * by updating the map directly. Because of this design, there's no setter. * * * @return * always non-null */ public Map<QName, String> getOtherAttributes() { return otherAttributes; } }
kohsah/akomantoso-lib
src/main/java/org/akomantoso/schema/v3/csd11/Li.java
Java
apache-2.0
27,264
/* Copyright 2015 Yurii Litvinov and CyberTech Labs Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "systemConsoleInterface.h" namespace trikHal { namespace trik { /// Real implementation of system console. class TrikSystemConsole : public SystemConsoleInterface { public: int system(const QString &command) override; bool startProcess(const QString &processName, const QStringList &arguments) override; bool startProcessSynchronously(const QString &processName, const QStringList &arguments , QString * output = nullptr) override; }; } }
iakov/trikRuntime
trikHal/src/trik/trikSystemConsole.h
C
apache-2.0
1,090
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_45) on Thu Nov 13 21:21:59 UTC 2014 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> org.apache.hadoop.lib.service.hadoop Class Hierarchy (Apache Hadoop Main 2.6.0 API) </TITLE> <META NAME="date" CONTENT="2014-11-13"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.apache.hadoop.lib.service.hadoop Class Hierarchy (Apache Hadoop Main 2.6.0 API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD 
BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../org/apache/hadoop/lib/service/package-tree.html"><B>PREV</B></A>&nbsp; &nbsp;<A HREF="../../../../../../org/apache/hadoop/lib/service/instrumentation/package-tree.html"><B>NEXT</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/hadoop/lib/service/hadoop/package-tree.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> Hierarchy For Package org.apache.hadoop.lib.service.hadoop </H2> </CENTER> <DL> <DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../overview-tree.html">All Packages</A></DL> <HR> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A 
HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../org/apache/hadoop/lib/service/package-tree.html"><B>PREV</B></A>&nbsp; &nbsp;<A HREF="../../../../../../org/apache/hadoop/lib/service/instrumentation/package-tree.html"><B>NEXT</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/hadoop/lib/service/hadoop/package-tree.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM 
NAVBAR ======= --> <HR> Copyright &#169; 2014 <a href="http://www.apache.org">Apache Software Foundation</a>. All Rights Reserved. </BODY> </HTML>
SAT-Hadoop/hadoop-2.6.0
share/doc/hadoop/api/org/apache/hadoop/lib/service/hadoop/package-tree.html
HTML
apache-2.0
6,287
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.agent.api; public class GetDomRVersionAnswer extends Answer { public static final String ROUTER_NAME = "router.name"; public static final String ROUTER_IP = "router.ip"; String templateVersion; String scriptsVersion; protected GetDomRVersionAnswer() { } public GetDomRVersionAnswer(GetDomRVersionCmd cmd, String details, String templateVersion, String scriptsVersion) { super(cmd, true, details); this.templateVersion = templateVersion; this.scriptsVersion = scriptsVersion; } public GetDomRVersionAnswer(GetDomRVersionCmd cmd, String details) { super(cmd, false, details); } public String getTemplateVersion() { return this.templateVersion; } public String getScriptsVersion() { return this.scriptsVersion; } }
cinderella/incubator-cloudstack
api/src/com/cloud/agent/api/GetDomRVersionAnswer.java
Java
apache-2.0
1,661
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ws;

import javax.jws.WebService;

/**
 * SOAP web service exposing a single operation that returns a {@link Persona}.
 *
 * @author usuario
 */
@WebService
public class PersonaWebService {

    /**
     * Creates a new, default-initialized Persona.
     *
     * @return a freshly constructed Persona instance
     */
    public Persona crearPersona() {
        Persona nueva = new Persona();
        return nueva;
    }
}
xroca/planFormacionJava
serviciosweb/Objetos/EjemploWsObjetosMaven/src/main/java/ws/PersonaWebService.java
Java
apache-2.0
378
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.kie.services.impl.admin; import static org.jbpm.services.api.query.QueryResultMapper.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.kie.scanner.KieMavenRepository.getKieMavenRepository; import java.io.File; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.assertj.core.api.Assertions; import org.drools.compiler.kie.builder.impl.InternalKieModule; import org.jbpm.kie.services.impl.KModuleDeploymentUnit; import org.jbpm.kie.services.impl.query.SqlQueryDefinition; import org.jbpm.kie.services.impl.query.mapper.RawListQueryMapper; import org.jbpm.kie.services.impl.query.mapper.TaskSummaryQueryMapper; import org.jbpm.kie.services.impl.query.mapper.UserTaskInstanceQueryMapper; import org.jbpm.kie.services.test.KModuleDeploymentServiceTest; import org.jbpm.kie.test.util.AbstractKieServicesBaseTest; import org.jbpm.kie.test.util.CountDownListenerFactory; import org.jbpm.services.api.ProcessInstanceNotFoundException; import org.jbpm.services.api.TaskNotFoundException; import org.jbpm.services.api.admin.TaskNotification; import org.jbpm.services.api.admin.TaskReassignment; import 
org.jbpm.services.api.admin.UserTaskAdminService; import org.jbpm.services.api.model.DeploymentUnit; import org.jbpm.services.api.model.UserTaskInstanceDesc; import org.jbpm.services.api.query.model.QueryDefinition; import org.jbpm.services.api.query.model.QueryParam; import org.jbpm.services.api.query.model.QueryDefinition.Target; import org.jbpm.shared.services.impl.TransactionalCommandService; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kie.api.KieServices; import org.kie.api.builder.ReleaseId; import org.kie.api.runtime.process.ProcessInstance; import org.kie.api.runtime.query.QueryContext; import org.kie.api.task.model.OrganizationalEntity; import org.kie.api.task.model.Status; import org.kie.api.task.model.TaskSummary; import org.kie.internal.query.QueryFilter; import org.kie.internal.runtime.conf.ObjectModel; import org.kie.internal.task.api.TaskModelFactory; import org.kie.internal.task.api.TaskModelProvider; import org.kie.internal.task.api.model.EmailNotification; import org.kie.internal.task.api.model.TaskEvent; import org.kie.scanner.KieMavenRepository; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; public class UserTaskAdminServiceImplTest extends AbstractKieServicesBaseTest { private static final Logger logger = LoggerFactory.getLogger(KModuleDeploymentServiceTest.class); protected static final String ADMIN_ARTIFACT_ID = "test-admin"; protected static final String ADMIN_GROUP_ID = "org.jbpm.test"; protected static final String ADMIN_VERSION_V1 = "1.0.0"; private List<DeploymentUnit> units = new ArrayList<DeploymentUnit>(); private KModuleDeploymentUnit deploymentUnit; private Long processInstanceId = null; protected UserTaskAdminService userTaskAdminService; private TaskModelFactory factory = TaskModelProvider.getFactory(); @Before public void prepare() { configureServices(); logger.debug("Preparing kjar"); KieServices ks = 
KieServices.Factory.get(); // version 1 of kjar ReleaseId releaseId = ks.newReleaseId(ADMIN_GROUP_ID, ADMIN_ARTIFACT_ID, ADMIN_VERSION_V1); List<String> processes = new ArrayList<String>(); processes.add("repo/processes/general/humanTask.bpmn"); InternalKieModule kJar1 = createKieJar(ks, releaseId, processes); File pom = new File("target/admin", "pom.xml"); pom.getParentFile().mkdir(); try { FileOutputStream fs = new FileOutputStream(pom); fs.write(getPom(releaseId).getBytes()); fs.close(); } catch (Exception e) { } KieMavenRepository repository = getKieMavenRepository(); repository.installArtifact(releaseId, kJar1, pom); userTaskAdminService = new UserTaskAdminServiceImpl(); ((UserTaskAdminServiceImpl) userTaskAdminService).setUserTaskService(userTaskService); ((UserTaskAdminServiceImpl) userTaskAdminService).setRuntimeDataService(runtimeDataService); ((UserTaskAdminServiceImpl) userTaskAdminService).setIdentityProvider(identityProvider); ((UserTaskAdminServiceImpl) userTaskAdminService).setCommandService(new TransactionalCommandService(emf)); // now let's deploy to runtime both kjars deploymentUnit = new KModuleDeploymentUnit(ADMIN_GROUP_ID, ADMIN_ARTIFACT_ID, ADMIN_VERSION_V1); deploymentService.deploy(deploymentUnit); units.add(deploymentUnit); // set user to administrator so it will be allowed to do operations identityProvider.setName("Administrator"); identityProvider.setRoles(Collections.singletonList("")); } @After public void cleanup() { cleanupSingletonSessionId(); if (processInstanceId != null) { try { // let's abort process instance to leave the system in clear state processService.abortProcessInstance(processInstanceId); ProcessInstance pi = processService.getProcessInstance(processInstanceId); Assertions.assertThat(pi).isNull(); } catch (ProcessInstanceNotFoundException e) { // ignore it as it might already be completed/aborted } } if (units != null && !units.isEmpty()) { for (DeploymentUnit unit : units) { try { deploymentService.undeploy(unit); } 
catch (Exception e) { // do nothing in case of some failed tests to avoid next test to fail as well } } units.clear(); } close(); CountDownListenerFactory.clear(); } public void setUserTaskAdminService(UserTaskAdminService userTaskAdminService) { this.userTaskAdminService = userTaskAdminService; } @Test public void testAddPotentialOwnersNotBusinessAdmin() { identityProvider.setName("notAdmin"); processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); Assertions.assertThatThrownBy( () -> userTaskAdminService.addPotentialOwners(task.getId(), false, factory.newUser("john"))) .hasMessageContaining("User notAdmin is not business admin of task 1"); } @Test public void testAddPotentialOwnersToNonExistentTask() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); Assertions.assertThatThrownBy( () -> userTaskAdminService.addPotentialOwners(15456, false, factory.newUser("john"))) .hasMessageContaining("Task with id 15456 was not found"); } @Test public void testAddRemovePotentialOwnersAsGroup() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); 
TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); // Forward the task to HR group (Add HR as potential owners) identityProvider.setRoles(Collections.singletonList("HR")); userTaskAdminService.addPotentialOwners(task.getId(), true, factory.newGroup("HR")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("katy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); // HR has no resources to handle so lets forward it to accounting userTaskAdminService.removePotentialOwners(task.getId(), factory.newGroup("HR")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("katy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); identityProvider.setRoles(Collections.singletonList("Accounting")); userTaskAdminService.addPotentialOwners(task.getId(), false, factory.newGroup("Accounting")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("mary", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test public void testAddPotentialOwners() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); userTaskAdminService.addPotentialOwners(task.getId(), false, factory.newUser("john")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent updatedEvent = events.get(2); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Potential owners [john] have been added"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new 
QueryFilter()); Assertions.assertThat(tasks).hasSize(1); userTaskAdminService.addPotentialOwners(task.getId(), true, factory.newUser("john")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test public void testAddPotentialOwnersWrongDeploymentId() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); assertThatExceptionOfType(TaskNotFoundException.class).isThrownBy(() -> { userTaskAdminService.addPotentialOwners("wrong-one", task.getId(), false, factory.newUser("john")); }) .withMessageContaining("Task with id " + task.getId() + " is not associated with wrong-one"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); } @Test public void testAddExcludedOwners() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); userTaskAdminService.addExcludedOwners(task.getId(), false, factory.newUser("salaboy")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent updatedEvent = events.get(2); 
Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Excluded owners [salaboy] have been added"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); userTaskAdminService.addExcludedOwners(task.getId(), true, factory.newUser("john")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); } @Test public void testAddBusinessAdmins() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); userTaskAdminService.addBusinessAdmins(task.getId(), false, factory.newUser("salaboy")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent updatedEvent = events.get(2); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Business administrators [salaboy] have been added"); tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); 
userTaskAdminService.addBusinessAdmins(task.getId(), true, factory.newUser("salaboy")); tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); } @Test public void testRemovePotentialOwners() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); userTaskAdminService.removePotentialOwners(task.getId(), factory.newUser("salaboy")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent updatedEvent = events.get(2); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Potential owners [salaboy] have been removed"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); } @Test public void testRemoveExcludedOwners() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); userTaskAdminService.addExcludedOwners(task.getId(), false, factory.newUser("salaboy")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent 
updatedEvent = events.get(2); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Excluded owners [salaboy] have been added"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); userTaskAdminService.removeExcludedOwners(task.getId(), factory.newUser("salaboy")); events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(4); updatedEvent = events.get(3); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Excluded owners [salaboy] have been removed"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test public void testRemoveBusinessAdmin() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskAdminService.removeBusinessAdmins(task.getId(), factory.newUser("Administrator")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(2); TaskEvent updatedEvent = events.get(1); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Business administrators [Administrator] have been removed"); List<Status> readyStatuses = Arrays.asList(new Status[]{ org.kie.api.task.model.Status.Ready }); tasks = runtimeDataService.getTasksAssignedAsBusinessAdministratorByStatus("Administrator", readyStatuses, new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); } @Test public void testAddRemoveInputData() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); 
Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Map<String, Object> inputData = userTaskService.getTaskInputContentByTaskId(task.getId()); Assertions.assertThat(inputData).doesNotContainKey("added-input"); userTaskAdminService.addTaskInput(task.getId(), "added-input", "just a test"); inputData = userTaskService.getTaskInputContentByTaskId(task.getId()); Assertions.assertThat(inputData).containsKey("added-input"); Assertions.assertThat(inputData.get("added-input")).isEqualTo("just a test"); Assertions.assertThat(inputData).doesNotContainKey("added-input2"); Assertions.assertThat(inputData).doesNotContainKey("added-input3"); Map<String, Object> extra = new HashMap<>(); extra.put("added-input2", "1"); extra.put("added-input3", "2"); userTaskAdminService.addTaskInputs(task.getId(), extra); inputData = userTaskService.getTaskInputContentByTaskId(task.getId()); Assertions.assertThat(inputData).containsKey("added-input2"); Assertions.assertThat(inputData.get("added-input2")).isEqualTo("1"); Assertions.assertThat(inputData).containsKey("added-input3"); Assertions.assertThat(inputData.get("added-input3")).isEqualTo("2"); userTaskAdminService.removeTaskInputs(task.getId(), "added-input2", "added-input3"); inputData = userTaskService.getTaskInputContentByTaskId(task.getId()); Assertions.assertThat(inputData).doesNotContainKey("added-input2"); Assertions.assertThat(inputData).doesNotContainKey("added-input3"); } @Test public void testRemoveOutputData() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); 
Map<String, Object> output = new HashMap<>(); output.put("added-output", "draft"); userTaskService.saveContent(task.getId(), output); Map<String, Object> outputData = userTaskService.getTaskOutputContentByTaskId(task.getId()); Assertions.assertThat(outputData).containsKey("added-output"); Assertions.assertThat(outputData.get("added-output")).isEqualTo("draft"); userTaskAdminService.removeTaskOutputs(task.getId(), "added-output"); outputData = userTaskService.getTaskOutputContentByTaskId(task.getId()); Assertions.assertThat(outputData).doesNotContainKey("added-output"); } @Test(timeout=10000) public void testReassignNotStarted() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskAdminService.reassignWhenNotStarted(task.getId(), "2s", factory.newUser("john")); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test(timeout=10000) public void testReassignNotStartedISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskAdminService.reassignWhenNotStarted(task.getId(), "PT2S", factory.newUser("john")); 
CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test(timeout=10000) public void testReassignNotStartedInvalidTimeExpression() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), "2ssssssss", factory.newUser("john")); }) .hasMessage("Error parsing time string: [ 2ssssssss ]"); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), null, factory.newUser("john")); }) .hasMessage("Invalid time expression"); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), "", factory.newUser("john")); }) .hasMessage("Invalid time expression"); } @Test(timeout=10000) public void testReassignNotStartedInvalidTimeExpressionISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), "PT2SSSSSSSSS", factory.newUser("john")); }) .hasMessage("Text cannot be parsed to a Duration"); 
Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), null, factory.newUser("john")); }) .hasMessage("Invalid time expression"); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), "", factory.newUser("john")); }) .hasMessage("Invalid time expression"); } @Test(timeout=10000) public void testReassignNotStartedInvalidOrgEntities() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), "2s", null); }) .hasMessage("Invalid org entity"); } @Test(timeout=10000) public void testReassignNotStartedInvalidOrgEntitiesISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotStarted(task.getId(), "PT2S", null); }) .hasMessage("Invalid org entity"); } @Test(timeout=10000) public void testReassignNotCompleted() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.start(task.getId(), "salaboy"); 
Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); userTaskAdminService.reassignWhenNotCompleted(task.getId(), "2s", factory.newUser("john")); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(1); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(1); } @Test(timeout=10000) public void testReassignNotCompletedISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.start(task.getId(), "salaboy"); Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); userTaskAdminService.reassignWhenNotCompleted(task.getId(), "PT2S", factory.newUser("john")); reassignments = 
userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(1); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(1); } @Test(timeout=10000) public void testReassignNotCompletedInvalidTimeExpression() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.start(task.getId(), "salaboy"); Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), "2ssssssss", factory.newUser("john")); }) .hasMessage("Error parsing time string: [ 2ssssssss ]"); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), null, factory.newUser("john")); }) .hasMessage("Invalid time expression"); Assertions.assertThatThrownBy(() -> { 
userTaskAdminService.reassignWhenNotCompleted(task.getId(), "", factory.newUser("john")); }) .hasMessage("Invalid time expression"); } @Test(timeout=10000) public void testReassignNotCompletedInvalidTimeExpressionISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.start(task.getId(), "salaboy"); Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), "PT2SSSSS", factory.newUser("john")); }) .hasMessage("Text cannot be parsed to a Duration"); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), null, factory.newUser("john")); }) .hasMessage("Invalid time expression"); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), "", factory.newUser("john")); }) .hasMessage("Invalid time expression"); } @Test(timeout=10000) public void testReassignNotCompletedInvalidOrgEntities() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.start(task.getId(), "salaboy"); Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); 
Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), "2s", null); }) .hasMessage("Invalid org entity"); } @Test(timeout=10000) public void testReassignNotCompletedInvalidOrgEntitiesISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.start(task.getId(), "salaboy"); Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); Assertions.assertThatThrownBy(() -> { userTaskAdminService.reassignWhenNotCompleted(task.getId(), "PT2S", null); }) .hasMessage("Invalid org entity"); } @Test(timeout=10000) public void testNotifyNotStarted() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); List<OrganizationalEntity> recipients = new ArrayList<>(); recipients.add(factory.newUser("john")); EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", ""); userTaskAdminService.notifyWhenNotStarted(task.getId(), "2s", emailNotification); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = 
runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test(timeout=10000) public void testNotifyNotStartedISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); List<OrganizationalEntity> recipients = new ArrayList<>(); recipients.add(factory.newUser("john")); EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", ""); userTaskAdminService.notifyWhenNotStarted(task.getId(), "PT2S", emailNotification); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test(timeout=10000) public void testNotifyNotCompleted() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Collection<TaskNotification> notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); userTaskService.start(task.getId(), "salaboy"); List<OrganizationalEntity> recipients = new ArrayList<>(); recipients.add(factory.newUser("john")); EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", 
"Administrator", ""); userTaskAdminService.notifyWhenNotCompleted(task.getId(), "2s", emailNotification); notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(1); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); notifications = userTaskAdminService.getTaskNotifications(task.getId(), true); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(1); } @Test(timeout=10000) public void testNotifyNotCompletedISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Collection<TaskNotification> notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); userTaskService.start(task.getId(), "salaboy"); List<OrganizationalEntity> recipients = new ArrayList<>(); recipients.add(factory.newUser("john")); EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", ""); userTaskAdminService.notifyWhenNotCompleted(task.getId(), "PT2S", emailNotification); notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); 
Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(1); CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted(); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); notifications = userTaskAdminService.getTaskNotifications(task.getId(), true); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(1); } @Test(timeout=10000) public void testNotifyNotStartedAndCancel() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Collection<TaskNotification> notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); List<OrganizationalEntity> recipients = new ArrayList<>(); recipients.add(factory.newUser("john")); EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", ""); long notificationId = userTaskAdminService.notifyWhenNotStarted(task.getId(), "2s", emailNotification); notifications = userTaskAdminService.getTaskNotifications(task.getId(), true); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(1); userTaskAdminService.cancelNotification(task.getId(), notificationId); notifications = userTaskAdminService.getTaskNotifications(task.getId(), true); 
Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); } @Test(timeout=10000) public void testNotifyNotStartedAndCancelISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Collection<TaskNotification> notifications = userTaskAdminService.getTaskNotifications(task.getId(), false); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); List<OrganizationalEntity> recipients = new ArrayList<>(); recipients.add(factory.newUser("john")); EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", ""); long notificationId = userTaskAdminService.notifyWhenNotStarted(task.getId(), "PT2S", emailNotification); notifications = userTaskAdminService.getTaskNotifications(task.getId(), true); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(1); userTaskAdminService.cancelNotification(task.getId(), notificationId); notifications = userTaskAdminService.getTaskNotifications(task.getId(), true); Assertions.assertThat(notifications).isNotNull(); Assertions.assertThat(notifications).hasSize(0); } @Test(timeout=10000) public void testReassignNotStartedAndCancel() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Collection<TaskReassignment> reassignments = 
userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); Long reassignmentId = userTaskAdminService.reassignWhenNotStarted(task.getId(), "2s", factory.newUser("john")); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(1); userTaskAdminService.cancelReassignment(task.getId(), reassignmentId); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); } @Test(timeout=10000) public void testReassignNotStartedAndCancelISOFormat() throws Exception { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); Long reassignmentId = userTaskAdminService.reassignWhenNotStarted(task.getId(), "PT2S", factory.newUser("john")); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(1); userTaskAdminService.cancelReassignment(task.getId(), reassignmentId); reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true); Assertions.assertThat(reassignments).isNotNull(); Assertions.assertThat(reassignments).hasSize(0); } @Test public void testGetTaskInstancesAsPotOwners() { String PO_TASK_QUERY = "select ti.taskId, 
ti.activationTime, ti.actualOwner, ti.createdBy, ti.createdOn, ti.deploymentId, " + "ti.description, ti.dueDate, ti.name, ti.parentId, ti.priority, ti.processId, ti.processInstanceId, " + "ti.processSessionId, ti.status, ti.workItemId, oe.id, eo.entity_id " + "from AuditTaskImpl ti " + "left join PeopleAssignments_PotOwners po on ti.taskId = po.task_id " + "left join OrganizationalEntity oe on po.entity_id = oe.id " + " left join PeopleAssignments_ExclOwners eo on ti.taskId = eo.task_id "; SqlQueryDefinition query = new SqlQueryDefinition("getMyTaskInstances", "jdbc/testDS1", Target.PO_TASK); query.setExpression(PO_TASK_QUERY); queryService.registerQuery(query); List<QueryDefinition> queries = queryService.getQueries(new QueryContext()); assertNotNull(queries); assertEquals(1, queries.size()); QueryDefinition registeredQuery = queries.get(0); assertNotNull(registeredQuery); assertEquals(query.getName(), registeredQuery.getName()); assertEquals(query.getSource(), registeredQuery.getSource()); assertEquals(query.getExpression(), registeredQuery.getExpression()); assertEquals(query.getTarget(), registeredQuery.getTarget()); registeredQuery = queryService.getQuery(query.getName()); assertNotNull(registeredQuery); assertEquals(query.getName(), registeredQuery.getName()); assertEquals(query.getSource(), registeredQuery.getSource()); assertEquals(query.getExpression(), registeredQuery.getExpression()); assertEquals(query.getTarget(), registeredQuery.getTarget()); Map<String, Object> params = new HashMap<String, Object>(); params.put("approval_document", "initial content"); processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params); assertNotNull(processInstanceId); identityProvider.setName("notvalid"); List<UserTaskInstanceDesc> taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext()); assertNotNull(taskInstanceLogs); assertEquals(0, taskInstanceLogs.size()); 
identityProvider.setName("salaboy"); taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext()); assertNotNull(taskInstanceLogs); assertEquals(1, taskInstanceLogs.size()); List<TaskSummary> taskSummaries = queryService.query(query.getName(), TaskSummaryQueryMapper.get(), new QueryContext()); assertNotNull(taskSummaries); assertEquals(1, taskSummaries.size()); identityProvider.setName("Administrator"); userTaskAdminService.addPotentialOwners(taskSummaries.get(0).getId(), false, factory.newUser("john")); identityProvider.setName("salaboy"); taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext()); assertNotNull(taskInstanceLogs); assertEquals(1, taskInstanceLogs.size()); taskSummaries = queryService.query(query.getName(), TaskSummaryQueryMapper.get(), new QueryContext()); assertNotNull(taskSummaries); assertEquals(1, taskSummaries.size()); QueryParam[] parameters = QueryParam.getBuilder().append(QueryParam.groupBy(COLUMN_NAME)).append(QueryParam.count(COLUMN_TASKID)).get(); Collection<List<Object>> instances = queryService.query(query.getName(), RawListQueryMapper.get(), new QueryContext(), parameters); assertNotNull(instances); assertEquals(1, instances.size()); List<Object> result = instances.iterator().next(); assertNotNull(result); assertEquals(2, result.size()); // here we have count set to 2 because group by is on name and thus it returns duplicates assertTrue(result.get(1) instanceof Number); assertEquals(2, ((Number) result.get(1)).intValue()); processService.abortProcessInstance(processInstanceId); processInstanceId = null; } @Test public void testGetTaskInstancesAsPotOwnersMultipleInstances() { String PO_TASK_QUERY = "select ti.taskId, ti.activationTime, ti.actualOwner, ti.createdBy, ti.createdOn, ti.deploymentId, " + "ti.description, ti.dueDate, ti.name, ti.parentId, ti.priority, ti.processId, ti.processInstanceId, " + "ti.processSessionId, ti.status, 
ti.workItemId, oe.id, eo.entity_id " + "from AuditTaskImpl ti " + "left join PeopleAssignments_PotOwners po on ti.taskId = po.task_id " + "left join OrganizationalEntity oe on po.entity_id = oe.id " + " left join PeopleAssignments_ExclOwners eo on ti.taskId = eo.task_id "; SqlQueryDefinition query = new SqlQueryDefinition("getMyTaskInstances", "jdbc/testDS1", Target.PO_TASK); query.setExpression(PO_TASK_QUERY); queryService.registerQuery(query); List<QueryDefinition> queries = queryService.getQueries(new QueryContext()); assertNotNull(queries); assertEquals(1, queries.size()); QueryDefinition registeredQuery = queries.get(0); assertNotNull(registeredQuery); assertEquals(query.getName(), registeredQuery.getName()); assertEquals(query.getSource(), registeredQuery.getSource()); assertEquals(query.getExpression(), registeredQuery.getExpression()); assertEquals(query.getTarget(), registeredQuery.getTarget()); registeredQuery = queryService.getQuery(query.getName()); assertNotNull(registeredQuery); assertEquals(query.getName(), registeredQuery.getName()); assertEquals(query.getSource(), registeredQuery.getSource()); assertEquals(query.getExpression(), registeredQuery.getExpression()); assertEquals(query.getTarget(), registeredQuery.getTarget()); Map<String, Object> params = new HashMap<String, Object>(); params.put("approval_document", "initial content"); processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params); assertNotNull(processInstanceId); Long processInstanceId2 = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params); assertNotNull(processInstanceId); assertNotNull(processInstanceId2); identityProvider.setName("notvalid"); List<UserTaskInstanceDesc> taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext()); assertNotNull(taskInstanceLogs); assertEquals(0, taskInstanceLogs.size()); identityProvider.setName("salaboy"); 
taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext()); assertNotNull(taskInstanceLogs); assertEquals(2, taskInstanceLogs.size()); identityProvider.setName("Administrator"); userTaskAdminService.addPotentialOwners(taskInstanceLogs.get(0).getTaskId(), false, factory.newUser("john")); identityProvider.setName("salaboy"); taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext()); assertNotNull(taskInstanceLogs); assertEquals(2, taskInstanceLogs.size()); processService.abortProcessInstance(processInstanceId); processInstanceId = null; processService.abortProcessInstance(processInstanceId2); processInstanceId2 = null; } /* * Helper methods */ @Override protected List<ObjectModel> getTaskListeners() { List<ObjectModel> listeners = super.getTaskListeners(); listeners.add(new ObjectModel("mvel", "org.jbpm.kie.test.util.CountDownListenerFactory.getTask(\"userTaskAdminService\", 1)")); return listeners; } protected boolean createDescriptor() { return true; } }
romartin/jbpm
jbpm-services/jbpm-kie-services/src/test/java/org/jbpm/kie/services/impl/admin/UserTaskAdminServiceImplTest.java
Java
apache-2.0
59,966
import { NgModule } from '@angular/core'; import { FormsModule } from '@angular/forms'; import { HttpModule } from '@angular/http'; import { CommonModule } from '@angular/common'; import { NgbModule } from '@ng-bootstrap/ng-bootstrap'; import { NgJhipsterModule } from 'ng-jhipster'; import { InfiniteScrollModule } from 'angular2-infinite-scroll'; @NgModule({ imports: [ NgbModule.forRoot(), NgJhipsterModule.forRoot({ <%_ if (enableTranslation) { _%> i18nEnabled: true, defaultI18nLang: '<%= nativeLanguage %>' <%_ } _%> }), InfiniteScrollModule ], exports: [ FormsModule, HttpModule, CommonModule, NgbModule, NgJhipsterModule, InfiniteScrollModule ] }) export class <%=angular2AppName%>SharedLibsModule {}
dads-software-brotherhood/sekc
node_modules/generator-jhipster/generators/client/templates/angular/src/main/webapp/app/shared/_shared-libs.module.ts
TypeScript
apache-2.0
854
/* * Copyright 2015 Textocat * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.textocat.textokit.morph.opencorpora.resource; import com.google.common.base.Objects; import com.google.common.collect.ImmutableMap; import com.google.common.collect.LinkedHashMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.textocat.textokit.morph.dictionary.resource.GramModel; import com.textocat.textokit.morph.model.Grammeme; import com.textocat.textokit.morph.model.Lemma; import com.textocat.textokit.morph.model.LemmaLinkType; import com.textocat.textokit.morph.model.Wordform; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.Attributes; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; import java.util.Deque; import java.util.List; import java.util.Map; import java.util.Set; import static com.google.common.collect.Maps.newHashMapWithExpectedSize; import static com.google.common.collect.Sets.newHashSet; class DictionaryXmlHandler extends DefaultHandler { private static final Logger log = LoggerFactory .getLogger(DictionaryXmlHandler.class); private static final String ELEM_DICTIONARY = "dictionary"; private static final String ATTR_DICTIONARY_VERSION = "version"; private static final String ATTR_DICTIONARY_REVISION = "revision"; private static final String ELEM_GRAMMEMS = "grammemes"; private static final String ELEM_GRAMMEM = "grammeme"; private static 
final String ATTR_GRAMMEM_PARENT = "parent"; private static final String ELEM_GRAMMEM_NAME = "name"; private static final String ELEM_GRAMMEM_ALIAS = "alias"; private static final String ELEM_GRAMMEM_DESCRIPTION = "description"; private static final String ELEM_RESTRICTIONS = "restrictions"; private static final String ELEM_LEMMATA = "lemmata"; private static final String ELEM_LEMMA = "lemma"; private static final String ATTR_LEMMA_ID = "id"; @SuppressWarnings("unused") private static final String ATTR_LEMMA_REV = "rev"; private static final String ELEM_LEMMA_NORM = "l"; private static final String ATTR_TEXT = "t"; private static final String ELEM_WF_GRAMMEM = "g"; private static final String ELEM_LEMMA_GRAMMEM = ELEM_WF_GRAMMEM; private static final String ATTR_WF_GRAMMEME_ID = "v"; private static final String ELEM_WORDFORM = "f"; private static final String ELEM_LINK_TYPES = "link_types"; private static final String ELEM_LINK_TYPE = "type"; private static final String ATTR_LINK_TYPE_ID = "id"; private static final String ELEM_LINKS = "links"; private static final String ELEM_LINK = "link"; @SuppressWarnings("unused") private static final String ATTR_LINK_ID = "id"; private static final String ATTR_LINK_FROM = "from"; private static final String ATTR_LINK_TO = "to"; private static final String ATTR_LINK_TYPE = "type"; private abstract class ElementHandler { protected final String qName; private ElementHandler parentHandler; protected ElementHandler(String qName) { if (qName == null) throw new NullPointerException(qName); this.qName = qName; } protected final <EH> EH getParent(Class<EH> parentClass) { return parentClass.cast(parentHandler); } protected final void setParent(ElementHandler parent) { this.parentHandler = parent; } protected abstract void startElement(Attributes attrs); protected abstract void endElement(); protected abstract void characters(String str); /** * @param elem * @return return handler for child element elem */ protected abstract 
ElementHandler getHandler(String elem); } private abstract class ElementHandlerBase extends ElementHandler { private Map<String, ElementHandler> children = ImmutableMap.of(); protected ElementHandlerBase(String qName) { super(qName); } @Override protected final void startElement(Attributes attrs) { children = declareChildren(); if (children != null) { for (ElementHandler child : children.values()) { child.setParent(this); } } startSelf(attrs); } @Override protected final void endElement() { endSelf(); // clear children this.children = null; } @Override protected void characters(String str) { if (!str.trim().isEmpty()) { throw new UnsupportedOperationException(String.format( "Unexpected characters within %s:\n%s", this.qName, str)); } } @Override protected final ElementHandler getHandler(String elem) { return children == null ? null : children.get(elem); } protected abstract void startSelf(Attributes attrs); protected abstract void endSelf(); protected abstract Map<String, ElementHandler> declareChildren(); } private class RootHandler extends ElementHandler { private ElementHandlerBase topHandler; RootHandler(ElementHandlerBase topHandler) { super("%ROOT%"); this.topHandler = topHandler; } @Override protected void startElement(Attributes attrs) { throw new IllegalStateException(); } @Override protected void endElement() { throw new IllegalStateException(); } @Override protected void characters(String str) { if (!str.trim().isEmpty()) { throw new IllegalStateException(); } } @Override protected ElementHandler getHandler(String elem) { if (Objects.equal(elem, topHandler.qName)) { return topHandler; } return null; } } private abstract class NoOpHandler extends ElementHandlerBase { NoOpHandler(String qName) { super(qName); } @Override protected void startSelf(Attributes attrs) { } @Override protected void endSelf() { } } private class IgnoreHandler extends ElementHandler { protected IgnoreHandler(String qName) { super(qName); } @Override protected void 
startElement(Attributes attrs) { // ignore } @Override protected void endElement() { // ignore } @Override protected void characters(String str) { // ignore } @Override protected ElementHandler getHandler(String elem) { // ignore all children IgnoreHandler result = new IgnoreHandler(elem); result.setParent(this); return result; } } private class ReadContentHandler extends NoOpHandler { private String content; ReadContentHandler(String qName) { super(qName); } @Override protected Map<String, ElementHandler> declareChildren() { return null; } @Override protected void characters(String str) { this.content = str.trim(); } String getContent() { return content; } } private class DictionaryElemHandler extends ElementHandlerBase { DictionaryElemHandler() { super(ELEM_DICTIONARY); } @Override protected void startSelf(Attributes attrs) { String version = requiredAttr(attrs, ATTR_DICTIONARY_VERSION); String revision = requiredAttr(attrs, ATTR_DICTIONARY_REVISION); dict.setVersion(version); dict.setRevision(revision); } @Override protected void endSelf() { } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet( new GrammemsHandler(), new IgnoreHandler(ELEM_RESTRICTIONS), new LemmataHandler(), new LinkTypesHandler(), new LinksHandler())); } } private class GrammemsHandler extends NoOpHandler { private ImmutableGramModel.Builder gmBuilder; GrammemsHandler() { super(ELEM_GRAMMEMS); } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet(new GrammemHandler())); } @Override protected void startSelf(Attributes attrs) { gmBuilder = ImmutableGramModel.builder(); } @Override protected void endSelf() { for (GramModelPostProcessor gmPP : gramModelProcessors) { gmPP.postprocess(gmBuilder); } GramModel gm = gmBuilder.build(); dict.setGramModel(gm); super.endSelf(); } } private class LemmataHandler extends NoOpHandler { LemmataHandler() { super(ELEM_LEMMATA); } @Override protected Map<String, ElementHandler> 
declareChildren() { return toMap(newHashSet(new LemmaHandler())); } } private class LinkTypesHandler extends NoOpHandler { LinkTypesHandler() { super(ELEM_LINK_TYPES); } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet(new LinkTypeHandler())); } } private class LinksHandler extends NoOpHandler { LinksHandler() { super(ELEM_LINKS); } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet(new LinkHandler())); } } private class GrammemHandler extends ElementHandlerBase { // state fields private String parentId; GrammemHandler() { super(ELEM_GRAMMEM); } @Override protected void startSelf(Attributes attrs) { parentId = requiredAttr(attrs, ATTR_GRAMMEM_PARENT); } @Override protected void endSelf() { String id = nameHandler.getContent(); if (id == null) { throw new IllegalStateException("Empty grammeme name"); } if (parentId.isEmpty()) { parentId = null; } String alias = aliasHandler.getContent(); String description = descHandler.getContent(); Grammeme gram = new Grammeme(id, parentId, alias, description); getParent(GrammemsHandler.class).gmBuilder.addGrammeme(gram); id = null; parentId = null; // child handlers are cleared by super class } private ReadContentHandler nameHandler; private ReadContentHandler aliasHandler; private ReadContentHandler descHandler; @Override protected Map<String, ElementHandler> declareChildren() { nameHandler = new ReadContentHandler(ELEM_GRAMMEM_NAME); aliasHandler = new ReadContentHandler(ELEM_GRAMMEM_ALIAS); descHandler = new ReadContentHandler(ELEM_GRAMMEM_DESCRIPTION); return toMap(newHashSet(nameHandler, aliasHandler, descHandler)); } } private class LemmaHandler extends ElementHandlerBase { private Lemma.Builder builder; // wf string => set of wf objects private Multimap<String, Wordform> wordforms; LemmaHandler() { super(ELEM_LEMMA); } @Override protected void startSelf(Attributes attrs) { builder = Lemma.builder(dict.getGramModel(), requiredInt(attrs, 
ATTR_LEMMA_ID)); wordforms = LinkedHashMultimap.create(); } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet(new LemmaNormHandler(), new WordformHandler())); } @Override protected void endSelf() { if (postProcessLemma(builder, wordforms)) { Lemma lemma = builder.build(); dict.addLemma(lemma); for (String wfStr : wordforms.keySet()) { for (Wordform wf : wordforms.get(wfStr)) { dict.addWordform(wfStr, wf); } } acceptedLemmaCounter++; } else { rejectedLemmaCounter++; } builder = null; wordforms = null; lemmasParsed++; if (lemmasParsed % 10000 == 0) { log.info("Lemmas have been parsed: {}", lemmasParsed); } } void addWordform(String text, Wordform wf) { if (!wordforms.put(text, wf)) { log.warn("Duplicate pair <{}, {}> at line {}", new Object[]{ text, wf, docLocator.getLineNumber() }); } } } private class LemmaNormHandler extends ElementHandlerBase { LemmaNormHandler() { super(ELEM_LEMMA_NORM); } @Override protected void startSelf(Attributes attrs) { String t = requiredAttr(attrs, ATTR_TEXT); getParent(LemmaHandler.class).builder.setString(t); } @Override protected void endSelf() { } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet(new LemmaGrammemHandler())); } } private class WordformHandler extends ElementHandlerBase { private Wordform.Builder builder; private String text; WordformHandler() { super(ELEM_WORDFORM); } @Override protected void startSelf(Attributes attrs) { int lemmaId = getParent(LemmaHandler.class).builder.getLemmaId(); builder = Wordform.builder(dict.getGramModel(), lemmaId); text = requiredAttr(attrs, ATTR_TEXT); } @Override protected void endSelf() { getParent(LemmaHandler.class).addWordform(text, builder.build()); builder = null; } @Override protected Map<String, ElementHandler> declareChildren() { return toMap(newHashSet(new WordformGrammemHandler())); } } private abstract class GrammemRefHandler extends ElementHandlerBase { protected String gramId; 
GrammemRefHandler(String qName) { super(qName); } @Override protected void startSelf(Attributes attrs) { gramId = requiredAttr(attrs, ATTR_WF_GRAMMEME_ID); } @Override protected Map<String, ElementHandler> declareChildren() { return null; } } private class LemmaGrammemHandler extends GrammemRefHandler { LemmaGrammemHandler() { super(ELEM_LEMMA_GRAMMEM); } @Override protected void endSelf() { getParent(LemmaNormHandler.class).getParent(LemmaHandler.class).builder .addGrammeme(gramId); gramId = null; } } private class WordformGrammemHandler extends GrammemRefHandler { WordformGrammemHandler() { super(ELEM_WF_GRAMMEM); } @Override protected void endSelf() { getParent(WordformHandler.class).builder.addGrammeme(gramId); gramId = null; } } private class LinkTypeHandler extends ElementHandlerBase { private String name; private Short id; LinkTypeHandler() { super(ELEM_LINK_TYPE); } @Override protected void startSelf(Attributes attrs) { id = requiredShort(attrs, ATTR_LINK_TYPE_ID); } @Override protected void endSelf() { if (name == null) { throw new IllegalStateException("Link type element is empty"); } LemmaLinkType lemmaLinkType = new LemmaLinkType(id, name); dict.addLemmaLinkType(lemmaLinkType); id = null; name = null; } @Override protected void characters(String str) { str = str.trim(); if (str.isEmpty()) { throw new IllegalStateException("Empty lemma link name"); } name = str; } @Override protected Map<String, ElementHandler> declareChildren() { return null; } } private class LinkHandler extends ElementHandlerBase { LinkHandler() { super(ELEM_LINK); } @Override protected void startSelf(Attributes attrs) { int fromId = requiredInt(attrs, ATTR_LINK_FROM); int toId = requiredInt(attrs, ATTR_LINK_TO); short linkTypeId = requiredShort(attrs, ATTR_LINK_TYPE); dict.addLemmaLink(fromId, toId, linkTypeId); } @Override protected void endSelf() { } @Override protected Map<String, ElementHandler> declareChildren() { return null; } } // config fields private 
List<LemmaPostProcessor> lemmaPostProcessors = Lists.newLinkedList(); private List<GramModelPostProcessor> gramModelProcessors = Lists.newLinkedList(); // state fields private MorphDictionaryImpl dict; private Deque<String> elemStack = Lists.newLinkedList(); private Deque<ElementHandler> handlerStack = Lists.newLinkedList(); private int lemmasParsed = 0; private int acceptedLemmaCounter; private int rejectedLemmaCounter; private ElementHandler rootHandler; private Locator docLocator; DictionaryXmlHandler(MorphDictionaryImpl dict) { this.dict = dict; } /** * NOTE! Order of LemmaPostProcessor instances may be crucial! * * @param lemmaPP instance to add */ public void addLemmaPostProcessor(LemmaPostProcessor lemmaPP) { lemmaPostProcessors.add(lemmaPP); } public void addGramModelPostProcessor(GramModelPostProcessor gmPP) { gramModelProcessors.add(gmPP); } @Override public void setDocumentLocator(Locator locator) { super.setDocumentLocator(locator); this.docLocator = locator; } @Override public void startDocument() throws SAXException { handlerStack.clear(); elemStack.clear(); lemmasParsed = 0; acceptedLemmaCounter = 0; rejectedLemmaCounter = 0; finished = false; rootHandler = new RootHandler(new DictionaryElemHandler()); handlerStack.addFirst(rootHandler); } @Override public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException { String elem = localName; if (elem.isEmpty()) { elem = qName; } elemStack.addFirst(elem); ElementHandler contextHandler = handlerStack.getFirst(); ElementHandler elemHandler = contextHandler.getHandler(elem); if (elemHandler == null) { throw new IllegalStateException(String.format( "Context handler %s have not returned handler for elem %s", contextHandler, elemStack)); } handlerStack.addFirst(elemHandler); elemHandler.startElement(attributes); } @Override public void endElement(String uri, String localName, String qName) throws SAXException { String elem = localName; if (elem.isEmpty()) { elem 
= qName; } // check doc structure sanity if (!elemStack.getFirst().equals(elem)) { throw new IllegalStateException(String.format( "Elem ending expected: %s, but was: %s", elemStack.getFirst(), elem)); } ElementHandler elemHandler = handlerStack.removeFirst(); if (sb != null) { String txt = sb.toString(); // ! txt.trim(); elemHandler.characters(txt); sb = null; } elemHandler.endElement(); elemStack.removeFirst(); } private StringBuilder sb; @Override public void characters(char[] ch, int start, int length) throws SAXException { if (sb == null) { sb = new StringBuilder(); } sb.append(ch, start, length); } private boolean finished; @Override public void endDocument() throws SAXException { log.info("The dictionary xml parsing is finished. Firing 'dictionaryParsed' event..."); for (LemmaPostProcessor lpp : lemmaPostProcessors) { lpp.dictionaryParsed(dict); } // sanity check if (!elemStack.isEmpty()) { throw new IllegalStateException( "Elem stack is not empty at the end: " + elemStack); } log.info("Lemmas accepted: {}\nLemmas rejected: {}", acceptedLemmaCounter, rejectedLemmaCounter); dict.complete(); finished = true; } public MorphDictionaryImpl getDictionary() { if (!finished) { throw new IllegalStateException("Parsing is not finished"); } return dict; } /** * Invoke lemma post-processors * * @param lemma * @param wfMap mutable map of wordform_string => set_of_wordform_objects * @return true if given lemma must be accepted, false - otherwise. 
*/ private boolean postProcessLemma(Lemma.Builder lemmaBuilder, Multimap<String, Wordform> wfMap) { for (LemmaPostProcessor filter : lemmaPostProcessors) { if (!filter.process(dict, lemmaBuilder, wfMap)) { return false; } } return true; } @SuppressWarnings("unused") private boolean insideElem(String elem) { return elemStack.contains(elem); } private static String requiredAttr(Attributes attrs, String qName) { String result = attrs.getValue(qName); if (result == null) { throw new IllegalStateException("attribute " + qName + " is required"); } return result.trim(); } private static short requiredShort(Attributes attrs, String qName) { String resultStr = requiredAttr(attrs, qName); try { return Short.valueOf(resultStr); } catch (NumberFormatException e) { throw new IllegalStateException(String.format( "Attribute %s value is not number: %s", qName, resultStr), e); } } private static int requiredInt(Attributes attrs, String qName) { String resultStr = requiredAttr(attrs, qName); try { return Integer.valueOf(resultStr); } catch (NumberFormatException e) { throw new IllegalStateException(String.format( "Attribute %s value is not number: %s", qName, resultStr), e); } } private static Map<String, ElementHandler> toMap(Set<? extends ElementHandler> set) { Map<String, ElementHandler> result = newHashMapWithExpectedSize(set.size()); for (ElementHandler handler : set) { result.put(handler.qName, handler); } return result; } }
Denis220795/Textokit
Textokit.Morph.Dictionary.OpenCorpora/src/main/java/com/textocat/textokit/morph/opencorpora/resource/DictionaryXmlHandler.java
Java
apache-2.0
24,558
/** * Copyright 2011 Google Inc. * Copyright 2012 Matt Corallo. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.bitcoin.script; import com.google.bitcoin.core.*; import com.google.bitcoin.crypto.TransactionSignature; import com.google.bitcoin.params.MainNetParams; import com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.spongycastle.crypto.digests.RIPEMD160Digest; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.math.BigInteger; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.*; import static com.google.bitcoin.script.ScriptOpCodes.*; import static com.google.bitcoin.core.Utils.bytesToHexString; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; // TODO: Make this class a superclass with derived classes giving accessor methods for the various common templates. /** * <p>Programs embedded inside transactions that control redemption of payments.</p> * * <p>Bitcoin transactions don't specify what they do directly. 
Instead <a href="https://en.bitcoin.it/wiki/Script">a * small binary stack language</a> is used to define programs that when evaluated return whether the transaction * "accepts" or rejects the other transactions connected to it.</p> * * <p>In SPV mode, scripts are not run, because that would require all transactions to be available and lightweight * clients don't have that data. In full mode, this class is used to run the interpreted language. It also has * static methods for building scripts.</p> */ public class Script { private static final Logger log = LoggerFactory.getLogger(Script.class); public static final long MAX_SCRIPT_ELEMENT_SIZE = 520; // bytes // The program is a set of chunks where each element is either [opcode] or [data, data, data ...] protected List<ScriptChunk> chunks; // Unfortunately, scripts are not ever re-serialized or canonicalized when used in signature hashing. Thus we // must preserve the exact bytes that we read off the wire, along with the parsed form. protected byte[] program; // Creation time of the associated keys in seconds since the epoch. private long creationTimeSeconds; /** Creates an empty script that serializes to nothing. */ private Script() { chunks = Lists.newArrayList(); } // Used from ScriptBuilder. Script(List<ScriptChunk> chunks) { this.chunks = Collections.unmodifiableList(new ArrayList<ScriptChunk>(chunks)); creationTimeSeconds = Utils.currentTimeSeconds(); } /** * Construct a Script that copies and wraps the programBytes array. The array is parsed and checked for syntactic * validity. * @param programBytes Array of program bytes from a transaction. 
*/ public Script(byte[] programBytes) throws ScriptException { program = programBytes; parse(programBytes); creationTimeSeconds = Utils.currentTimeSeconds(); } public Script(byte[] programBytes, long creationTimeSeconds) throws ScriptException { program = programBytes; parse(programBytes); this.creationTimeSeconds = creationTimeSeconds; } public long getCreationTimeSeconds() { return creationTimeSeconds; } public void setCreationTimeSeconds(long creationTimeSeconds) { this.creationTimeSeconds = creationTimeSeconds; } /** * Returns the program opcodes as a string, for example "[1234] DUP HASH160" */ public String toString() { StringBuilder buf = new StringBuilder(); for (ScriptChunk chunk : chunks) { if (chunk.isOpCode()) { buf.append(getOpCodeName(chunk.data[0])); buf.append(" "); } else { // Data chunk buf.append("["); buf.append(bytesToHexString(chunk.data)); buf.append("] "); } } return buf.toString().trim(); } /** Returns the serialized program as a newly created byte array. */ public byte[] getProgram() { try { // Don't round-trip as Satoshi's code doesn't and it would introduce a mismatch. if (program != null) return Arrays.copyOf(program, program.length); ByteArrayOutputStream bos = new ByteArrayOutputStream(); for (ScriptChunk chunk : chunks) { chunk.write(bos); } program = bos.toByteArray(); return program; } catch (IOException e) { throw new RuntimeException(e); // Cannot happen. } } /** Returns an immutable list of the scripts parsed form. 
 */
public List<ScriptChunk> getChunks() {
    return Collections.unmodifiableList(chunks);
}

// Shared chunk instances for the opcodes of the very common pay-to-address form;
// parse() swaps equal chunks for these to save memory.
private static final ScriptChunk INTERN_TABLE[];

static {
    Script examplePayToAddress = ScriptBuilder.createOutputScript(new Address(MainNetParams.get(), new byte[20]));
    // Round-trip through serialization so the chunks come from a parsed program.
    examplePayToAddress = new Script(examplePayToAddress.getProgram());
    INTERN_TABLE = new ScriptChunk[] {
        examplePayToAddress.chunks.get(0),  // DUP
        examplePayToAddress.chunks.get(1),  // HASH160
        examplePayToAddress.chunks.get(3),  // EQUALVERIFY
        examplePayToAddress.chunks.get(4),  // CHECKSIG
    };
}

/**
 * <p>To run a script, first we parse it which breaks it up into chunks representing pushes of data or logical
 * opcodes. Then we can run the parsed chunks.</p>
 *
 * <p>The reason for this split, instead of just interpreting directly, is to make it easier
 * to reach into a programs structure and pull out bits of data without having to run it.
 * This is necessary to render the to/from addresses of transactions in a user interface.
 * The official client does something similar.</p>
 */
private void parse(byte[] program) throws ScriptException {
    chunks = new ArrayList<ScriptChunk>(5);   // Common size.
    ByteArrayInputStream bis = new ByteArrayInputStream(program);
    int initialSize = bis.available();
    while (bis.available() > 0) {
        int startLocationInProgram = initialSize - bis.available();
        int opcode = bis.read();

        long dataToRead = -1;   // -1 means "this byte is an opcode, not a push".
        if (opcode >= 0 && opcode < OP_PUSHDATA1) {
            // Read some bytes of data, where how many is the opcode value itself.
            dataToRead = opcode;
        } else if (opcode == OP_PUSHDATA1) {
            if (bis.available() < 1) throw new ScriptException("Unexpected end of script");
            dataToRead = bis.read();
        } else if (opcode == OP_PUSHDATA2) {
            // Read a short, then read that many bytes of data.
            if (bis.available() < 2) throw new ScriptException("Unexpected end of script");
            dataToRead = bis.read() | (bis.read() << 8);
        } else if (opcode == OP_PUSHDATA4) {
            // Read a uint32, then read that many bytes of data.
            // Though this is allowed, because its value cannot be > 520, it should never actually be used
            if (bis.available() < 4) throw new ScriptException("Unexpected end of script");
            dataToRead = ((long)bis.read()) | (((long)bis.read()) << 8) | (((long)bis.read()) << 16) | (((long)bis.read()) << 24);
        }

        ScriptChunk chunk;
        if (dataToRead == -1) {
            chunk = new ScriptChunk(true, new byte[]{(byte) opcode}, startLocationInProgram);
        } else {
            if (dataToRead > bis.available())
                throw new ScriptException("Push of data element that is larger than remaining data");
            byte[] data = new byte[(int)dataToRead];
            checkState(dataToRead == 0 || bis.read(data, 0, (int)dataToRead) == dataToRead);
            chunk = new ScriptChunk(false, data, startLocationInProgram);
        }
        // Save some memory by eliminating redundant copies of the same chunk objects. INTERN_TABLE can be null
        // here because this method is called whilst setting it up.
        if (INTERN_TABLE != null) {
            for (ScriptChunk c : INTERN_TABLE) {
                if (c.equals(chunk)) chunk = c;
            }
        }
        chunks.add(chunk);
    }
}

/**
 * Returns true if this script is of the form <sig> OP_CHECKSIG. This form was originally intended for transactions
 * where the peers talked to each other directly via TCP/IP, but has fallen out of favor with time due to that mode
 * of operation being susceptible to man-in-the-middle attacks. It is still used in coinbase outputs and can be
 * useful more exotic types of transaction, but today most payments are to addresses.
 */
public boolean isSentToRawPubKey() {
    return chunks.size() == 2 &&
           chunks.get(1).equalsOpCode(OP_CHECKSIG) &&
           !chunks.get(0).isOpCode() &&
           chunks.get(0).data.length > 1;
}

/**
 * Returns true if this script is of the form DUP HASH160 <pubkey hash> EQUALVERIFY CHECKSIG, ie, payment to an
 * address like 1VayNert3x1KzbpzMGt2qdqrAThiRovi8.
This form was originally intended for the case where you wish
 * to send somebody money with a written code because their node is offline, but over time has become the standard
 * way to make payments due to the short and recognizable base58 form addresses come in.
 */
public boolean isSentToAddress() {
    return chunks.size() == 5 &&
           chunks.get(0).equalsOpCode(OP_DUP) &&
           chunks.get(1).equalsOpCode(OP_HASH160) &&
           chunks.get(2).data.length == Address.LENGTH &&
           chunks.get(3).equalsOpCode(OP_EQUALVERIFY) &&
           chunks.get(4).equalsOpCode(OP_CHECKSIG);
}

/**
 * Returns true if this script is of the form OP_HASH160 <scriptHash> OP_EQUAL, ie, payment to an
 * address like 35b9vsyH1KoFT5a5KtrKusaCcPLkiSo1tU. This form was codified as part of BIP13 and BIP16,
 * for pay to script hash type addresses.
 */
public boolean isSentToP2SH() {
    return chunks.size() == 3 &&
           chunks.get(0).equalsOpCode(OP_HASH160) &&
           chunks.get(1).data.length == Address.LENGTH &&
           chunks.get(2).equalsOpCode(OP_EQUAL);
}

/**
 * If a program matches the standard template DUP HASH160 <pubkey hash> EQUALVERIFY CHECKSIG
 * then this function retrieves the third element, otherwise it throws a ScriptException.<p>
 *
 * This is useful for fetching the destination address of a transaction.
 */
public byte[] getPubKeyHash() throws ScriptException {
    if (isSentToAddress())
        return chunks.get(2).data;
    else if (isSentToP2SH())
        return chunks.get(1).data;
    else
        throw new ScriptException("Script not in the standard scriptPubKey form");
}

/**
 * Returns the public key in this script. If a script contains two constants and nothing else, it is assumed to
 * be a scriptSig (input) for a pay-to-address output and the second constant is returned (the first is the
 * signature). If a script contains a constant and an OP_CHECKSIG opcode, the constant is returned as it is
 * assumed to be a direct pay-to-key scriptPubKey (output) and the first constant is the public key.
 *
 * @throws ScriptException if the script is none of the named forms.
 */
public byte[] getPubKey() throws ScriptException {
    if (chunks.size() != 2) {
        throw new ScriptException("Script not of right size, expecting 2 but got " + chunks.size());
    }
    if (chunks.get(0).data.length > 2 && chunks.get(1).data.length > 2) {
        // If we have two large constants assume the input to a pay-to-address output.
        return chunks.get(1).data;
    } else if (chunks.get(1).data.length == 1 && chunks.get(1).equalsOpCode(OP_CHECKSIG) && chunks.get(0).data.length > 2) {
        // A large constant followed by an OP_CHECKSIG is the key.
        return chunks.get(0).data;
    } else {
        throw new ScriptException("Script did not match expected form: " + toString());
    }
}

/**
 * For 2-element [input] scripts assumes that the paid-to-address can be derived from the public key.
 * The concept of a "from address" isn't well defined in Bitcoin and you should not assume the sender of a
 * transaction can actually receive coins on it. This method may be removed in future.
 */
@Deprecated
public Address getFromAddress(NetworkParameters params) throws ScriptException {
    return new Address(params, Utils.sha256hash160(getPubKey()));
}

/**
 * Gets the destination address from this script, if it's in the required form (see getPubKey).
 */
public Address getToAddress(NetworkParameters params) throws ScriptException {
    if (isSentToAddress())
        return new Address(params, getPubKeyHash());
    else if (isSentToP2SH())
        return Address.fromP2SHScript(params, this);
    else
        throw new ScriptException("Cannot cast this script to a pay-to-address type");
}

////////////////////// Interface for writing scripts from scratch ////////////////////////////////

/**
 * Writes out the given byte buffer to the output stream with the correct opcode prefix
 * To write an integer call writeBytes(out, Utils.reverseBytes(Utils.encodeMPI(val, false)));
 */
public static void writeBytes(OutputStream os, byte[] buf) throws IOException {
    if (buf.length < OP_PUSHDATA1) {
        // Small pushes: the length byte itself is the opcode.
        os.write(buf.length);
        os.write(buf);
    } else if (buf.length < 256) {
        os.write(OP_PUSHDATA1);
        os.write(buf.length);
        os.write(buf);
    } else if (buf.length < 65536) {
        os.write(OP_PUSHDATA2);
        // Length is encoded little-endian.
        os.write(0xFF & (buf.length));
        os.write(0xFF & (buf.length >> 8));
        os.write(buf);
    } else {
        // OP_PUSHDATA4 writing is not implemented.
        throw new RuntimeException("Unimplemented");
    }
}

/** Creates a program that requires at least N of the given keys to sign, using OP_CHECKMULTISIG. */
public static byte[] createMultiSigOutputScript(int threshold, List<ECKey> pubkeys) {
    checkArgument(threshold > 0);
    checkArgument(threshold <= pubkeys.size());
    checkArgument(pubkeys.size() <= 16);  // That's the max we can represent with a single opcode.
    if (pubkeys.size() > 3) {
        log.warn("Creating a multi-signature output that is non-standard: {} pubkeys, should be <= 3", pubkeys.size());
    }
    try {
        ByteArrayOutputStream bits = new ByteArrayOutputStream();
        bits.write(encodeToOpN(threshold));
        for (ECKey key : pubkeys) {
            writeBytes(bits, key.getPubKey());
        }
        bits.write(encodeToOpN(pubkeys.size()));
        bits.write(OP_CHECKMULTISIG);
        return bits.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);  // Cannot happen.
    }
}

// Builds a scriptSig that pushes a signature followed by a public key (spends a pay-to-address output).
public static byte[] createInputScript(byte[] signature, byte[] pubkey) {
    try {
        // TODO: Do this by creating a Script *first* then having the script reassemble itself into bytes.
        ByteArrayOutputStream bits = new UnsafeByteArrayOutputStream(signature.length + pubkey.length + 2);
        writeBytes(bits, signature);
        writeBytes(bits, pubkey);
        return bits.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

// Builds a scriptSig that pushes only a signature (spends a pay-to-pubkey output).
public static byte[] createInputScript(byte[] signature) {
    try {
        // TODO: Do this by creating a Script *first* then having the script reassemble itself into bytes.
        ByteArrayOutputStream bits = new UnsafeByteArrayOutputStream(signature.length + 2);
        writeBytes(bits, signature);
        return bits.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

////////////////////// Interface used during verification of transactions/blocks ////////////////////////////////

// Counts signature operations in the given chunks. When "accurate", an OP_CHECKMULTISIG preceded by an
// OP_N opcode counts as N sig ops; otherwise it counts as the worst-case 20.
private static int getSigOpCount(List<ScriptChunk> chunks, boolean accurate) throws ScriptException {
    int sigOps = 0;
    int lastOpCode = OP_INVALIDOPCODE;
    for (ScriptChunk chunk : chunks) {
        if (chunk.isOpCode()) {
            int opcode = 0xFF & chunk.data[0];
            switch (opcode) {
            case OP_CHECKSIG:
            case OP_CHECKSIGVERIFY:
                sigOps++;
                break;
            case OP_CHECKMULTISIG:
            case OP_CHECKMULTISIGVERIFY:
                if (accurate && lastOpCode >= OP_1 && lastOpCode <= OP_16)
                    sigOps += decodeFromOpN(lastOpCode);
                else
                    sigOps += 20;
                break;
            default:
                break;
            }
            lastOpCode = opcode;
        }
    }
    return sigOps;
}

/**
 * Converts an opcode to its int representation (including OP_1NEGATE and OP_0/OP_FALSE)
 * @throws IllegalArgumentException If the opcode is not an OP_N opcode
 */
public static int decodeFromOpN(byte opcode) throws IllegalArgumentException {
    return decodeFromOpN((int)opcode);
}

static int decodeFromOpN(int opcode) {
    checkArgument((opcode == OP_0 || opcode == OP_1NEGATE) || (opcode >= OP_1 && opcode <= OP_16),
            "decodeFromOpN called on non OP_N opcode");
    if (opcode == OP_0)
        return 0;
    else if (opcode == OP_1NEGATE)
        return -1;
    else
        return opcode + 1 - OP_1;
}

// Inverse of decodeFromOpN: maps -1..16 to the corresponding small-number opcode.
static int encodeToOpN(int value) {
    checkArgument(value >= -1 && value <= 16,
            "encodeToOpN called for " + value + " which we cannot encode in an opcode.");
    if (value == 0)
        return OP_0;
    else if (value == -1)
        return OP_1NEGATE;
    else
        return value - 1 + OP_1;
}

/**
 * Gets the count of regular SigOps in the script program (counting multisig ops as 20)
 */
public static int getSigOpCount(byte[] program) throws ScriptException {
    Script script = new Script();
    try {
        script.parse(program);
    } catch (ScriptException e) {
        // Ignore errors and count up to the parse-able length
    }
    return getSigOpCount(script.chunks, false);
}

/**
 * Gets the count of P2SH Sig Ops in the Script scriptSig
 */
public static long getP2SHSigOpCount(byte[] scriptSig) throws ScriptException {
    Script script = new Script();
    try {
        script.parse(scriptSig);
    } catch (ScriptException e) {
        // Ignore errors and count up to the parse-able length
    }
    // The last data push of a P2SH scriptSig is the redeem script; count its sig ops accurately.
    for (int i = script.chunks.size() - 1; i >= 0; i--)
        if (!script.chunks.get(i).isOpCode()) {
            Script subScript = new Script();
            subScript.parse(script.chunks.get(i).data);
            return getSigOpCount(subScript.chunks, true);
        }
    return 0;
}

/**
 * <p>Whether or not this is a scriptPubKey representing a pay-to-script-hash output. In such outputs, the logic that
 * controls reclamation is not actually in the output at all. Instead there's just a hash, and it's up to the
 * spending input to provide a program matching that hash. This rule is "soft enforced" by the network as it does
 * not exist in Satoshis original implementation. It means blocks containing P2SH transactions that don't match
 * correctly are considered valid, but won't be mined upon, so they'll be rapidly re-orgd out of the chain. This
 * logic is defined by <a href="https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki">BIP 16</a>.</p>
 *
 * <p>bitcoinj does not support creation of P2SH transactions today.
The goal of P2SH is to allow short addresses * even for complex scripts (eg, multi-sig outputs) so they are convenient to work with in things like QRcodes or * with copy/paste, and also to minimize the size of the unspent output set (which improves performance of the * Bitcoin system).</p> */ public boolean isPayToScriptHash() { // We have to check against the serialized form because BIP16 defines a P2SH output using an exact byte // template, not the logical program structure. Thus you can have two programs that look identical when // printed out but one is a P2SH script and the other isn't! :( byte[] program = getProgram(); return program.length == 23 && (program[0] & 0xff) == OP_HASH160 && (program[1] & 0xff) == 0x14 && (program[22] & 0xff) == OP_EQUAL; } /** * Returns whether this script matches the format used for multisig outputs: [n] [keys...] [m] CHECKMULTISIG */ public boolean isSentToMultiSig() { if (chunks.size() < 4) return false; ScriptChunk chunk = chunks.get(chunks.size() - 1); // Must end in OP_CHECKMULTISIG[VERIFY]. if (!chunk.isOpCode()) return false; if (!(chunk.equalsOpCode(OP_CHECKMULTISIG) || chunk.equalsOpCode(OP_CHECKMULTISIGVERIFY))) return false; try { // Second to last chunk must be an OP_N opcode and there should be that many data chunks (keys). ScriptChunk m = chunks.get(chunks.size() - 2); if (!m.isOpCode()) return false; int numKeys = decodeFromOpN(m.data[0]); if (numKeys < 1 || chunks.size() != 3 + numKeys) return false; for (int i = 1; i < chunks.size() - 2; i++) { if (chunks.get(i).isOpCode()) return false; } // First chunk must be an OP_N opcode too. if (decodeFromOpN(chunks.get(0).data[0]) < 1) return false; } catch (IllegalStateException e) { return false; // Not an OP_N opcode. 
} return true; } private static boolean equalsRange(byte[] a, int start, byte[] b) { if (start + b.length > a.length) return false; for (int i = 0; i < b.length; i++) if (a[i + start] != b[i]) return false; return true; } /** * Returns the script bytes of inputScript with all instances of the specified script object removed */ public static byte[] removeAllInstancesOf(byte[] inputScript, byte[] chunkToRemove) { // We usually don't end up removing anything UnsafeByteArrayOutputStream bos = new UnsafeByteArrayOutputStream(inputScript.length); int cursor = 0; while (cursor < inputScript.length) { boolean skip = equalsRange(inputScript, cursor, chunkToRemove); int opcode = inputScript[cursor++] & 0xFF; int additionalBytes = 0; if (opcode >= 0 && opcode < OP_PUSHDATA1) { additionalBytes = opcode; } else if (opcode == OP_PUSHDATA1) { additionalBytes = (0xFF & inputScript[cursor]) + 1; } else if (opcode == OP_PUSHDATA2) { additionalBytes = ((0xFF & inputScript[cursor]) | ((0xFF & inputScript[cursor+1]) << 8)) + 2; } else if (opcode == OP_PUSHDATA4) { additionalBytes = ((0xFF & inputScript[cursor]) | ((0xFF & inputScript[cursor+1]) << 8) | ((0xFF & inputScript[cursor+1]) << 16) | ((0xFF & inputScript[cursor+1]) << 24)) + 4; } if (!skip) { try { bos.write(opcode); bos.write(Arrays.copyOfRange(inputScript, cursor, cursor + additionalBytes)); } catch (IOException e) { throw new RuntimeException(e); } } cursor += additionalBytes; } return bos.toByteArray(); } /** * Returns the script bytes of inputScript with all instances of the given op code removed */ public static byte[] removeAllInstancesOfOp(byte[] inputScript, int opCode) { return removeAllInstancesOf(inputScript, new byte[] {(byte)opCode}); } ////////////////////// Script verification and helpers //////////////////////////////// private static boolean castToBool(byte[] data) { for (int i = 0; i < data.length; i++) { // "Can be negative zero" -reference client (see OpenSSL's BN_bn2mpi) if (data[i] != 0) return !(i == 
data.length - 1 && (data[i] & 0xFF) == 0x80);
    }
    return false;
}

// Interprets a stack element as a script number; limited to 4 bytes as in the reference client.
private static BigInteger castToBigInteger(byte[] chunk) throws ScriptException {
    if (chunk.length > 4)
        throw new ScriptException("Script attempted to use an integer larger than 4 bytes");
    return Utils.decodeMPI(Utils.reverseBytes(chunk), false);
}

// Core script interpreter: executes the parsed chunks of the given script against the provided stack.
// Consensus-critical — mirrors the reference client's behavior, including its quirks.
private static void executeScript(Transaction txContainingThis, long index,
                                  Script script, LinkedList<byte[]> stack) throws ScriptException {
    int opCount = 0;
    int lastCodeSepLocation = 0;

    LinkedList<byte[]> altstack = new LinkedList<byte[]>();
    // One entry per enclosing OP_IF/OP_NOTIF; false means "currently in a non-taken branch".
    LinkedList<Boolean> ifStack = new LinkedList<Boolean>();

    for (ScriptChunk chunk : script.chunks) {
        boolean shouldExecute = !ifStack.contains(false);

        if (!chunk.isOpCode()) {
            if (chunk.data.length > MAX_SCRIPT_ELEMENT_SIZE)
                throw new ScriptException("Attempted to push a data string larger than 520 bytes");
            if (!shouldExecute)
                continue;
            stack.add(chunk.data);
        } else {
            int opcode = 0xFF & chunk.data[0];
            if (opcode > OP_16) {
                opCount++;
                if (opCount > 201)
                    throw new ScriptException("More script operations than is allowed");
            }

            if (opcode == OP_VERIF || opcode == OP_VERNOTIF)
                throw new ScriptException("Script included OP_VERIF or OP_VERNOTIF");

            // Disabled opcodes fail the script even inside a non-executed branch.
            if (opcode == OP_CAT || opcode == OP_SUBSTR || opcode == OP_LEFT || opcode == OP_RIGHT ||
                opcode == OP_INVERT || opcode == OP_AND || opcode == OP_OR || opcode == OP_XOR ||
                opcode == OP_2MUL || opcode == OP_2DIV || opcode == OP_MUL || opcode == OP_DIV ||
                opcode == OP_MOD || opcode == OP_LSHIFT || opcode == OP_RSHIFT)
                throw new ScriptException("Script included a disabled Script Op.");

            // Flow-control opcodes are processed regardless of shouldExecute so ifStack stays balanced.
            switch (opcode) {
            case OP_IF:
                if (!shouldExecute) {
                    ifStack.add(false);
                    continue;
                }
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_IF on an empty stack");
                ifStack.add(castToBool(stack.pollLast()));
                continue;
            case OP_NOTIF:
                if (!shouldExecute) {
                    ifStack.add(false);
                    continue;
                }
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_NOTIF on an empty stack");
                ifStack.add(!castToBool(stack.pollLast()));
                continue;
            case OP_ELSE:
                if (ifStack.isEmpty())
                    throw new ScriptException("Attempted OP_ELSE without OP_IF/NOTIF");
                ifStack.add(!ifStack.pollLast());
                continue;
            case OP_ENDIF:
                if (ifStack.isEmpty())
                    throw new ScriptException("Attempted OP_ENDIF without OP_IF/NOTIF");
                ifStack.pollLast();
                continue;
            }

            if (!shouldExecute)
                continue;

            switch(opcode) {
            // OP_0 is no opcode
            case OP_1NEGATE:
                stack.add(Utils.reverseBytes(Utils.encodeMPI(BigInteger.ONE.negate(), false)));
                break;
            case OP_1:
            case OP_2:
            case OP_3:
            case OP_4:
            case OP_5:
            case OP_6:
            case OP_7:
            case OP_8:
            case OP_9:
            case OP_10:
            case OP_11:
            case OP_12:
            case OP_13:
            case OP_14:
            case OP_15:
            case OP_16:
                stack.add(Utils.reverseBytes(Utils.encodeMPI(BigInteger.valueOf(decodeFromOpN(opcode)), false)));
                break;
            case OP_NOP:
                break;
            case OP_VERIFY:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_VERIFY on an empty stack");
                if (!castToBool(stack.pollLast()))
                    throw new ScriptException("OP_VERIFY failed");
                break;
            case OP_RETURN:
                throw new ScriptException("Script called OP_RETURN");
            case OP_TOALTSTACK:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_TOALTSTACK on an empty stack");
                altstack.add(stack.pollLast());
                break;
            case OP_FROMALTSTACK:
                if (altstack.size() < 1)
                    // NOTE(review): message says OP_TOALTSTACK; looks like a copy-paste slip — string kept as-is.
                    throw new ScriptException("Attempted OP_TOALTSTACK on an empty altstack");
                stack.add(altstack.pollLast());
                break;
            case OP_2DROP:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_2DROP on a stack with size < 2");
                stack.pollLast();
                stack.pollLast();
                break;
            case OP_2DUP:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_2DUP on a stack with size < 2");
                Iterator<byte[]> it2DUP = stack.descendingIterator();
                byte[] OP2DUPtmpChunk2 = it2DUP.next();
                stack.add(it2DUP.next());
                stack.add(OP2DUPtmpChunk2);
                break;
            case OP_3DUP:
                if (stack.size() < 3)
                    throw new ScriptException("Attempted OP_3DUP on a stack with size < 3");
                Iterator<byte[]> it3DUP = stack.descendingIterator();
                byte[] OP3DUPtmpChunk3 = it3DUP.next();
                byte[] OP3DUPtmpChunk2 = it3DUP.next();
                stack.add(it3DUP.next());
                stack.add(OP3DUPtmpChunk2);
                stack.add(OP3DUPtmpChunk3);
                break;
            case OP_2OVER:
                if (stack.size() < 4)
                    throw new ScriptException("Attempted OP_2OVER on a stack with size < 4");
                Iterator<byte[]> it2OVER = stack.descendingIterator();
                it2OVER.next();
                it2OVER.next();
                byte[] OP2OVERtmpChunk2 = it2OVER.next();
                stack.add(it2OVER.next());
                stack.add(OP2OVERtmpChunk2);
                break;
            case OP_2ROT:
                if (stack.size() < 6)
                    throw new ScriptException("Attempted OP_2ROT on a stack with size < 6");
                byte[] OP2ROTtmpChunk6 = stack.pollLast();
                byte[] OP2ROTtmpChunk5 = stack.pollLast();
                byte[] OP2ROTtmpChunk4 = stack.pollLast();
                byte[] OP2ROTtmpChunk3 = stack.pollLast();
                byte[] OP2ROTtmpChunk2 = stack.pollLast();
                byte[] OP2ROTtmpChunk1 = stack.pollLast();
                stack.add(OP2ROTtmpChunk3);
                stack.add(OP2ROTtmpChunk4);
                stack.add(OP2ROTtmpChunk5);
                stack.add(OP2ROTtmpChunk6);
                stack.add(OP2ROTtmpChunk1);
                stack.add(OP2ROTtmpChunk2);
                break;
            case OP_2SWAP:
                if (stack.size() < 4)
                    throw new ScriptException("Attempted OP_2SWAP on a stack with size < 4");
                byte[] OP2SWAPtmpChunk4 = stack.pollLast();
                byte[] OP2SWAPtmpChunk3 = stack.pollLast();
                byte[] OP2SWAPtmpChunk2 = stack.pollLast();
                byte[] OP2SWAPtmpChunk1 = stack.pollLast();
                stack.add(OP2SWAPtmpChunk3);
                stack.add(OP2SWAPtmpChunk4);
                stack.add(OP2SWAPtmpChunk1);
                stack.add(OP2SWAPtmpChunk2);
                break;
            case OP_IFDUP:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_IFDUP on an empty stack");
                if (castToBool(stack.getLast()))
                    stack.add(stack.getLast());
                break;
            case OP_DEPTH:
                stack.add(Utils.reverseBytes(Utils.encodeMPI(BigInteger.valueOf(stack.size()), false)));
                break;
            case OP_DROP:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_DROP on an empty stack");
                stack.pollLast();
                break;
            case OP_DUP:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_DUP on an empty stack");
                stack.add(stack.getLast());
                break;
            case OP_NIP:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_NIP on a stack with size < 2");
                byte[] OPNIPtmpChunk = stack.pollLast();
                stack.pollLast();
                stack.add(OPNIPtmpChunk);
                break;
            case OP_OVER:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_OVER on a stack with size < 2");
                Iterator<byte[]> itOVER = stack.descendingIterator();
                itOVER.next();
                stack.add(itOVER.next());
                break;
            case OP_PICK:
            case OP_ROLL:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_PICK/OP_ROLL on an empty stack");
                long val = castToBigInteger(stack.pollLast()).longValue();
                if (val < 0 || val >= stack.size())
                    throw new ScriptException("OP_PICK/OP_ROLL attempted to get data deeper than stack size");
                Iterator<byte[]> itPICK = stack.descendingIterator();
                for (long i = 0; i < val; i++)
                    itPICK.next();
                byte[] OPROLLtmpChunk = itPICK.next();
                if (opcode == OP_ROLL)
                    itPICK.remove();
                stack.add(OPROLLtmpChunk);
                break;
            case OP_ROT:
                if (stack.size() < 3)
                    throw new ScriptException("Attempted OP_ROT on a stack with size < 3");
                byte[] OPROTtmpChunk3 = stack.pollLast();
                byte[] OPROTtmpChunk2 = stack.pollLast();
                byte[] OPROTtmpChunk1 = stack.pollLast();
                stack.add(OPROTtmpChunk2);
                stack.add(OPROTtmpChunk3);
                stack.add(OPROTtmpChunk1);
                break;
            case OP_SWAP:
            case OP_TUCK:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_SWAP on a stack with size < 2");
                byte[] OPSWAPtmpChunk2 = stack.pollLast();
                byte[] OPSWAPtmpChunk1 = stack.pollLast();
                stack.add(OPSWAPtmpChunk2);
                stack.add(OPSWAPtmpChunk1);
                if (opcode == OP_TUCK)
                    stack.add(OPSWAPtmpChunk2);
                break;
            case OP_CAT:
            case OP_SUBSTR:
            case OP_LEFT:
            case OP_RIGHT:
                throw new ScriptException("Attempted to use disabled Script Op.");
            case OP_SIZE:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_SIZE on an empty stack");
                stack.add(Utils.reverseBytes(Utils.encodeMPI(BigInteger.valueOf(stack.getLast().length), false)));
                break;
            case OP_INVERT:
            case OP_AND:
            case OP_OR:
            case OP_XOR:
                throw new ScriptException("Attempted to use disabled Script Op.");
            case OP_EQUAL:
                if (stack.size() < 2)
                    // NOTE(review): message says OP_EQUALVERIFY; looks copy-pasted — string kept as-is.
                    throw new ScriptException("Attempted OP_EQUALVERIFY on a stack with size < 2");
                stack.add(Arrays.equals(stack.pollLast(), stack.pollLast()) ? new byte[] {1} : new byte[] {0});
                break;
            case OP_EQUALVERIFY:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_EQUALVERIFY on a stack with size < 2");
                if (!Arrays.equals(stack.pollLast(), stack.pollLast()))
                    throw new ScriptException("OP_EQUALVERIFY: non-equal data");
                break;
            case OP_1ADD:
            case OP_1SUB:
            case OP_NEGATE:
            case OP_ABS:
            case OP_NOT:
            case OP_0NOTEQUAL:
                // Unary numeric ops: pop one number, push the result.
                if (stack.size() < 1)
                    throw new ScriptException("Attempted a numeric op on an empty stack");
                BigInteger numericOPnum = castToBigInteger(stack.pollLast());

                switch (opcode) {
                case OP_1ADD:
                    numericOPnum = numericOPnum.add(BigInteger.ONE);
                    break;
                case OP_1SUB:
                    numericOPnum = numericOPnum.subtract(BigInteger.ONE);
                    break;
                case OP_NEGATE:
                    numericOPnum = numericOPnum.negate();
                    break;
                case OP_ABS:
                    if (numericOPnum.signum() < 0)
                        numericOPnum = numericOPnum.negate();
                    break;
                case OP_NOT:
                    if (numericOPnum.equals(BigInteger.ZERO))
                        numericOPnum = BigInteger.ONE;
                    else
                        numericOPnum = BigInteger.ZERO;
                    break;
                case OP_0NOTEQUAL:
                    if (numericOPnum.equals(BigInteger.ZERO))
                        numericOPnum = BigInteger.ZERO;
                    else
                        numericOPnum = BigInteger.ONE;
                    break;
                default:
                    throw new AssertionError("Unreachable");
                }

                stack.add(Utils.reverseBytes(Utils.encodeMPI(numericOPnum, false)));
                break;
            case OP_2MUL:
            case OP_2DIV:
                throw new ScriptException("Attempted to use disabled Script Op.");
            case OP_ADD:
            case OP_SUB:
            case OP_BOOLAND:
            case OP_BOOLOR:
            case OP_NUMEQUAL:
            case OP_NUMNOTEQUAL:
            case OP_LESSTHAN:
            case OP_GREATERTHAN:
            case OP_LESSTHANOREQUAL:
            case OP_GREATERTHANOREQUAL:
            case OP_MIN:
            case OP_MAX:
                // Binary numeric ops: pop two numbers (num2 is the top of stack), push the result.
                if (stack.size() < 2)
                    throw new ScriptException("Attempted a numeric op on a stack with size < 2");
                BigInteger numericOPnum2 = castToBigInteger(stack.pollLast());
                BigInteger numericOPnum1 = castToBigInteger(stack.pollLast());

                BigInteger numericOPresult;
                switch (opcode)
                {
                case OP_ADD:
                    numericOPresult = numericOPnum1.add(numericOPnum2);
                    break;
                case OP_SUB:
                    numericOPresult = numericOPnum1.subtract(numericOPnum2);
                    break;
                case OP_BOOLAND:
                    if (!numericOPnum1.equals(BigInteger.ZERO) && !numericOPnum2.equals(BigInteger.ZERO))
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_BOOLOR:
                    if (!numericOPnum1.equals(BigInteger.ZERO) || !numericOPnum2.equals(BigInteger.ZERO))
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_NUMEQUAL:
                    if (numericOPnum1.equals(numericOPnum2))
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_NUMNOTEQUAL:
                    if (!numericOPnum1.equals(numericOPnum2))
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_LESSTHAN:
                    if (numericOPnum1.compareTo(numericOPnum2) < 0)
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_GREATERTHAN:
                    if (numericOPnum1.compareTo(numericOPnum2) > 0)
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_LESSTHANOREQUAL:
                    if (numericOPnum1.compareTo(numericOPnum2) <= 0)
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_GREATERTHANOREQUAL:
                    if (numericOPnum1.compareTo(numericOPnum2) >= 0)
                        numericOPresult = BigInteger.ONE;
                    else
                        numericOPresult = BigInteger.ZERO;
                    break;
                case OP_MIN:
                    if (numericOPnum1.compareTo(numericOPnum2) < 0)
                        numericOPresult = numericOPnum1;
                    else
                        numericOPresult = numericOPnum2;
                    break;
                case OP_MAX:
                    if (numericOPnum1.compareTo(numericOPnum2) > 0)
                        numericOPresult = numericOPnum1;
                    else
                        numericOPresult = numericOPnum2;
                    break;
                default:
                    throw new RuntimeException("Opcode switched at runtime?");
                }

                stack.add(Utils.reverseBytes(Utils.encodeMPI(numericOPresult, false)));
                break;
            case OP_MUL:
            case OP_DIV:
            case OP_MOD:
            case OP_LSHIFT:
            case OP_RSHIFT:
                throw new ScriptException("Attempted to use disabled Script Op.");
            case OP_NUMEQUALVERIFY:
                if (stack.size() < 2)
                    throw new ScriptException("Attempted OP_NUMEQUALVERIFY on a stack with size < 2");
                BigInteger OPNUMEQUALVERIFYnum2 = castToBigInteger(stack.pollLast());
                BigInteger OPNUMEQUALVERIFYnum1 = castToBigInteger(stack.pollLast());

                if (!OPNUMEQUALVERIFYnum1.equals(OPNUMEQUALVERIFYnum2))
                    throw new ScriptException("OP_NUMEQUALVERIFY failed");
                break;
            case OP_WITHIN:
                if (stack.size() < 3)
                    throw new ScriptException("Attempted OP_WITHIN on a stack with size < 3");
                BigInteger OPWITHINnum3 = castToBigInteger(stack.pollLast());
                BigInteger OPWITHINnum2 = castToBigInteger(stack.pollLast());
                BigInteger OPWITHINnum1 = castToBigInteger(stack.pollLast());
                // Pushes 1 if num2 <= num1 < num3 (half-open interval), else 0.
                if (OPWITHINnum2.compareTo(OPWITHINnum1) <= 0 && OPWITHINnum1.compareTo(OPWITHINnum3) < 0)
                    stack.add(Utils.reverseBytes(Utils.encodeMPI(BigInteger.ONE, false)));
                else
                    stack.add(Utils.reverseBytes(Utils.encodeMPI(BigInteger.ZERO, false)));
                break;
            case OP_RIPEMD160:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_RIPEMD160 on an empty stack");
                RIPEMD160Digest digest = new RIPEMD160Digest();
                byte[] dataToHash = stack.pollLast();
                digest.update(dataToHash, 0, dataToHash.length);
                byte[] ripmemdHash = new byte[20];
                digest.doFinal(ripmemdHash, 0);
                stack.add(ripmemdHash);
                break;
            case OP_SHA1:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_SHA1 on an empty stack");
                try {
                    stack.add(MessageDigest.getInstance("SHA-1").digest(stack.pollLast()));
                } catch (NoSuchAlgorithmException e) {
                    throw new RuntimeException(e);  // Cannot happen.
                }
                break;
            case OP_SHA256:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_SHA256 on an empty stack");
                try {
                    stack.add(MessageDigest.getInstance("SHA-256").digest(stack.pollLast()));
                } catch (NoSuchAlgorithmException e) {
                    throw new RuntimeException(e);  // Cannot happen.
                }
                break;
            case OP_HASH160:
                if (stack.size() < 1)
                    throw new ScriptException("Attempted OP_HASH160 on an empty stack");
                stack.add(Utils.sha256hash160(stack.pollLast()));
                break;
            case OP_HASH256:
                if (stack.size() < 1)
                    // NOTE(review): message says OP_SHA256; looks copy-pasted — string kept as-is.
                    throw new ScriptException("Attempted OP_SHA256 on an empty stack");
                stack.add(Utils.doubleDigest(stack.pollLast()));
                break;
            case OP_CODESEPARATOR:
                // Signatures checked after this point only cover the script from here on.
                lastCodeSepLocation = chunk.getStartLocationInProgram() + 1;
                break;
            case OP_CHECKSIG:
            case OP_CHECKSIGVERIFY:
                executeCheckSig(txContainingThis, (int) index, script, stack, lastCodeSepLocation, opcode);
                break;
            case OP_CHECKMULTISIG:
            case OP_CHECKMULTISIGVERIFY:
                opCount = executeMultiSig(txContainingThis, (int) index, script, stack, opCount, lastCodeSepLocation, opcode);
                break;
            case OP_NOP1:
            case OP_NOP2:
            case OP_NOP3:
            case OP_NOP4:
            case OP_NOP5:
            case OP_NOP6:
            case OP_NOP7:
            case OP_NOP8:
            case OP_NOP9:
            case OP_NOP10:
                break;

            default:
                throw new ScriptException("Script used a reserved opcode " + opcode);
            }
        }

        if (stack.size() + altstack.size() > 1000 || stack.size() + altstack.size() < 0)
            throw new ScriptException("Stack size exceeded range");
    }

    if (!ifStack.isEmpty())
        throw new ScriptException("OP_IF/OP_NOTIF without OP_ENDIF");
}

// Implements OP_CHECKSIG / OP_CHECKSIGVERIFY: pops a pubkey and a signature and verifies the
// signature over the connected script (from the last OP_CODESEPARATOR onwards).
private static void executeCheckSig(Transaction txContainingThis, int index, Script script, LinkedList<byte[]> stack,
                                    int lastCodeSepLocation, int opcode) throws ScriptException {
    if (stack.size() < 2)
        throw new ScriptException("Attempted OP_CHECKSIG(VERIFY) on a stack with size < 2");
    byte[] pubKey = stack.pollLast();
    byte[] sigBytes = stack.pollLast();

    byte[] prog = script.getProgram();
    byte[] connectedScript = Arrays.copyOfRange(prog, lastCodeSepLocation, prog.length);

    // The signature itself is removed from the script before hashing, as the reference client does.
    UnsafeByteArrayOutputStream outStream = new UnsafeByteArrayOutputStream(sigBytes.length + 1);
    try {
        writeBytes(outStream, sigBytes);
    } catch (IOException e) {
        throw new RuntimeException(e); // Cannot happen
    }
    connectedScript = removeAllInstancesOf(connectedScript, outStream.toByteArray());

    // TODO: Use int for indexes
everywhere, we can't have that many inputs/outputs
        boolean sigValid = false;
        try {
            TransactionSignature sig = TransactionSignature.decodeFromBitcoin(sigBytes, false);
            // Sign over the connected script (scriptCode) using the sighash flags carried in the signature itself.
            Sha256Hash hash = txContainingThis.hashForSignature(index, connectedScript, (byte) sig.sighashFlags);
            sigValid = ECKey.verify(hash.getBytes(), sig, pubKey);
        } catch (Exception e1) {
            // There is (at least) one exception that could be hit here (EOFException, if the sig is too short)
            // Because I can't verify there aren't more, we use a very generic Exception catch
            log.warn(e1.toString());
        }
        if (opcode == OP_CHECKSIG)
            stack.add(sigValid ? new byte[] {1} : new byte[] {0});
        else if (opcode == OP_CHECKSIGVERIFY)
            if (!sigValid)
                throw new ScriptException("Script failed OP_CHECKSIGVERIFY");
    }

    // Evaluates OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY: pops a pubkey count, that many pubkeys,
    // a signature count, and that many signatures; each signature must verify against some remaining
    // pubkey, consumed in order. Returns the updated opCount (multisig counts each pubkey as an op).
    // NOTE(review): invalid signature decodings are deliberately swallowed here — a bad sig just fails
    // to match a key, it must not abort script execution, to stay consensus-compatible.
    private static int executeMultiSig(Transaction txContainingThis, int index, Script script, LinkedList<byte[]> stack,
                                       int opCount, int lastCodeSepLocation, int opcode) throws ScriptException {
        if (stack.size() < 2)
            throw new ScriptException("Attempted OP_CHECKMULTISIG(VERIFY) on a stack with size < 2");
        int pubKeyCount = castToBigInteger(stack.pollLast()).intValue();
        if (pubKeyCount < 0 || pubKeyCount > 20)
            throw new ScriptException("OP_CHECKMULTISIG(VERIFY) with pubkey count out of range");
        opCount += pubKeyCount;
        if (opCount > 201)
            throw new ScriptException("Total op count > 201 during OP_CHECKMULTISIG(VERIFY)");
        if (stack.size() < pubKeyCount + 1)
            throw new ScriptException("Attempted OP_CHECKMULTISIG(VERIFY) on a stack with size < num_of_pubkeys + 2");
        LinkedList<byte[]> pubkeys = new LinkedList<byte[]>();
        for (int i = 0; i < pubKeyCount; i++) {
            byte[] pubKey = stack.pollLast();
            pubkeys.add(pubKey);
        }
        int sigCount = castToBigInteger(stack.pollLast()).intValue();
        if (sigCount < 0 || sigCount > pubKeyCount)
            throw new ScriptException("OP_CHECKMULTISIG(VERIFY) with sig count out of range");
        if (stack.size() < sigCount + 1)
            throw new ScriptException("Attempted OP_CHECKMULTISIG(VERIFY) on a stack with size < num_of_pubkeys + num_of_signatures + 3");
        LinkedList<byte[]> sigs = new LinkedList<byte[]>();
        for (int i = 0; i < sigCount; i++) {
            byte[] sig = stack.pollLast();
            sigs.add(sig);
        }
        byte[] prog = script.getProgram();
        // The script being signed starts at the last OP_CODESEPARATOR (or the script start).
        byte[] connectedScript = Arrays.copyOfRange(prog, lastCodeSepLocation, prog.length);
        // Signatures themselves must be deleted from the connected script before hashing, per protocol rules.
        for (byte[] sig : sigs) {
            UnsafeByteArrayOutputStream outStream = new UnsafeByteArrayOutputStream(sig.length + 1);
            try {
                writeBytes(outStream, sig);
            } catch (IOException e) {
                throw new RuntimeException(e); // Cannot happen
            }
            connectedScript = removeAllInstancesOf(connectedScript, outStream.toByteArray());
        }
        boolean valid = true;
        while (sigs.size() > 0) {
            byte[] pubKey = pubkeys.pollFirst();
            // We could reasonably move this out of the loop, but because signature verification is significantly
            // more expensive than hashing, its not a big deal.
            try {
                TransactionSignature sig = TransactionSignature.decodeFromBitcoin(sigs.getFirst(), false);
                Sha256Hash hash = txContainingThis.hashForSignature(index, connectedScript, (byte) sig.sighashFlags);
                if (ECKey.verify(hash.getBytes(), sig, pubKey))
                    sigs.pollFirst();
            } catch (Exception e) {
                // There is (at least) one exception that could be hit here (EOFException, if the sig is too short)
                // Because I can't verify there aren't more, we use a very generic Exception catch
            }
            // If more signatures remain than pubkeys, some signature can never be matched -> fail.
            if (sigs.size() > pubkeys.size()) {
                valid = false;
                break;
            }
        }

        // We uselessly remove a stack object to emulate a reference client bug.
        stack.pollLast();

        if (opcode == OP_CHECKMULTISIG) {
            stack.add(valid ? new byte[] {1} : new byte[] {0});
        } else if (opcode == OP_CHECKMULTISIGVERIFY) {
            if (!valid)
                throw new ScriptException("Script failed OP_CHECKMULTISIGVERIFY");
        }
        return opCount;
    }

    /**
     * Verifies that this script (interpreted as a scriptSig) correctly spends the given scriptPubKey.
     * @param txContainingThis The transaction in which this input scriptSig resides.
     *                         Accessing txContainingThis from another thread while this method runs results in undefined behavior.
* @param scriptSigIndex The index in txContainingThis of the scriptSig (note: NOT the index of the scriptPubKey).
     * @param scriptPubKey The connected scriptPubKey containing the conditions needed to claim the value.
     * @param enforceP2SH Whether "pay to script hash" rules should be enforced. If in doubt, set to true.
     */
    public void correctlySpends(Transaction txContainingThis, long scriptSigIndex, Script scriptPubKey,
                                boolean enforceP2SH) throws ScriptException {
        // Clone the transaction because executing the script involves editing it, and if we die, we'll leave
        // the tx half broken (also it's not so thread safe to work on it directly.
        try {
            txContainingThis = new Transaction(txContainingThis.getParams(), txContainingThis.bitcoinSerialize());
        } catch (ProtocolException e) {
            throw new RuntimeException(e);   // Should not happen unless we were given a totally broken transaction.
        }
        // Consensus rule: scripts larger than 10,000 bytes are invalid.
        if (getProgram().length > 10000 || scriptPubKey.getProgram().length > 10000)
            throw new ScriptException("Script larger than 10,000 bytes");

        LinkedList<byte[]> stack = new LinkedList<byte[]>();
        LinkedList<byte[]> p2shStack = null;

        // Run the scriptSig first, then the scriptPubKey over the resulting stack.
        executeScript(txContainingThis, scriptSigIndex, this, stack);
        if (enforceP2SH)
            // Snapshot the stack after the scriptSig ran; P2SH evaluation below needs it untouched.
            p2shStack = new LinkedList<byte[]>(stack);
        executeScript(txContainingThis, scriptSigIndex, scriptPubKey, stack);

        if (stack.size() == 0)
            throw new ScriptException("Stack empty at end of script execution.");

        if (!castToBool(stack.pollLast()))
            throw new ScriptException("Script resulted in a non-true stack: " + stack);

        // P2SH is pay to script hash. It means that the scriptPubKey has a special form which is a valid
        // program but it has "useless" form that if evaluated as a normal program always returns true.
        // Instead, miners recognize it as special based on its template - it provides a hash of the real scriptPubKey
        // and that must be provided by the input. The goal of this bizarre arrangement is twofold:
        //
        // (1) You can sum up a large, complex script (like a CHECKMULTISIG script) with an address that's the same
        //     size as a regular address. This means it doesn't overload scannable QR codes/NFC tags or become
        //     un-wieldy to copy/paste.
        // (2) It allows the working set to be smaller: nodes perform best when they can store as many unspent outputs
        //     in RAM as possible, so if the outputs are made smaller and the inputs get bigger, then it's better for
        //     overall scalability and performance.

        // TODO: Check if we can take out enforceP2SH if there's a checkpoint at the enforcement block.
        if (enforceP2SH && scriptPubKey.isPayToScriptHash()) {
            // A P2SH scriptSig may contain only data pushes (no script ops above OP_16).
            for (ScriptChunk chunk : chunks)
                if (chunk.isOpCode() && (chunk.data[0] & 0xff) > OP_16)
                    throw new ScriptException("Attempted to spend a P2SH scriptPubKey with a script that contained script ops");

            // The top of the saved stack is the serialized real scriptPubKey; run it over the rest.
            byte[] scriptPubKeyBytes = p2shStack.pollLast();
            Script scriptPubKeyP2SH = new Script(scriptPubKeyBytes);

            executeScript(txContainingThis, scriptSigIndex, scriptPubKeyP2SH, p2shStack);

            if (p2shStack.size() == 0)
                throw new ScriptException("P2SH stack empty at end of script execution.");

            if (!castToBool(p2shStack.pollLast()))
                throw new ScriptException("P2SH script execution resulted in a non-true stack");
        }
    }

    // Utility that doesn't copy for internal use
    private byte[] getQuickProgram() {
        if (program != null)
            return program;
        return getProgram();
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Script)) return false;
        Script s = (Script)obj;
        // Scripts are equal iff their serialized programs are byte-identical.
        return Arrays.equals(getQuickProgram(), s.getQuickProgram());
    }

    @Override
    public int hashCode() {
        byte[] bytes = getQuickProgram();
        return Arrays.hashCode(bytes);
    }
}
cbeams/bitcoinj
core/src/main/java/com/google/bitcoin/script/Script.java
Java
apache-2.0
61,134
extern crate libc; extern crate serde_json; use api::{ErrorCode, IndyHandle, CommandHandle, WalletHandle, SearchHandle}; use errors::prelude::*; use commands::{Command, CommandExecutor}; use commands::anoncreds::AnoncredsCommand; use commands::anoncreds::issuer::IssuerCommand; use commands::anoncreds::prover::ProverCommand; use commands::anoncreds::verifier::VerifierCommand; use domain::anoncreds::schema::{Schema, AttributeNames}; use domain::anoncreds::credential_definition::{CredentialDefinition, CredentialDefinitionConfig}; use domain::anoncreds::credential_offer::CredentialOffer; use domain::anoncreds::credential_request::{CredentialRequest, CredentialRequestMetadata}; use domain::anoncreds::credential::{Credential, AttributeValues}; use domain::anoncreds::revocation_registry_definition::{RevocationRegistryConfig, RevocationRegistryDefinition}; use domain::anoncreds::revocation_registry_delta::RevocationRegistryDelta; use domain::anoncreds::proof::Proof; use domain::anoncreds::proof_request::{ProofRequest, ProofRequestExtraQuery}; use domain::anoncreds::requested_credential::RequestedCredentials; use domain::anoncreds::revocation_registry::RevocationRegistry; use domain::anoncreds::revocation_state::RevocationState; use utils::ctypes; use self::libc::c_char; use std::ptr; use std::collections::HashMap; /// Create credential schema entity that describes credential attributes list and allows credentials /// interoperability. /// /// Schema is public and intended to be shared with all anoncreds workflow actors usually by publishing SCHEMA transaction /// to Indy distributed ledger. /// /// It is IMPORTANT for current version POST Schema in Ledger and after that GET it from Ledger /// with correct seq_no to save compatibility with Ledger. /// After that can call indy_issuer_create_and_store_credential_def to build corresponding Credential Definition. 
/// /// #Params /// command_handle: command handle to map callback to user context /// issuer_did: DID of schema issuer /// name: a name the schema /// version: a version of the schema /// attrs: a list of schema attributes descriptions (the number of attributes should be less or equal than 125) /// cb: Callback that takes command result as parameter /// /// #Returns /// schema_id: identifier of created schema /// schema_json: schema as json /// /// #Errors /// Common* /// Anoncreds* #[no_mangle] pub extern fn indy_issuer_create_schema(command_handle: CommandHandle, issuer_did: *const c_char, name: *const c_char, version: *const c_char, attrs: *const c_char, cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode, schema_id: *const c_char, schema_json: *const c_char)>) -> ErrorCode { trace!("indy_issuer_create_schema: >>> issuer_did: {:?}, name: {:?}, version: {:?}, attrs: {:?}", issuer_did, name, version, attrs); check_useful_c_str!(issuer_did, ErrorCode::CommonInvalidParam2); check_useful_c_str!(name, ErrorCode::CommonInvalidParam3); check_useful_c_str!(version, ErrorCode::CommonInvalidParam4); check_useful_json!(attrs, ErrorCode::CommonInvalidParam5, AttributeNames); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam6); trace!("indy_issuer_create_schema: entity >>> issuer_did: {:?}, name: {:?}, version: {:?}, attrs: {:?}", issuer_did, name, version, attrs); if attrs.is_empty() { return err_msg(IndyErrorKind::InvalidStructure, "Empty list of Schema attributes has been passed").into(); } let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Issuer( IssuerCommand::CreateSchema( issuer_did, name, version, attrs, Box::new(move |result| { let (err, id, schema_json) = prepare_result_2!(result, String::new(), String::new()); trace!("indy_crypto_cl_credential_public_key_to_json: id: {:?}, schema_json: {:?}", id, schema_json); let id = ctypes::string_to_cstring(id); let schema_json = 
ctypes::string_to_cstring(schema_json); cb(command_handle, err, id.as_ptr(), schema_json.as_ptr()) }) )))); let res = prepare_result!(result); trace!("indy_issuer_create_schema: <<< res: {:?}", res); res } /// Create credential definition entity that encapsulates credentials issuer DID, credential schema, secrets used for signing credentials /// and secrets used for credentials revocation. /// /// Credential definition entity contains private and public parts. Private part will be stored in the wallet. Public part /// will be returned as json intended to be shared with all anoncreds workflow actors usually by publishing CRED_DEF transaction /// to Indy distributed ledger. /// /// It is IMPORTANT for current version GET Schema from Ledger with correct seq_no to save compatibility with Ledger. /// /// #Params /// wallet_handle: wallet handler (created by open_wallet). /// command_handle: command handle to map callback to user context. /// issuer_did: a DID of the issuer signing cred_def transaction to the Ledger /// schema_json: credential schema as a json /// tag: allows to distinct between credential definitions for the same issuer and schema /// signature_type: credential definition type (optional, 'CL' by default) that defines credentials signature and revocation math. Supported types are: /// - 'CL': Camenisch-Lysyanskaya credential signature type /// config_json: (optional) type-specific configuration of credential definition as json: /// - 'CL': /// - support_revocation: whether to request non-revocation credential (optional, default false) /// cb: Callback that takes command result as parameter. 
///
/// #Returns
/// cred_def_id: identifier of created credential definition
/// cred_def_json: public part of created credential definition
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_issuer_create_and_store_credential_def(command_handle: CommandHandle,
                                                          wallet_handle: WalletHandle,
                                                          issuer_did: *const c_char,
                                                          schema_json: *const c_char,
                                                          tag: *const c_char,
                                                          signature_type: *const c_char,
                                                          config_json: *const c_char,
                                                          cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                               cred_def_id: *const c_char,
                                                                               cred_def_json: *const c_char)>) -> ErrorCode {
    trace!("indy_issuer_create_and_store_credential_def: >>> wallet_handle: {:?}, issuer_did: {:?}, schema_json: {:?}, tag: {:?}, \
            signature_type: {:?}, config_json: {:?}", wallet_handle, issuer_did, schema_json, tag, signature_type, config_json);

    // Validate and convert the raw C inputs; signature_type and config_json are optional (may be NULL).
    check_useful_c_str!(issuer_did, ErrorCode::CommonInvalidParam3);
    check_useful_json!(schema_json, ErrorCode::CommonInvalidParam4, Schema);
    check_useful_c_str!(tag, ErrorCode::CommonInvalidParam5);
    check_useful_opt_c_str!(signature_type, ErrorCode::CommonInvalidParam6);
    check_useful_opt_json!(config_json, ErrorCode::CommonInvalidParam7, CredentialDefinitionConfig);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);

    trace!("indy_issuer_create_and_store_credential_def: entities >>> wallet_handle: {:?}, issuer_did: {:?}, schema_json: {:?}, tag: {:?}, \
            signature_type: {:?}, config_json: {:?}", wallet_handle, issuer_did, schema_json, tag, signature_type, config_json);

    // Dispatch asynchronously; the boxed closure delivers the result back through the FFI callback.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Issuer(
                IssuerCommand::CreateAndStoreCredentialDefinition(
                    wallet_handle,
                    issuer_did,
                    schema_json,
                    tag,
                    signature_type,
                    config_json,
                    Box::new(move |result| {
                        let (err, cred_def_id, cred_def_json) = prepare_result_2!(result, String::new(), String::new());
                        trace!("indy_issuer_create_and_store_credential_def: cred_def_id: {:?}, cred_def_json: {:?}", cred_def_id, cred_def_json);
                        let cred_def_id = ctypes::string_to_cstring(cred_def_id);
                        let cred_def_json = ctypes::string_to_cstring(cred_def_json);
                        cb(command_handle, err, cred_def_id.as_ptr(), cred_def_json.as_ptr())
                    })
                ))));

    let res = prepare_result!(result);

    trace!("indy_issuer_create_and_store_credential_def: <<< res: {:?}", res);

    res
}

/// Create a new revocation registry for the given credential definition as tuple of entities
/// - Revocation registry definition that encapsulates credentials definition reference, revocation type specific configuration and
///   secrets used for credentials revocation
/// - Revocation registry state that stores the information about revoked entities in a non-disclosing way. The state can be
///   represented as ordered list of revocation registry entries were each entry represents the list of revocation or issuance operations.
///
/// Revocation registry definition entity contains private and public parts. Private part will be stored in the wallet. Public part
/// will be returned as json intended to be shared with all anoncreds workflow actors usually by publishing REVOC_REG_DEF transaction
/// to Indy distributed ledger.
///
/// Revocation registry state is stored on the wallet and also intended to be shared as the ordered list of REVOC_REG_ENTRY transactions.
/// This call initializes the state in the wallet and returns the initial entry.
///
/// Some revocation registry types (for example, 'CL_ACCUM') can require generation of binary blob called tails used to hide information about revoked credentials in public
/// revocation registry and intended to be distributed out of leger (REVOC_REG_DEF transaction will still contain uri and hash of tails).
/// This call requires access to pre-configured blob storage writer instance handle that will allow to write generated tails.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// issuer_did: a DID of the issuer signing transaction to the Ledger /// revoc_def_type: revocation registry type (optional, default value depends on credential definition type). Supported types are: /// - 'CL_ACCUM': Type-3 pairing based accumulator. Default for 'CL' credential definition type /// tag: allows to distinct between revocation registries for the same issuer and credential definition /// cred_def_id: id of stored in ledger credential definition /// config_json: type-specific configuration of revocation registry as json: /// - 'CL_ACCUM': { /// "issuance_type": (optional) type of issuance. Currently supported: /// 1) ISSUANCE_BY_DEFAULT: all indices are assumed to be issued and initial accumulator is calculated over all indices; /// Revocation Registry is updated only during revocation. /// 2) ISSUANCE_ON_DEMAND: nothing is issued initially accumulator is 1 (used by default); /// "max_cred_num": maximum number of credentials the new registry can process (optional, default 100000) /// } /// tails_writer_handle: handle of blob storage to store tails /// cb: Callback that takes command result as parameter. 
/// /// #Returns /// revoc_reg_id: identifier of created revocation registry definition /// revoc_reg_def_json: public part of revocation registry definition /// revoc_reg_entry_json: revocation registry entry that defines initial state of revocation registry /// /// #Errors /// Common* /// Wallet* /// Anoncreds* #[no_mangle] pub extern fn indy_issuer_create_and_store_revoc_reg(command_handle: CommandHandle, wallet_handle: WalletHandle, issuer_did: *const c_char, revoc_def_type: *const c_char, tag: *const c_char, cred_def_id: *const c_char, config_json: *const c_char, tails_writer_handle: IndyHandle, cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode, revoc_reg_id: *const c_char, revoc_reg_def_json: *const c_char, revoc_reg_entry_json: *const c_char)>) -> ErrorCode { trace!("indy_issuer_create_and_store_credential_def: >>> wallet_handle: {:?}, issuer_did: {:?}, revoc_def_type: {:?}, tag: {:?}, \ cred_def_id: {:?}, config_json: {:?}, tails_writer_handle: {:?}", wallet_handle, issuer_did, revoc_def_type, tag, cred_def_id, config_json, tails_writer_handle); check_useful_c_str!(issuer_did, ErrorCode::CommonInvalidParam3); check_useful_opt_c_str!(revoc_def_type, ErrorCode::CommonInvalidParam4); check_useful_c_str!(tag, ErrorCode::CommonInvalidParam5); check_useful_c_str!(cred_def_id, ErrorCode::CommonInvalidParam6); check_useful_json!(config_json, ErrorCode::CommonInvalidParam7, RevocationRegistryConfig); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam9); trace!("indy_issuer_create_and_store_credential_def: entities >>> wallet_handle: {:?}, issuer_did: {:?}, revoc_def_type: {:?}, tag: {:?}, \ cred_def_id: {:?}, config_json: {:?}, tails_writer_handle: {:?}", wallet_handle, issuer_did, revoc_def_type, tag, cred_def_id, config_json, tails_writer_handle); let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Issuer( IssuerCommand::CreateAndStoreRevocationRegistry( wallet_handle, issuer_did, revoc_def_type, tag, 
cred_def_id, config_json, tails_writer_handle, Box::new(move |result| { let (err, revoc_reg_id, revoc_reg_def_json, revoc_reg_json) = prepare_result_3!(result, String::new(), String::new(), String::new()); trace!("indy_issuer_create_and_store_credential_def: revoc_reg_id: {:?}, revoc_reg_def_json: {:?}, revoc_reg_json: {:?}", revoc_reg_id, revoc_reg_def_json, revoc_reg_json); let revoc_reg_id = ctypes::string_to_cstring(revoc_reg_id); let revoc_reg_def_json = ctypes::string_to_cstring(revoc_reg_def_json); let revoc_reg_json = ctypes::string_to_cstring(revoc_reg_json); cb(command_handle, err, revoc_reg_id.as_ptr(), revoc_reg_def_json.as_ptr(), revoc_reg_json.as_ptr()) }) )))); let res = prepare_result!(result); trace!("indy_issuer_create_and_store_credential_def: <<< res: {:?}", res); res } /// Create credential offer that will be used by Prover for /// credential request creation. Offer includes nonce and key correctness proof /// for authentication between protocol steps and integrity checking. 
///
/// #Params
/// command_handle: command handle to map callback to user context
/// wallet_handle: wallet handler (created by open_wallet)
/// cred_def_id: id of credential definition stored in the wallet
/// cb: Callback that takes command result as parameter
///
/// #Returns
/// credential offer json:
///     {
///         "schema_id": string,
///         "cred_def_id": string,
///         // Fields below can depend on Cred Def type
///         "nonce": string,
///         "key_correctness_proof" : <key_correctness_proof>
///     }
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_issuer_create_credential_offer(command_handle: CommandHandle,
                                                  wallet_handle: WalletHandle,
                                                  cred_def_id: *const c_char,
                                                  cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                       cred_offer_json: *const c_char)>) -> ErrorCode {
    trace!("indy_issuer_create_credential_offer: >>> wallet_handle: {:?}, cred_def_id: {:?}", wallet_handle, cred_def_id);

    // Validate and convert the raw C inputs before dispatching.
    check_useful_c_str!(cred_def_id, ErrorCode::CommonInvalidParam3);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);

    trace!("indy_issuer_create_credential_offer: entities >>> wallet_handle: {:?}, cred_def_id: {:?}", wallet_handle, cred_def_id);

    // Dispatch asynchronously; the boxed closure delivers the result back through the FFI callback.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Issuer(
                IssuerCommand::CreateCredentialOffer(
                    wallet_handle,
                    cred_def_id,
                    Box::new(move |result| {
                        let (err, cred_offer_json) = prepare_result_1!(result, String::new());
                        trace!("indy_issuer_create_credential_offer: cred_offer_json: {:?}", cred_offer_json);
                        let cred_offer_json = ctypes::string_to_cstring(cred_offer_json);
                        cb(command_handle, err, cred_offer_json.as_ptr())
                    })
                ))));

    let res = prepare_result!(result);

    trace!("indy_issuer_create_credential_offer: <<< res: {:?}", res);

    res
}

/// Check Cred Request for the given Cred Offer and issue Credential for the given Cred Request.
///
/// Cred Request must match Cred Offer.
The credential definition and revocation registry definition
/// referenced in Cred Offer and Cred Request must be already created and stored into the wallet.
///
/// Information for this credential revocation will be store in the wallet as part of revocation registry under
/// generated cred_revoc_id local for this wallet.
///
/// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction.
/// Note that it is possible to accumulate deltas to reduce ledger load.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// cred_offer_json: a cred offer created by indy_issuer_create_credential_offer
/// cred_req_json: a credential request created by indy_prover_create_credential_req
/// cred_values_json: a credential containing attribute values for each of requested attribute names.
///     Example:
///     {
///      "attr1" : {"raw": "value1", "encoded": "value1_as_int" },
///      "attr2" : {"raw": "value1", "encoded": "value1_as_int" }
///     }
/// rev_reg_id: id of revocation registry stored in the wallet
/// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// cred_json: Credential json containing signed credential values
///     {
///         "schema_id": string,
///         "cred_def_id": string,
///         "rev_reg_def_id", Optional<string>,
///         "values": <see cred_values_json above>,
///         // Fields below can depend on Cred Def type
///         "signature": <signature>,
///         "signature_correctness_proof": <signature_correctness_proof>
///     }
/// cred_revoc_id: local id for revocation info (Can be used for revocation of this credential)
/// revoc_reg_delta_json: Revocation registry delta json with a newly issued credential
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_create_credential(command_handle: CommandHandle,
                                            wallet_handle: WalletHandle,
                                            cred_offer_json: *const c_char,
                                            cred_req_json: *const c_char,
                                            cred_values_json: *const c_char,
                                            rev_reg_id: *const c_char,
                                            blob_storage_reader_handle: IndyHandle,
                                            cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                 cred_json: *const c_char,
                                                                 cred_revoc_id: *const c_char,
                                                                 revoc_reg_delta_json: *const c_char)>) -> ErrorCode {
    trace!("indy_issuer_create_credential: >>> wallet_handle: {:?}, cred_offer_json: {:?}, cred_req_json: {:?}, cred_values_json: {:?}, rev_reg_id: {:?}, \
            blob_storage_reader_handle: {:?}", wallet_handle, cred_offer_json, cred_req_json, cred_values_json, rev_reg_id, blob_storage_reader_handle);

    // Validate and convert the raw C inputs; rev_reg_id is optional (may be NULL for non-revocable credentials).
    check_useful_json!(cred_offer_json, ErrorCode::CommonInvalidParam3, CredentialOffer);
    check_useful_json!(cred_req_json, ErrorCode::CommonInvalidParam4, CredentialRequest);
    check_useful_json!(cred_values_json, ErrorCode::CommonInvalidParam5, HashMap<String, AttributeValues>);
    check_useful_opt_c_str!(rev_reg_id, ErrorCode::CommonInvalidParam6);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);

    // -1 is the FFI sentinel for "no blob storage reader supplied".
    let blob_storage_reader_handle = if blob_storage_reader_handle != -1 { Some(blob_storage_reader_handle) } else { None };

    // secret!() keeps sensitive credential material out of the logs.
    trace!("indy_issuer_create_credential: entities >>> wallet_handle: {:?}, cred_offer_json: {:?}, cred_req_json: {:?}, cred_values_json: {:?}, rev_reg_id: {:?}, \
            blob_storage_reader_handle: {:?}", wallet_handle, cred_offer_json, secret!(&cred_req_json), secret!(&cred_values_json), secret!(&rev_reg_id), blob_storage_reader_handle);

    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Issuer(
                IssuerCommand::CreateCredential(
                    wallet_handle,
                    cred_offer_json,
                    cred_req_json,
                    cred_values_json,
                    rev_reg_id,
                    blob_storage_reader_handle,
                    Box::new(move |result| {
                        let (err, cred_json, revoc_id, revoc_reg_delta_json) = prepare_result_3!(result, String::new(), None, None);
                        trace!("indy_issuer_create_credential: cred_json: {:?}, revoc_id: {:?}, revoc_reg_delta_json: {:?}",
                               secret!(cred_json.as_str()), secret!(&revoc_id), revoc_reg_delta_json);
                        let cred_json = ctypes::string_to_cstring(cred_json);
                        let revoc_id = revoc_id.map(ctypes::string_to_cstring);
                        let revoc_reg_delta_json = revoc_reg_delta_json.map(ctypes::string_to_cstring);
                        // Optional outputs are surfaced to C as NULL pointers.
                        cb(command_handle, err,
                           cred_json.as_ptr(),
                           revoc_id.as_ref().map(|id| id.as_ptr()).unwrap_or(ptr::null()),
                           revoc_reg_delta_json.as_ref().map(|delta| delta.as_ptr()).unwrap_or(ptr::null()))
                    })
                ))));

    let res = prepare_result!(result);

    trace!("indy_issuer_create_credential: <<< res: {:?}", res);

    res
}

/// Revoke a credential identified by a cred_revoc_id (returned by indy_issuer_create_credential).
///
/// The corresponding credential definition and revocation registry must be already
/// created an stored into the wallet.
///
/// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction.
/// Note that it is possible to accumulate deltas to reduce ledger load.
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// wallet_handle: wallet handler (created by open_wallet).
/// blob_storage_reader_cfg_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// rev_reg_id: id of revocation registry stored in wallet
/// cred_revoc_id: local id for revocation info
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// revoc_reg_delta_json: Revocation registry delta json with a revoked credential
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_revoke_credential(command_handle: CommandHandle,
                                            wallet_handle: WalletHandle,
                                            blob_storage_reader_cfg_handle: IndyHandle,
                                            rev_reg_id: *const c_char,
                                            cred_revoc_id: *const c_char,
                                            cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                 revoc_reg_delta_json: *const c_char)>) -> ErrorCode {
    trace!("indy_issuer_revoke_credential: >>> wallet_handle: {:?}, blob_storage_reader_cfg_handle: {:?}, rev_reg_id: {:?}, cred_revoc_id: {:?}",
           wallet_handle, blob_storage_reader_cfg_handle, rev_reg_id, cred_revoc_id);

    // Validate and convert the raw C inputs before dispatching.
    check_useful_c_str!(rev_reg_id, ErrorCode::CommonInvalidParam4);
    check_useful_c_str!(cred_revoc_id, ErrorCode::CommonInvalidParam5);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam6);

    // secret!() keeps the revocation id out of the logs.
    trace!("indy_issuer_revoke_credential: entities >>> wallet_handle: {:?}, blob_storage_reader_cfg_handle: {:?}, rev_reg_id: {:?}, cred_revoc_id: {:?}",
           wallet_handle, blob_storage_reader_cfg_handle, rev_reg_id, secret!(cred_revoc_id.as_str()));

    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Issuer(
                IssuerCommand::RevokeCredential(
                    wallet_handle,
                    blob_storage_reader_cfg_handle,
                    rev_reg_id,
                    cred_revoc_id,
                    Box::new(move |result| {
                        let (err, revoc_reg_update_json) = prepare_result_1!(result, String::new());
                        trace!("indy_issuer_revoke_credential: revoc_reg_update_json: {:?}", revoc_reg_update_json);
                        let revoc_reg_update_json = ctypes::string_to_cstring(revoc_reg_update_json);
                        cb(command_handle, err, revoc_reg_update_json.as_ptr())
                    })
                ))));

    let res =
prepare_result!(result);

    trace!("indy_issuer_revoke_credential: <<< res: {:?}", res);

    res
}

/*/// Recover a credential identified by a cred_revoc_id (returned by indy_issuer_create_credential).
///
/// The corresponding credential definition and revocation registry must be already
/// created an stored into the wallet.
///
/// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction.
/// Note that it is possible to accumulate deltas to reduce ledger load.
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// wallet_handle: wallet handler (created by open_wallet).
/// blob_storage_reader_cfg_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// rev_reg_id: id of revocation registry stored in wallet
/// cred_revoc_id: local id for revocation info
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// revoc_reg_delta_json: Revocation registry delta json with a recovered credential
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_recover_credential(command_handle: CommandHandle,
                                             wallet_handle: WalletHandle,
                                             blob_storage_reader_cfg_handle: IndyHandle,
                                             rev_reg_id: *const c_char,
                                             cred_revoc_id: *const c_char,
                                             cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                  revoc_reg_delta_json: *const c_char,
                                             )>) -> ErrorCode {
    check_useful_c_str!(rev_reg_id, ErrorCode::CommonInvalidParam4);
    check_useful_c_str!(cred_revoc_id, ErrorCode::CommonInvalidParam5);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam6);

    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Issuer(
                IssuerCommand::RecoverCredential(
                    wallet_handle,
                    blob_storage_reader_cfg_handle,
                    rev_reg_id,
                    cred_revoc_id,
                    Box::new(move |result| {
                        let (err, revoc_reg_update_json) = prepare_result_1!(result, String::new());
                        let revoc_reg_update_json =
ctypes::string_to_cstring(revoc_reg_update_json);
                        cb(command_handle, err, revoc_reg_update_json.as_ptr())
                    })
                ))));

    prepare_result!(result)
}*/

/// Merge two revocation registry deltas (returned by indy_issuer_create_credential or indy_issuer_revoke_credential) to accumulate common delta.
/// Send common delta to ledger to reduce the load.
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// rev_reg_delta_json: revocation registry delta.
/// other_rev_reg_delta_json: revocation registry delta for which PrevAccum value is equal to current accum value of rev_reg_delta_json.
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// merged_rev_reg_delta: Merged revocation registry delta
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_merge_revocation_registry_deltas(command_handle: CommandHandle,
                                                           rev_reg_delta_json: *const c_char,
                                                           other_rev_reg_delta_json: *const c_char,
                                                           cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                                merged_rev_reg_delta: *const c_char)>) -> ErrorCode {
    trace!("indy_issuer_merge_revocation_registry_deltas: >>> rev_reg_delta_json: {:?}, other_rev_reg_delta_json: {:?}",
           rev_reg_delta_json, other_rev_reg_delta_json);

    // Validate and convert the raw C inputs before dispatching.
    check_useful_json!(rev_reg_delta_json, ErrorCode::CommonInvalidParam2, RevocationRegistryDelta);
    check_useful_json!(other_rev_reg_delta_json, ErrorCode::CommonInvalidParam3, RevocationRegistryDelta);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);

    trace!("indy_issuer_merge_revocation_registry_deltas: entities >>> rev_reg_delta_json: {:?}, other_rev_reg_delta_json: {:?}",
           rev_reg_delta_json, other_rev_reg_delta_json);

    // Dispatch asynchronously; the boxed closure delivers the result back through the FFI callback.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Issuer(
                IssuerCommand::MergeRevocationRegistryDeltas(
                    rev_reg_delta_json,
                    other_rev_reg_delta_json,
                    Box::new(move |result| {
                        let (err, merged_rev_reg_delta) = prepare_result_1!(result, String::new());
                        trace!("indy_issuer_merge_revocation_registry_deltas: merged_rev_reg_delta: {:?}", merged_rev_reg_delta);
                        let merged_rev_reg_delta = ctypes::string_to_cstring(merged_rev_reg_delta);
                        cb(command_handle, err, merged_rev_reg_delta.as_ptr())
                    })
                ))));

    let res = prepare_result!(result);

    trace!("indy_issuer_merge_revocation_registry_deltas: <<< res: {:?}", res);

    res
}

/// Creates a master secret with a given id and stores it in the wallet.
/// The id must be unique.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// master_secret_id: (optional, if not present random one will be generated) new master id
///
/// #Returns
/// out_master_secret_id: Id of generated master secret
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_create_master_secret(command_handle: CommandHandle,
                                               wallet_handle: WalletHandle,
                                               master_secret_id: *const c_char,
                                               cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                    out_master_secret_id: *const c_char)>) -> ErrorCode {
    trace!("indy_prover_create_master_secret: >>> wallet_handle: {:?}, master_secret_id: {:?}", wallet_handle, master_secret_id);

    // master_secret_id is optional: NULL means "generate a random id".
    check_useful_opt_c_str!(master_secret_id, ErrorCode::CommonInvalidParam3);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);

    trace!("indy_prover_create_master_secret: entities >>> wallet_handle: {:?}, master_secret_id: {:?}", wallet_handle, master_secret_id);

    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::CreateMasterSecret(
                    wallet_handle,
                    master_secret_id,
                    Box::new(move |result| {
                        let (err, out_master_secret_id) = prepare_result_1!(result, String::new());
                        trace!("indy_prover_create_master_secret: out_master_secret_id: {:?}", out_master_secret_id);
                        let out_master_secret_id = ctypes::string_to_cstring(out_master_secret_id);
                        cb(command_handle, err, out_master_secret_id.as_ptr())
                    })
                ))));

    let res = prepare_result!(result);

    trace!("indy_prover_create_master_secret: <<< res: {:?}", res);

    res
}

/// Creates a credential request for the given credential offer.
///
/// The method creates a blinded master secret for a master secret identified by a provided name.
/// The master secret identified by the name must be already stored in the secure wallet (see prover_create_master_secret)
/// The blinded master secret is a part of the credential request.
///
/// #Params
/// command_handle: command handle to map callback to user context
/// wallet_handle: wallet handler (created by open_wallet)
/// prover_did: a DID of the prover
/// cred_offer_json: credential offer as a json containing information about the issuer and a credential
/// cred_def_json: credential definition json related to <cred_def_id> in <cred_offer_json>
/// master_secret_id: the id of the master secret stored in the wallet
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// cred_req_json: Credential request json for creation of credential by Issuer
///     {
///      "prover_did" : string,
///      "cred_def_id" : string,
///         // Fields below can depend on Cred Def type
///      "blinded_ms" : <blinded_master_secret>,
///      "blinded_ms_correctness_proof" : <blinded_ms_correctness_proof>,
///      "nonce": string
///    }
/// cred_req_metadata_json: Credential request metadata json for further processing of received form Issuer credential.
///     Note: cred_req_metadata_json mustn't be shared with Issuer.
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_create_credential_req(command_handle: CommandHandle,
                                                wallet_handle: WalletHandle,
                                                prover_did: *const c_char,
                                                cred_offer_json: *const c_char,
                                                cred_def_json: *const c_char,
                                                master_secret_id: *const c_char,
                                                cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                     cred_req_json: *const c_char,
                                                                     cred_req_metadata_json: *const c_char)>) -> ErrorCode {
    trace!("indy_prover_create_credential_req: >>> wallet_handle: {:?}, prover_did: {:?}, cred_offer_json: {:?}, cred_def_json: {:?}, master_secret_id: {:?}", wallet_handle, prover_did, cred_offer_json, cred_def_json, master_secret_id);

    // Rebind each raw C argument to a validated Rust value; the check_useful_*
    // macros presumably return early with the given ErrorCode on a null pointer
    // or unparsable JSON (macros defined outside this file — confirm).
    check_useful_c_str!(prover_did, ErrorCode::CommonInvalidParam3);
    check_useful_json!(cred_offer_json, ErrorCode::CommonInvalidParam4, CredentialOffer);
    check_useful_json!(cred_def_json, ErrorCode::CommonInvalidParam5, CredentialDefinition);
    check_useful_c_str!(master_secret_id, ErrorCode::CommonInvalidParam6);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam7);

    trace!("indy_prover_create_credential_req: entities >>> wallet_handle: {:?}, prover_did: {:?}, cred_offer_json: {:?}, cred_def_json: {:?}, master_secret_id: {:?}", wallet_handle, prover_did, cred_offer_json, cred_def_json, master_secret_id);

    // Enqueue the command; the closure runs on completion and returns both the
    // request and its (private) metadata to the caller through cb.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::CreateCredentialRequest(
                    wallet_handle,
                    prover_did,
                    cred_offer_json,
                    cred_def_json,
                    master_secret_id,
                    Box::new(move |result| {
                        let (err, cred_req_json, cred_req_metadata_json) = prepare_result_2!(result, String::new(), String::new());
                        trace!("indy_prover_create_credential_req: cred_req_json: {:?}, cred_req_metadata_json: {:?}", cred_req_json, cred_req_metadata_json);
                        // Local CString bindings keep the pointers passed to cb
                        // valid for the duration of the callback invocation.
                        let cred_req_json = ctypes::string_to_cstring(cred_req_json);
                        let cred_req_metadata_json = ctypes::string_to_cstring(cred_req_metadata_json);
                        cb(command_handle, err, cred_req_json.as_ptr(),
                           cred_req_metadata_json.as_ptr())
                    })
                ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_create_credential_req: <<< res: {:?}", res);

    res
}

/// Check credential provided by Issuer for the given credential request,
/// updates the credential by a master secret and stores in a secure wallet.
///
/// To support efficient and flexible search the following tags will be created for stored credential:
///     {
///         "schema_id": <credential schema id>,
///         "schema_issuer_did": <credential schema issuer did>,
///         "schema_name": <credential schema name>,
///         "schema_version": <credential schema version>,
///         "issuer_did": <credential issuer did>,
///         "cred_def_id": <credential definition id>,
///         "rev_reg_id": <credential revocation registry id>, // "None" as string if not present
///         // for every attribute in <credential values>
///         "attr::<attribute name>::marker": "1",
///         "attr::<attribute name>::value": <attribute raw value>,
///     }
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// wallet_handle: wallet handler (created by open_wallet).
/// cred_id: (optional, default is a random one) identifier by which credential will be stored in the wallet
/// cred_req_metadata_json: a credential request metadata created by indy_prover_create_credential_req
/// cred_json: credential json received from issuer
/// cred_def_json: credential definition json related to <cred_def_id> in <cred_json>
/// rev_reg_def_json: revocation registry definition json related to <rev_reg_def_id> in <cred_json>
/// cb: Callback that takes command result as parameter.
/// /// #Returns /// out_cred_id: identifier by which credential is stored in the wallet /// /// #Errors /// Annoncreds* /// Common* /// Wallet* #[no_mangle] pub extern fn indy_prover_store_credential(command_handle: CommandHandle, wallet_handle: WalletHandle, cred_id: *const c_char, cred_req_metadata_json: *const c_char, cred_json: *const c_char, cred_def_json: *const c_char, rev_reg_def_json: *const c_char, cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode, out_cred_id: *const c_char)>) -> ErrorCode { trace!("indy_prover_store_credential: >>> wallet_handle: {:?}, cred_id: {:?}, cred_req_metadata_json: {:?}, cred_json: {:?}, cred_def_json: {:?}, \ cred_def_json: {:?}", wallet_handle, cred_id, cred_req_metadata_json, cred_json, cred_def_json, rev_reg_def_json); check_useful_opt_c_str!(cred_id, ErrorCode::CommonInvalidParam3); check_useful_json!(cred_req_metadata_json, ErrorCode::CommonInvalidParam4, CredentialRequestMetadata); check_useful_json!(cred_json, ErrorCode::CommonInvalidParam5, Credential); check_useful_json!(cred_def_json, ErrorCode::CommonInvalidParam6, CredentialDefinition); check_useful_opt_json!(rev_reg_def_json, ErrorCode::CommonInvalidParam7, RevocationRegistryDefinition); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8); trace!("indy_prover_store_credential: entities >>> wallet_handle: {:?}, cred_id: {:?}, cred_req_metadata_json: {:?}, cred_json: {:?}, cred_def_json: {:?}, \ rev_reg_def_json: {:?}", wallet_handle, cred_id, cred_req_metadata_json, cred_json, cred_def_json, rev_reg_def_json); let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Prover( ProverCommand::StoreCredential( wallet_handle, cred_id, cred_req_metadata_json, cred_json, cred_def_json, rev_reg_def_json, Box::new(move |result| { let (err, out_cred_id) = prepare_result_1!(result, String::new()); trace!("indy_prover_store_credential: out_cred_id: {:?}", out_cred_id); let out_cred_id = 
ctypes::string_to_cstring(out_cred_id); cb(command_handle, err, out_cred_id.as_ptr()) }) )))); let res = prepare_result!(result); trace!("indy_prover_store_credential: <<< res: {:?}", res); res } /// Gets human readable credential by the given id. /// /// #Params /// wallet_handle: wallet handler (created by open_wallet). /// cred_id: Identifier by which requested credential is stored in the wallet /// cb: Callback that takes command result as parameter. /// /// #Returns /// credential json: /// { /// "referent": string, // cred_id in the wallet /// "attrs": {"key1":"raw_value1", "key2":"raw_value2"}, /// "schema_id": string, /// "cred_def_id": string, /// "rev_reg_id": Optional<string>, /// "cred_rev_id": Optional<string> /// } /// /// #Errors /// Annoncreds* /// Common* /// Wallet* #[no_mangle] pub extern fn indy_prover_get_credential(command_handle: CommandHandle, wallet_handle: WalletHandle, cred_id: *const c_char, cb: Option<extern fn( command_handle_: CommandHandle, err: ErrorCode, credential_json: *const c_char)>) -> ErrorCode { trace!("indy_prover_get_credential: >>> wallet_handle: {:?}, cred_id: {:?}", wallet_handle, cred_id); check_useful_c_str!(cred_id, ErrorCode::CommonInvalidParam3); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4); trace!("indy_prover_get_credential: entities >>> wallet_handle: {:?}, cred_id: {:?}", cred_id, cred_id); let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Prover( ProverCommand::GetCredential( wallet_handle, cred_id, Box::new(move |result| { let (err, credential_json) = prepare_result_1!(result, String::new()); trace!("indy_prover_get_credential: credential_json: {:?}", credential_json); let credential_json = ctypes::string_to_cstring(credential_json); cb(command_handle, err, credential_json.as_ptr()) }) )))); let res = prepare_result!(result); trace!("indy_prover_get_credential: <<< res: {:?}", res); res } /// Gets human readable credentials according to the filter. 
/// If filter is NULL, then all credentials are returned.
/// Credentials can be filtered by Issuer, credential_def and/or Schema.
///
/// NOTE: This method is deprecated because immediately returns all fetched credentials.
/// Use <indy_prover_search_credentials> to fetch records by small batches.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// filter_json: filter for credentials
///        {
///            "schema_id": string, (Optional)
///            "schema_issuer_did": string, (Optional)
///            "schema_name": string, (Optional)
///            "schema_version": string, (Optional)
///            "issuer_did": string, (Optional)
///            "cred_def_id": string, (Optional)
///        }
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// credentials json
///     [{
///         "referent": string, // cred_id in the wallet
///         "attrs": {"key1":"raw_value1", "key2":"raw_value2"},
///         "schema_id": string,
///         "cred_def_id": string,
///         "rev_reg_id": Optional<string>,
///         "cred_rev_id": Optional<string>
///     }]
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
#[deprecated(since="1.6.1", note="Please use indy_prover_search_credentials instead!")]
pub extern fn indy_prover_get_credentials(command_handle: CommandHandle,
                                          wallet_handle: WalletHandle,
                                          filter_json: *const c_char,
                                          cb: Option<extern fn(
                                              command_handle_: CommandHandle, err: ErrorCode,
                                              matched_credentials_json: *const c_char)>) -> ErrorCode {
    trace!("indy_prover_get_credentials: >>> wallet_handle: {:?}, filter_json: {:?}", wallet_handle, filter_json);

    // filter_json is optional (null means "return all credentials").
    check_useful_opt_c_str!(filter_json, ErrorCode::CommonInvalidParam3);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);

    trace!("indy_prover_get_credentials: entities >>> wallet_handle: {:?}, filter_json: {:?}", wallet_handle, filter_json);

    // Enqueue the command; the closure runs on completion and returns all
    // matching credentials at once (hence the deprecation) through cb.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::GetCredentials(
                    wallet_handle,
                    filter_json,
                    Box::new(move |result| {
                        let (err, matched_credentials_json) =
                            prepare_result_1!(result, String::new());
                        trace!("indy_prover_get_credentials: matched_credentials_json: {:?}", matched_credentials_json);
                        // Local CString binding keeps the pointer passed to cb
                        // valid for the duration of the callback invocation.
                        let matched_credentials_json = ctypes::string_to_cstring(matched_credentials_json);
                        cb(command_handle, err, matched_credentials_json.as_ptr())
                    })
                ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_get_credentials: <<< res: {:?}", res);

    res
}

/// Search for credentials stored in wallet.
/// Credentials can be filtered by tags created during saving of credential.
///
/// Instead of immediately returning of fetched credentials
/// this call returns search_handle that can be used later
/// to fetch records by small batches (with indy_prover_fetch_credentials).
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// query_json: Wql query filter for credentials searching based on tags.
///     where query: indy-sdk/docs/design/011-wallet-query-language/README.md
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// search_handle: Search handle that can be used later to fetch records by small batches (with indy_prover_fetch_credentials)
/// total_count: Total count of records
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_search_credentials(command_handle: CommandHandle,
                                             wallet_handle: WalletHandle,
                                             query_json: *const c_char,
                                             cb: Option<extern fn(
                                                 command_handle_: CommandHandle, err: ErrorCode,
                                                 search_handle: SearchHandle,
                                                 total_count: usize)>) -> ErrorCode {
    trace!("indy_prover_search_credentials: >>> wallet_handle: {:?}, query_json: {:?}", wallet_handle, query_json);

    // query_json is optional (null presumably means an unfiltered search — confirm).
    check_useful_opt_c_str!(query_json, ErrorCode::CommonInvalidParam3);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);

    trace!("indy_prover_search_credentials: entities >>> wallet_handle: {:?}, query_json: {:?}", wallet_handle, query_json);

    // Enqueue the command; on completion the closure forwards the search handle
    // and total record count directly to cb (no string conversion needed).
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::SearchCredentials(
                    wallet_handle,
                    query_json,
                    Box::new(move |result| {
                        let (err, handle, total_count) = prepare_result_2!(result, 0, 0);
                        cb(command_handle, err, handle, total_count)
                    })
                ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_search_credentials: <<< res: {:?}", res);

    res
}

/// Fetch next credentials for search.
///
/// #Params
/// search_handle: Search handle (created by indy_prover_search_credentials)
/// count: Count of credentials to fetch
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// credentials_json: List of human readable credentials:
///     [{
///         "referent": string, // cred_id in the wallet
///         "attrs": {"key1":"raw_value1", "key2":"raw_value2"},
///         "schema_id": string,
///         "cred_def_id": string,
///         "rev_reg_id": Optional<string>,
///         "cred_rev_id": Optional<string>
///     }]
/// NOTE: The list of length less than the requested count means credentials search iterator is completed.
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_fetch_credentials(command_handle: CommandHandle,
                                            search_handle: SearchHandle,
                                            count: usize,
                                            cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                                 credentials_json: *const c_char)>) -> ErrorCode {
    trace!("indy_prover_fetch_credentials: >>> search_handle: {:?}, count: {:?}", search_handle, count);

    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);

    trace!("indy_prover_fetch_credentials: entities >>> search_handle: {:?}, count: {:?}", search_handle, count);

    // Enqueue the command; the closure runs on completion and returns the next
    // batch of credentials (as a JSON list) through cb.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::FetchCredentials(
                    search_handle,
                    count,
                    Box::new(move |result| {
                        let (err, credentials_json) = prepare_result_1!(result, String::new());
                        trace!("indy_prover_fetch_credentials: credentials_json: {:?}", credentials_json);
                        // Local CString binding keeps the pointer passed to cb
                        // valid for the duration of the callback invocation.
                        let credentials_json = ctypes::string_to_cstring(credentials_json);
                        cb(command_handle, err, credentials_json.as_ptr())
                    })
                ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_fetch_credentials: <<< res: {:?}", res);

    res
}

/// Close credentials search (make search handle invalid)
///
/// #Params
/// search_handle: Search handle (created by indy_prover_search_credentials)
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_close_credentials_search(command_handle: CommandHandle,
                                                   search_handle: SearchHandle,
                                                   cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode)>) -> ErrorCode {
    trace!("indy_prover_close_credentials_search: >>> search_handle: {:?}", search_handle);

    // NOTE(review): cb is the 3rd parameter but is checked with
    // CommonInvalidParam5 — inconsistent with the Param<N> convention used by
    // the other functions in this file. Left unchanged because the returned
    // code is part of the observable FFI contract; confirm before fixing.
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);

    trace!("indy_prover_close_credentials_search: entities >>> search_handle: {:?}", search_handle);

    // Enqueue the command; the closure only reports success/failure of closing
    // the search — there is no payload to return.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::CloseCredentialsSearch(
                    search_handle,
                    Box::new(move |result| {
                        let err = prepare_result!(result);
                        trace!("indy_prover_close_credentials_search:");
                        cb(command_handle, err)
                    })
                ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_close_credentials_search: <<< res: {:?}", res);

    res
}

/// Gets human readable credentials matching the given proof request.
///
/// NOTE: This method is deprecated because immediately returns all fetched credentials.
/// Use <indy_prover_search_credentials_for_proof_req> to fetch records by small batches.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// proof_request_json: proof request json
///     {
///         "name": string,
///         "version": string,
///         "nonce": string,
///         "requested_attributes": { // set of requested attributes
///              "<attr_referent>": <attr_info>, // see below
///              ...,
///         },
///         "requested_predicates": { // set of requested predicates
///              "<predicate_referent>": <predicate_info>, // see below
///              ...,
///          },
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval for each attribute
///                        // (can be overridden on attribute level)
///     }
/// cb: Callback that takes command result as parameter.
///
/// where
/// attr_referent: Proof-request local identifier of requested attribute
/// attr_info: Describes requested attribute
///     {
///         "name": string, // attribute name, (case insensitive and ignore spaces)
///         "restrictions": Optional<filter_json>, // see above
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval this attribute
///                        // (overrides proof level interval)
///     }
/// predicate_referent: Proof-request local identifier of requested attribute predicate
/// predicate_info: Describes requested attribute predicate
///     {
///         "name": attribute name, (case insensitive and ignore spaces)
///         "p_type": predicate type (Currently ">=" only)
///         "p_value": int predicate value
///         "restrictions": Optional<filter_json>, // see above
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval this attribute
///                        // (overrides proof level interval)
///     }
/// non_revoc_interval: Defines non-revocation interval
///     {
///         "from": Optional<int>, // timestamp of interval beginning
///         "to": Optional<int>, // timestamp of interval ending
///     }
///
/// #Returns
/// credentials_json: json with credentials for the given proof request.
///     {
///         "requested_attrs": {
///             "<attr_referent>": [{ cred_info: <credential_info>, interval: Optional<non_revoc_interval> }],
///             ...,
///         },
///         "requested_predicates": {
///             "requested_predicate_1_referent": [{ cred_info: <credential_info>, timestamp: Optional<integer> }, { cred_info: <credential_2_info>, timestamp: Optional<integer> }],
///             "requested_predicate_2_referent": [{ cred_info: <credential_2_info>, timestamp: Optional<integer> }]
///         }
///     }, where credential is
///     {
///         "referent": <string>,
///         "attrs": {"attr_name" : "attr_raw_value"},
///         "schema_id": string,
///         "cred_def_id": string,
///         "rev_reg_id": Optional<int>,
///         "cred_rev_id": Optional<int>,
///     }
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[deprecated(since="1.6.1", note="Please use indy_prover_search_credentials_for_proof_req instead!")]
#[no_mangle]
pub extern fn indy_prover_get_credentials_for_proof_req(command_handle: CommandHandle,
                                                        wallet_handle: WalletHandle,
                                                        proof_request_json: *const c_char,
                                                        cb: Option<extern fn(
                                                            command_handle_: CommandHandle, err: ErrorCode,
                                                            credentials_json: *const c_char)>) -> ErrorCode {
    trace!("indy_prover_get_credentials_for_proof_req: >>> wallet_handle: {:?}, proof_request_json: {:?}", wallet_handle, proof_request_json);

    // Rebind the raw JSON argument to a validated ProofRequest; the macro
    // presumably returns early with the given ErrorCode on failure.
    check_useful_json!(proof_request_json, ErrorCode::CommonInvalidParam3, ProofRequest);
    // NOTE(review): cb is the 4th parameter but is checked with
    // CommonInvalidParam5 — inconsistent with the Param<N> convention used
    // elsewhere. Left unchanged: the returned code is part of the observable
    // FFI contract; confirm before fixing.
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);

    trace!("indy_prover_get_credentials_for_proof_req: entities >>> wallet_handle: {:?}, proof_request_json: {:?}", wallet_handle, proof_request_json);

    // Enqueue the command; the closure runs on completion and returns all
    // matching credentials at once (hence the deprecation) through cb.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(
            AnoncredsCommand::Prover(
                ProverCommand::GetCredentialsForProofReq(
                    wallet_handle,
                    proof_request_json,
                    Box::new(move |result| {
                        let (err, credentials_json) = prepare_result_1!(result, String::new());
                        trace!("indy_prover_get_credentials_for_proof_req: credentials_json: {:?}", credentials_json);
                        // Local CString binding keeps the pointer passed to cb
                        // valid for the duration of the callback invocation.
                        let credentials_json = ctypes::string_to_cstring(credentials_json);
                        cb(command_handle, err, credentials_json.as_ptr())
                    })
                ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_get_credentials_for_proof_req: <<< res: {:?}", res);

    res
}

/// Search for credentials matching the given proof request.
///
/// Instead of immediately returning of fetched credentials
/// this call returns search_handle that can be used later
/// to fetch records by small batches (with indy_prover_fetch_credentials_for_proof_req).
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// proof_request_json: proof request json
///     {
///         "name": string,
///         "version": string,
///         "nonce": string,
///         "requested_attributes": { // set of requested attributes
///              "<attr_referent>": <attr_info>, // see below
///              ...,
///         },
///         "requested_predicates": { // set of requested predicates
///              "<predicate_referent>": <predicate_info>, // see below
///              ...,
///          },
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval for each attribute
///                        // (can be overridden on attribute level)
///     }
/// extra_query_json:(Optional) List of extra queries that will be applied to correspondent attribute/predicate:
///     {
///         "<attr_referent>": <wql query>,
///         "<predicate_referent>": <wql query>,
///     }
/// where wql query: indy-sdk/docs/design/011-wallet-query-language/README.md
/// cb: Callback that takes command result as parameter.
/// /// #Returns /// search_handle: Search handle that can be used later to fetch records by small batches (with indy_prover_fetch_credentials_for_proof_req) /// /// #Errors /// Annoncreds* /// Common* /// Wallet* #[no_mangle] pub extern fn indy_prover_search_credentials_for_proof_req(command_handle: CommandHandle, wallet_handle: WalletHandle, proof_request_json: *const c_char, extra_query_json: *const c_char, cb: Option<extern fn( command_handle_: CommandHandle, err: ErrorCode, search_handle: SearchHandle)>) -> ErrorCode { trace!("indy_prover_search_credentials_for_proof_req: >>> wallet_handle: {:?}, proof_request_json: {:?}, extra_query_json: {:?}", wallet_handle, proof_request_json, extra_query_json); check_useful_json!(proof_request_json, ErrorCode::CommonInvalidParam3, ProofRequest); check_useful_opt_json!(extra_query_json, ErrorCode::CommonInvalidParam4, ProofRequestExtraQuery); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5); trace!("indy_prover_search_credentials_for_proof_req: entities >>> wallet_handle: {:?}, proof_request_json: {:?}, extra_query_json: {:?}", wallet_handle, proof_request_json, extra_query_json); let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Prover( ProverCommand::SearchCredentialsForProofReq( wallet_handle, proof_request_json, extra_query_json, Box::new(move |result| { let (err, search_handle) = prepare_result_1!(result, 0); trace!("indy_prover_search_credentials_for_proof_req: search_handle: {:?}", search_handle); cb(command_handle, err, search_handle) }), )))); let res = prepare_result!(result); trace!("indy_prover_search_credentials_for_proof_req: <<< res: {:?}", res); res } /// Fetch next credentials for the requested item using proof request search /// handle (created by indy_prover_search_credentials_for_proof_req). 
/// /// #Params /// search_handle: Search handle (created by indy_prover_search_credentials_for_proof_req) /// item_referent: Referent of attribute/predicate in the proof request /// count: Count of credentials to fetch /// cb: Callback that takes command result as parameter. /// /// #Returns /// credentials_json: List of credentials for the given proof request. /// [{ /// cred_info: <credential_info>, /// interval: Optional<non_revoc_interval> /// }] /// where /// credential_info: /// { /// "referent": <string>, /// "attrs": {"attr_name" : "attr_raw_value"}, /// "schema_id": string, /// "cred_def_id": string, /// "rev_reg_id": Optional<int>, /// "cred_rev_id": Optional<int>, /// } /// non_revoc_interval: /// { /// "from": Optional<int>, // timestamp of interval beginning /// "to": Optional<int>, // timestamp of interval ending /// } /// NOTE: The list of length less than the requested count means that search iterator /// correspondent to the requested <item_referent> is completed. /// /// #Errors /// Annoncreds* /// Common* /// Wallet* #[no_mangle] pub extern fn indy_prover_fetch_credentials_for_proof_req(command_handle: CommandHandle, search_handle: SearchHandle, item_referent: *const c_char, count: usize, cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode, credentials_json: *const c_char)>) -> ErrorCode { trace!("indy_prover_fetch_credentials_for_proof_req: >>> search_handle: {:?}, count: {:?}", search_handle, count); check_useful_c_str!(item_referent, ErrorCode::CommonInvalidParam4); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5); trace!("indy_prover_fetch_credentials_for_proof_req: entities >>> search_handle: {:?}, count: {:?}", search_handle, count); let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Prover( ProverCommand::FetchCredentialForProofReq( search_handle, item_referent, count, Box::new(move |result| { let (err, credentials_json) = prepare_result_1!(result, String::new()); 
trace!("indy_prover_fetch_credentials_for_proof_request: credentials_json: {:?}", credentials_json); let credentials_json = ctypes::string_to_cstring(credentials_json); cb(command_handle, err, credentials_json.as_ptr()) }), )))); let res = prepare_result!(result); trace!("indy_prover_fetch_credentials_for_proof_req: <<< res: {:?}", res); res } /// Close credentials search for proof request (make search handle invalid) /// /// #Params /// search_handle: Search handle (created by indy_prover_search_credentials_for_proof_req) /// /// #Errors /// Annoncreds* /// Common* /// Wallet* #[no_mangle] pub extern fn indy_prover_close_credentials_search_for_proof_req(command_handle: CommandHandle, search_handle: SearchHandle, cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode)>) -> ErrorCode { trace!("indy_prover_close_credentials_search_for_proof_req: >>> search_handle: {:?}", search_handle); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5); trace!("indy_prover_close_credentials_search_for_proof_req: entities >>> search_handle: {:?}", search_handle); let result = CommandExecutor::instance() .send(Command::Anoncreds( AnoncredsCommand::Prover( ProverCommand::CloseCredentialsSearchForProofReq( search_handle, Box::new(move |result| { let err = prepare_result!(result); trace!("indy_prover_close_credentials_search:"); cb(command_handle, err) }), )))); let res = prepare_result!(result); trace!("indy_prover_close_credentials_search_for_proof_req: <<< res: {:?}", res); res } /// Creates a proof according to the given proof request /// Either a corresponding credential with optionally revealed attributes or self-attested attribute must be provided /// for each requested attribute (see indy_prover_get_credentials_for_pool_req). /// A proof request may request multiple credentials from different schemas and different issuers. /// All required schemas, public keys and revocation registries must be provided. /// The proof request also contains nonce. 
/// The proof contains either proof or self-attested attribute value for each requested attribute.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// proof_request_json: proof request json
///     {
///         "name": string,
///         "version": string,
///         "nonce": string,
///         "requested_attributes": { // set of requested attributes
///              "<attr_referent>": <attr_info>, // see below
///              ...,
///         },
///         "requested_predicates": { // set of requested predicates
///              "<predicate_referent>": <predicate_info>, // see below
///              ...,
///          },
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval for each attribute
///                        // (can be overridden on attribute level)
///     }
/// requested_credentials_json: either a credential or self-attested attribute for each requested attribute
///     {
///         "self_attested_attributes": {
///             "self_attested_attribute_referent": string
///         },
///         "requested_attributes": {
///             "requested_attribute_referent_1": {"cred_id": string, "timestamp": Optional<number>, revealed: <bool> }},
///             "requested_attribute_referent_2": {"cred_id": string, "timestamp": Optional<number>, revealed: <bool> }}
///         },
///         "requested_predicates": {
///             "requested_predicates_referent_1": {"cred_id": string, "timestamp": Optional<number> }},
///         }
///     }
/// master_secret_id: the id of the master secret stored in the wallet
/// schemas_json: all schemas json participating in the proof request
///     {
///         <schema1_id>: <schema1_json>,
///         <schema2_id>: <schema2_json>,
///         <schema3_id>: <schema3_json>,
///     }
/// credential_defs_json: all credential definitions json participating in the proof request
///     {
///         "cred_def1_id": <credential_def1_json>,
///         "cred_def2_id": <credential_def2_json>,
///         "cred_def3_id": <credential_def3_json>,
///     }
/// rev_states_json: all revocation states json participating in the proof request
///     {
///         "rev_reg_def1_id": {
///             "timestamp1": <rev_state1>,
///             "timestamp2": <rev_state2>,
///         },
///         "rev_reg_def2_id": {
///             "timestamp3": <rev_state3>
///         },
///         "rev_reg_def3_id": {
///             "timestamp4": <rev_state4>
///         },
///     }
/// cb: Callback that takes command result as parameter.
///
/// where
/// wql query: indy-sdk/docs/design/011-wallet-query-language/README.md
/// attr_referent: Proof-request local identifier of requested attribute
/// attr_info: Describes requested attribute
///     {
///         "name": string, // attribute name, (case insensitive and ignore spaces)
///         "restrictions": Optional<wql query>,
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval this attribute
///                        // (overrides proof level interval)
///     }
/// predicate_referent: Proof-request local identifier of requested attribute predicate
/// predicate_info: Describes requested attribute predicate
///     {
///         "name": attribute name, (case insensitive and ignore spaces)
///         "p_type": predicate type (Currently >= only)
///         "p_value": predicate value
///         "restrictions": Optional<wql query>,
///         "non_revoked": Optional<<non_revoc_interval>>, // see below,
///                        // If specified prover must proof non-revocation
///                        // for date in this interval this attribute
///                        // (overrides proof level interval)
///     }
/// non_revoc_interval: Defines non-revocation interval
///     {
///         "from": Optional<int>, // timestamp of interval beginning
///         "to": Optional<int>, // timestamp of interval ending
///     }
///
/// #Returns
/// Proof json
/// For each requested attribute either a proof (with optionally revealed attribute value) or
/// self-attested attribute value is provided.
/// Each proof is associated with a credential and corresponding schema_id, cred_def_id, rev_reg_id and timestamp.
/// There is also aggregated proof part common for all credential proofs.
///     {
///         "requested_proof": {
///             "revealed_attrs": {
///                 "requested_attr1_id": {sub_proof_index: number, raw: string, encoded: string},
///                 "requested_attr4_id": {sub_proof_index: number, raw: string, encoded: string},
///             },
///             "unrevealed_attrs": {
///                 "requested_attr3_id": {sub_proof_index: number}
///             },
///             "self_attested_attrs": {
///                 "requested_attr2_id": self_attested_value,
///             },
///             "requested_predicates": {
///                 "requested_predicate_1_referent": {sub_proof_index: int},
///                 "requested_predicate_2_referent": {sub_proof_index: int},
///             }
///         }
///         "proof": {
///             "proofs": [ <credential_proof>, <credential_proof>, <credential_proof> ],
///             "aggregated_proof": <aggregated_proof>
///         }
///         "identifiers": [{schema_id, cred_def_id, Optional<rev_reg_id>, Optional<timestamp>}]
///     }
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_create_proof(command_handle: CommandHandle,
                                       wallet_handle: WalletHandle,
                                       proof_req_json: *const c_char,
                                       requested_credentials_json: *const c_char,
                                       master_secret_id: *const c_char,
                                       schemas_json: *const c_char,
                                       credential_defs_json: *const c_char,
                                       rev_states_json: *const c_char,
                                       cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
                                                            proof_json: *const c_char)>) -> ErrorCode {
    trace!("indy_prover_create_proof: >>> wallet_handle: {:?}, proof_req_json: {:?}, requested_credentials_json: {:?}, master_secret_id: {:?}, \
    schemas_json: {:?}, credential_defs_json: {:?}, rev_states_json: {:?}",
           wallet_handle, proof_req_json, requested_credentials_json, master_secret_id, schemas_json, credential_defs_json, rev_states_json);

    // Rebind each raw C argument to a validated Rust value; the check_useful_*
    // macros presumably return early with the given ErrorCode on a null pointer
    // or unparsable JSON (macros defined outside this file — confirm).
    check_useful_json!(proof_req_json, ErrorCode::CommonInvalidParam3, ProofRequest);
    check_useful_json!(requested_credentials_json, ErrorCode::CommonInvalidParam4, RequestedCredentials);
    check_useful_c_str!(master_secret_id, ErrorCode::CommonInvalidParam5);
    check_useful_json!(schemas_json, ErrorCode::CommonInvalidParam6, HashMap<String, Schema>);
    check_useful_json!(credential_defs_json, ErrorCode::CommonInvalidParam7, HashMap<String, CredentialDefinition>);
    // rev_states_json maps rev_reg_def id -> (timestamp -> revocation state).
    check_useful_json!(rev_states_json, ErrorCode::CommonInvalidParam8, HashMap<String, HashMap<u64, RevocationState>>);
    check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam9);

    trace!("indy_prover_create_proof: entities >>> wallet_handle: {:?}, proof_req_json: {:?}, requested_credentials_json: {:?}, master_secret_id: {:?}, \
    schemas_json: {:?}, credential_defs_json: {:?}, rev_states_json: {:?}",
           wallet_handle, proof_req_json, requested_credentials_json, master_secret_id, schemas_json, credential_defs_json, rev_states_json);

    // Enqueue the command; the closure runs on completion and returns the
    // generated proof JSON to the caller through cb.
    let result = CommandExecutor::instance()
        .send(Command::Anoncreds(AnoncredsCommand::Prover(ProverCommand::CreateProof(
            wallet_handle,
            proof_req_json,
            requested_credentials_json,
            master_secret_id,
            schemas_json,
            credential_defs_json,
            rev_states_json,
            Box::new(move |result| {
                let (err, proof_json) = prepare_result_1!(result, String::new());
                trace!("indy_prover_create_proof: proof_json: {:?}", proof_json);
                // Local CString binding keeps the pointer passed to cb valid
                // for the duration of the callback invocation.
                let proof_json = ctypes::string_to_cstring(proof_json);
                cb(command_handle, err, proof_json.as_ptr())
            })
        ))));

    // Error code (if any) from submitting the command, not the final operation result.
    let res = prepare_result!(result);

    trace!("indy_prover_create_proof: <<< res: {:?}", res);

    res
}

/// Verifies a proof (of multiple credential).
/// All required schemas, public keys and revocation registries must be provided.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// proof_request_json: proof request json /// { /// "name": string, /// "version": string, /// "nonce": string, /// "requested_attributes": { // set of requested attributes /// "<attr_referent>": <attr_info>, // see below /// ..., /// }, /// "requested_predicates": { // set of requested predicates /// "<predicate_referent>": <predicate_info>, // see below /// ..., /// }, /// "non_revoked": Optional<<non_revoc_interval>>, // see below, /// // If specified prover must proof non-revocation /// // for date in this interval for each attribute /// // (can be overridden on attribute level) /// } /// proof_json: created for request proof json /// { /// "requested_proof": { /// "revealed_attrs": { /// "requested_attr1_id": {sub_proof_index: number, raw: string, encoded: string}, /// "requested_attr4_id": {sub_proof_index: number: string, encoded: string}, /// }, /// "unrevealed_attrs": { /// "requested_attr3_id": {sub_proof_index: number} /// }, /// "self_attested_attrs": { /// "requested_attr2_id": self_attested_value, /// }, /// "requested_predicates": { /// "requested_predicate_1_referent": {sub_proof_index: int}, /// "requested_predicate_2_referent": {sub_proof_index: int}, /// } /// } /// "proof": { /// "proofs": [ <credential_proof>, <credential_proof>, <credential_proof> ], /// "aggregated_proof": <aggregated_proof> /// } /// "identifiers": [{schema_id, cred_def_id, Optional<rev_reg_id>, Optional<timestamp>}] /// } /// schemas_json: all schema jsons participating in the proof /// { /// <schema1_id>: <schema1_json>, /// <schema2_id>: <schema2_json>, /// <schema3_id>: <schema3_json>, /// } /// credential_defs_json: all credential definitions json participating in the proof /// { /// "cred_def1_id": <credential_def1_json>, /// "cred_def2_id": <credential_def2_json>, /// "cred_def3_id": <credential_def3_json>, /// } /// rev_reg_defs_json: all revocation registry definitions json participating in the proof /// { /// "rev_reg_def1_id": <rev_reg_def1_json>, /// 
"rev_reg_def2_id": <rev_reg_def2_json>, /// "rev_reg_def3_id": <rev_reg_def3_json>, /// } /// rev_regs_json: all revocation registries json participating in the proof /// { /// "rev_reg_def1_id": { /// "timestamp1": <rev_reg1>, /// "timestamp2": <rev_reg2>, /// }, /// "rev_reg_def2_id": { /// "timestamp3": <rev_reg3> /// }, /// "rev_reg_def3_id": { /// "timestamp4": <rev_reg4> /// }, /// } /// cb: Callback that takes command result as parameter. /// /// #Returns /// valid: true - if signature is valid, false - otherwise /// /// #Errors /// Annoncreds* /// Common* /// Wallet* #[no_mangle] pub extern fn indy_verifier_verify_proof(command_handle: CommandHandle, proof_request_json: *const c_char, proof_json: *const c_char, schemas_json: *const c_char, credential_defs_json: *const c_char, rev_reg_defs_json: *const c_char, rev_regs_json: *const c_char, cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode, valid: bool)>) -> ErrorCode { trace!("indy_verifier_verify_proof: >>> proof_request_json: {:?}, proof_json: {:?}, schemas_json: {:?}, credential_defs_json: {:?}, \ rev_reg_defs_json: {:?}, rev_regs_json: {:?}", proof_request_json, proof_json, schemas_json, credential_defs_json, rev_reg_defs_json, rev_regs_json); check_useful_json!(proof_request_json, ErrorCode::CommonInvalidParam2, ProofRequest); check_useful_json!(proof_json, ErrorCode::CommonInvalidParam3, Proof); check_useful_json!(schemas_json, ErrorCode::CommonInvalidParam4, HashMap<String, Schema>); check_useful_json!(credential_defs_json, ErrorCode::CommonInvalidParam5, HashMap<String, CredentialDefinition>); check_useful_json!(rev_reg_defs_json, ErrorCode::CommonInvalidParam6, HashMap<String, RevocationRegistryDefinition>); check_useful_json!(rev_regs_json, ErrorCode::CommonInvalidParam7, HashMap<String, HashMap<u64, RevocationRegistry>>); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8); trace!("indy_verifier_verify_proof: entities >>> proof_request_json: {:?}, proof_json: {:?}, 
schemas_json: {:?}, credential_defs_json: {:?}, \ rev_reg_defs_json: {:?}, rev_regs_json: {:?}", proof_request_json, proof_json, schemas_json, credential_defs_json, rev_reg_defs_json, rev_regs_json); let result = CommandExecutor::instance() .send(Command::Anoncreds(AnoncredsCommand::Verifier(VerifierCommand::VerifyProof( proof_request_json, proof_json, schemas_json, credential_defs_json, rev_reg_defs_json, rev_regs_json, Box::new(move |result| { let (err, valid) = prepare_result_1!(result, false); trace!("indy_verifier_verify_proof: valid: {:?}", valid); cb(command_handle, err, valid) }) )))); let res = prepare_result!(result); trace!("indy_verifier_verify_proof: <<< res: {:?}", res); res } /// Create revocation state for a credential in the particular time moment. /// /// #Params /// command_handle: command handle to map callback to user context /// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails /// rev_reg_def_json: revocation registry definition json /// rev_reg_delta_json: revocation registry definition delta json /// timestamp: time represented as a total number of seconds from Unix Epoch /// cred_rev_id: user credential revocation id in revocation registry /// cb: Callback that takes command result as parameter /// /// #Returns /// revocation state json: /// { /// "rev_reg": <revocation registry>, /// "witness": <witness>, /// "timestamp" : integer /// } /// /// #Errors /// Common* /// Wallet* /// Anoncreds* #[no_mangle] pub extern fn indy_create_revocation_state(command_handle: CommandHandle, blob_storage_reader_handle: IndyHandle, rev_reg_def_json: *const c_char, rev_reg_delta_json: *const c_char, timestamp: u64, cred_rev_id: *const c_char, cb: Option<extern fn( command_handle_: CommandHandle, err: ErrorCode, rev_state_json: *const c_char)>) -> ErrorCode { trace!("indy_create_revocation_state: >>> blob_storage_reader_handle: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, timestamp: 
{:?}, \ cred_rev_id: {:?}", blob_storage_reader_handle, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id); check_useful_json!(rev_reg_def_json, ErrorCode::CommonInvalidParam3, RevocationRegistryDefinition); check_useful_json!(rev_reg_delta_json, ErrorCode::CommonInvalidParam4, RevocationRegistryDelta); check_useful_c_str!(cred_rev_id, ErrorCode::CommonInvalidParam6); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam7); trace!("indy_create_revocation_state: entities >>> blob_storage_reader_handle: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, timestamp: {:?}, \ cred_rev_id: {:?}", blob_storage_reader_handle, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id); let result = CommandExecutor::instance() .send(Command::Anoncreds(AnoncredsCommand::Prover(ProverCommand::CreateRevocationState( blob_storage_reader_handle, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id, Box::new(move |result| { let (err, rev_state_json) = prepare_result_1!(result, String::new()); trace!("indy_create_revocation_state: rev_state_json: {:?}", rev_state_json); let rev_state_json = ctypes::string_to_cstring(rev_state_json); cb(command_handle, err, rev_state_json.as_ptr()) }) )))); let res = prepare_result!(result); trace!("indy_create_revocation_state: <<< res: {:?}", res); res } /// Create new revocation state for a credential based on existed state /// at the particular time moment (to reduce calculation time). 
/// /// #Params /// command_handle: command handle to map callback to user context /// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails /// rev_state_json: revocation registry state json /// rev_reg_def_json: revocation registry definition json /// rev_reg_delta_json: revocation registry definition delta json /// timestamp: time represented as a total number of seconds from Unix Epoch /// cred_rev_id: user credential revocation id in revocation registry /// cb: Callback that takes command result as parameter /// /// #Returns /// revocation state json: /// { /// "rev_reg": <revocation registry>, /// "witness": <witness>, /// "timestamp" : integer /// } /// /// #Errors /// Common* /// Wallet* /// Anoncreds* #[no_mangle] pub extern fn indy_update_revocation_state(command_handle: CommandHandle, blob_storage_reader_handle: IndyHandle, rev_state_json: *const c_char, rev_reg_def_json: *const c_char, rev_reg_delta_json: *const c_char, timestamp: u64, cred_rev_id: *const c_char, cb: Option<extern fn( command_handle_: CommandHandle, err: ErrorCode, updated_rev_state_json: *const c_char)>) -> ErrorCode { trace!("indy_update_revocation_state: >>> blob_storage_reader_handle: {:?}, rev_state_json: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, \ timestamp: {:?}, cred_rev_id: {:?}", blob_storage_reader_handle, rev_state_json, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id); check_useful_json!(rev_state_json, ErrorCode::CommonInvalidParam3, RevocationState); check_useful_json!(rev_reg_def_json, ErrorCode::CommonInvalidParam4, RevocationRegistryDefinition); check_useful_json!(rev_reg_delta_json, ErrorCode::CommonInvalidParam5, RevocationRegistryDelta); check_useful_c_str!(cred_rev_id, ErrorCode::CommonInvalidParam7); check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8); trace!("indy_update_revocation_state: entities >>> blob_storage_reader_handle: {:?}, rev_state_json: {:?}, 
rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, \ timestamp: {:?}, cred_rev_id: {:?}", blob_storage_reader_handle, rev_state_json, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id); let result = CommandExecutor::instance() .send(Command::Anoncreds(AnoncredsCommand::Prover(ProverCommand::UpdateRevocationState( blob_storage_reader_handle, rev_state_json, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id, Box::new(move |result| { let (err, updated_rev_info_json) = prepare_result_1!(result, String::new()); trace!("indy_update_revocation_state: updated_rev_info_json: {:?}", updated_rev_info_json); let updated_rev_info_json = ctypes::string_to_cstring(updated_rev_info_json); cb(command_handle, err, updated_rev_info_json.as_ptr()) }) )))); let res = prepare_result!(result); trace!("indy_update_revocation_state: <<< res: {:?}", res); res }
srottem/indy-sdk
libindy/src/api/anoncreds.rs
Rust
apache-2.0
91,966
/*!
 * IE10 viewport hack for Surface/desktop Windows 8 bug
 * Copyright 2014-2017 The Bootstrap Authors
 * Copyright 2014-2017 Twitter, Inc.
 * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
 */

// IE10 in Windows Phone 8 misapplies `@-ms-viewport { width: device-width }`,
// rendering pages at desktop width. Injecting a `width:auto` override restores
// the expected mobile layout. See the Getting Started docs for details:
// https://getbootstrap.com/getting-started/#support-ie10-width
(function () {
  'use strict'

  // Only IE Mobile 10 is affected; every other user agent bails out here.
  if (!/IEMobile\/10\.0/.test(navigator.userAgent)) {
    return
  }

  var styleEl = document.createElement('style')
  var fixRule = document.createTextNode('@-ms-viewport{width:auto!important}')

  styleEl.appendChild(fixRule)
  document.head.appendChild(styleEl)
}())
Recks11/theLXGweb
src/main/webapp/resources/lxg_fiesta/js/ie10-viewport-bug-workaround.js
JavaScript
apache-2.0
710
/* * Copyright 2022 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.apiv1.internalmaterials.representers.materials; import com.thoughtworks.go.api.base.OutputWriter; import com.thoughtworks.go.config.materials.ScmMaterialConfig; import com.thoughtworks.go.config.materials.perforce.P4MaterialConfig; import java.util.function.Consumer; public abstract class ScmMaterialRepresenter<T extends ScmMaterialConfig> implements MaterialRepresenter<T> { @Override public Consumer<OutputWriter> toJSON(T scmMaterialConfig) { return jsonWriter -> { if (!(scmMaterialConfig instanceof P4MaterialConfig)) { jsonWriter.add("url", scmMaterialConfig.getUriForDisplay()); } jsonWriter.add("name", scmMaterialConfig.getName()); jsonWriter.add("auto_update", scmMaterialConfig.isAutoUpdate()); }; } }
Skarlso/gocd
api/api-internal-materials-v1/src/main/java/com/thoughtworks/go/apiv1/internalmaterials/representers/materials/ScmMaterialRepresenter.java
Java
apache-2.0
1,440
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002, 2010 Oracle and/or its affiliates.  All rights reserved.
 *
 * $Id$
 */

package com.sleepycat.collections.test;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import com.sleepycat.collections.CurrentTransaction;
import com.sleepycat.db.Environment;
import com.sleepycat.util.test.TestEnv;

/**
 * Regression test for SR [#15721]: verifies that the per-environment
 * {@link CurrentTransaction} singleton is NOT reclaimed by the garbage
 * collector while its attached environment remains open.
 *
 * @author Chao Huang
 */
public class TestSR15721 extends TestCase {

    /**
     * Runs a command line collection test.
     * @see #usage
     */
    public static void main(String[] args) {
        if (args.length == 1 &&
            (args[0].equals("-h") || args[0].equals("-help"))) {
            usage();
        } else {
            // Exit status reflects the JUnit result so the test can be
            // driven from shell scripts.
            junit.framework.TestResult tr =
                junit.textui.TestRunner.run(suite());
            if (tr.errorCount() > 0 ||
                tr.failureCount() > 0) {
                System.exit(1);
            } else {
                System.exit(0);
            }
        }
    }

    // Prints command-line usage and exits with status 2.
    private static void usage() {
        System.out.println(
            "Usage: java com.sleepycat.collections.test.TestSR15721" +
            " [-h | -help]\n");
        System.exit(2);
    }

    public static Test suite() {
        TestSuite suite = new TestSuite(TestSR15721.class);
        return suite;
    }

    // Transactional test environment, opened in setUp and closed in tearDown.
    private Environment env;
    // Singleton under test; re-fetched via getInstance() during the test.
    private CurrentTransaction currentTxn;

    @Override
    public void setUp()
        throws Exception {

        env = TestEnv.TXN.open("TestSR15721");
        currentTxn = CurrentTransaction.getInstance(env);
    }

    @Override
    public void tearDown() {
        try {
            if (env != null) {
                env.close();
            }
        } catch (Exception e) {
            // Close failures are logged, not rethrown, so they don't mask
            // the real test outcome.
            System.out.println("Ignored exception during tearDown: " + e);
        } finally {
            /* Ensure that GC can cleanup. */
            env = null;
            currentTxn = null;
        }
    }

    /**
     * Tests that the CurrentTransaction instance is not reclaimed by GC
     * while the attached environment is still open.  The instance identity
     * is tracked via {@code hashCode()} snapshots taken before and after
     * forcing garbage collection.  [#15721]
     */
    public void testSR15721Fix()
        throws Exception {

        int hash = currentTxn.hashCode();
        int hash2 = -1;

        // getInstance() must hand back the same instance for the same env.
        currentTxn = CurrentTransaction.getInstance(env);
        hash2 = currentTxn.hashCode();
        assertTrue(hash == hash2);

        // Drop our only strong reference while a transaction is active.
        currentTxn.beginTransaction(null);
        currentTxn = null;
        hash2 = -1;

        // Pressure the heap: each huge allocation is expected to fail with
        // OutOfMemoryError, which encourages the collector to reclaim any
        // weakly-reachable objects before we re-check the singleton.
        for (int i = 0; i < 10; i += 1) {
            byte[] x = null;
            try {
                x = new byte[Integer.MAX_VALUE - 1];
                fail();
            } catch (OutOfMemoryError expected) {
            }
            assertNull(x);

            System.gc();
        }

        // The same instance must still be returned after the GC pressure.
        currentTxn = CurrentTransaction.getInstance(env);
        hash2 = currentTxn.hashCode();
        currentTxn.commitTransaction();

        assertTrue(hash == hash2);
    }
}
racker/omnibus
source/db-5.0.26.NC/test/scr024/src/com/sleepycat/collections/test/TestSR15721.java
Java
apache-2.0
2,986
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002, 2010 Oracle and/or its affiliates.  All rights reserved.
 *
 * $Id$
 */

package collections.ship.entity;

/**
 * A Part represents the combined key/data pair for a part entity.
 *
 * <p>In this sample, Part is created from the stored key/data entry using a
 * SerialSerialBinding.  See {@link SampleViews.PartBinding} for details.
 * Since this class is not used directly for data storage, it does not need to
 * be Serializable.</p>
 *
 * @author Mark Hayes
 */
public class Part {

    // Immutable value object: all state is set once in the constructor.
    private final String number;
    private final String name;
    private final String color;
    private final Weight weight;
    private final String city;

    public Part(String number, String name, String color, Weight weight,
                String city) {
        this.number = number;
        this.name = name;
        this.color = color;
        this.weight = weight;
        this.city = city;
    }

    public final String getNumber() {
        return number;
    }

    public final String getName() {
        return name;
    }

    public final String getColor() {
        return color;
    }

    public final Weight getWeight() {
        return weight;
    }

    public final String getCity() {
        return city;
    }

    @Override
    public String toString() {
        return new StringBuilder("[Part: number=").append(number)
                                                  .append(" name=").append(name)
                                                  .append(" color=").append(color)
                                                  .append(" weight=").append(weight)
                                                  .append(" city=").append(city)
                                                  .append(']')
                                                  .toString();
    }
}
racker/omnibus
source/db-5.0.26.NC/examples_java/src/collections/ship/entity/Part.java
Java
apache-2.0
1,515
// Environment-settings page for the Google Compute Engine cloud.
// Builds the credentials form (OAuth service-account setup) and returns it;
// the form offers two entry modes — manual fields or a JSON key upload —
// toggled by a button group that shows/hides the matching container.
Scalr.regPage('Scalr.ui.account2.environments.clouds.gce', function (loadParams, moduleParams) {
    var params = moduleParams['params'];

    var form = Ext.create('Ext.form.Panel', {
        bodyCls: 'x-container-fieldset',
        fieldDefaults: {
            anchor: '100%',
            labelWidth: 110
        },
        autoScroll: true,
        items: [{
            xtype: 'component',
            cls: 'x-fieldset-subheader',
            html: 'OAuth Service Account'
        },{
            // Flag submitted to the backend marking GCE as enabled.
            xtype: 'hidden',
            name: 'gce.is_enabled',
            value: 'on'
        },{
            // Mode switch between 'manual' and 'jsonkey'; it is not submitted
            // itself (submitValue: false) — it only drives visibility below.
            xtype: 'buttongroupfield',
            margin: '0 0 12',
            layout: 'hbox',
            name: 'mode',
            submitValue: false,
            defaults: {
                flex: 1
            },
            items: [{
                text: 'Configure manually',
                value: 'manual'
            },{
                text: 'Upload JSON key',
                value: 'jsonkey'
            }],
            listeners: {
                change: function(comp, value) {
                    var form = comp.up('form'),
                        ct, fields;
                    form.suspendLayouts();
                    // Show only the selected container; disable the fields of
                    // the hidden one so they are excluded from submission.
                    Ext.each(['manual', 'jsonkey'], function(v){
                        ct = form.down('#' + v);
                        ct.setVisible(value === v);
                        Ext.each(ct.query('[isFormField]'), function(field){
                            field.setDisabled(value !== v);
                        });
                    });
                    form.resumeLayouts(true);
                }
            }
        },{
            xtype: 'textfield',
            fieldLabel: 'Project ID',
            name: 'gce.project_id',
            value: params['gce.project_id']
        },{
            // Mode 1: enter client ID, service-account email and key file by hand.
            xtype: 'container',
            layout: 'anchor',
            itemId: 'manual',
            items: [{
                xtype: 'textfield',
                fieldLabel: 'Client ID',
                name: 'gce.client_id',
                value: params['gce.client_id']
            },{
                xtype: 'textfield',
                fieldLabel: 'Email (Service account name)',
                name: 'gce.service_account_name',
                value: params['gce.service_account_name']
            },{
                xtype: 'filefield',
                fieldLabel: 'Private key',
                name: 'gce.key',
                value: params['gce.key'],
                listeners: {
                    //Bug: file button will not be disabled when filefield is hidden initially
                    afterrender: function(){
                        this.setDisabled(this.disabled);
                    }
                }
            }]
        },{
            // Mode 2: upload a single JSON key file instead of manual entry.
            xtype: 'container',
            layout: 'anchor',
            itemId: 'jsonkey',
            items: [{
                xtype: 'filefield',
                fieldLabel: 'JSON key',
                name: 'gce.json_key',
                value: params['gce.json_key'],
                listeners: {
                    //Bug: file button will not be disabled when filefield is hidden initially
                    afterrender: function(){
                        this.setDisabled(this.disabled);
                    }
                }
            }]
        }]
    });

    // Preselect the mode matching previously saved credentials: JSON key if
    // one was stored, manual otherwise.
    form.getForm().findField('mode').setValue(params['gce.json_key'] ? 'jsonkey' : 'manual');

    return form;
});
kikov79/scalr
app/www/ui2/js/ui/account2/environments/clouds/gce.js
JavaScript
apache-2.0
3,361
/*
 *  Copyright 2014 The Luvit Authors. All Rights Reserved.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
#include "private.h"

/* Lua binding for uv_loop_close(): closes the context's loop, clears the
 * stored loop pointer, and returns the uv result code to Lua.  Raises a
 * Lua-side error (via luv_error) if libuv reports a failure, e.g. when
 * handles are still active. */
static int luv_loop_close(lua_State* L) {
  int ret = uv_loop_close(luv_loop(L));
  if (ret < 0) return luv_error(L, ret);
  luv_set_loop(L, NULL);
  lua_pushinteger(L, ret);
  return 1;
}

// These are the same order as uv_run_mode which also starts at 0
static const char *const luv_runmodes[] = {
  "default", "once", "nowait", NULL
};

/* Lua binding for uv_run(): arg 1 is one of "default"/"once"/"nowait"
 * (defaulting to "default").  The active mode is recorded in the context
 * for luv_loop_mode() and reset to -1 when uv_run returns.  Returns the
 * uv_run result as a boolean. */
static int luv_run(lua_State* L) {
  int mode = luaL_checkoption(L, 1, "default", luv_runmodes);
  luv_ctx_t* ctx = luv_context(L);
  ctx->mode = mode;
  int ret = uv_run(ctx->loop, (uv_run_mode)mode);
  ctx->mode = -1;
  if (ret < 0) return luv_error(L, ret);
  lua_pushboolean(L, ret);
  return 1;
}

/* Returns the run mode name of the currently-running loop, or nil when the
 * loop is not inside uv_run (ctx->mode == -1). */
static int luv_loop_mode(lua_State* L) {
  luv_ctx_t* ctx = luv_context(L);
  if (ctx->mode == -1) {
    lua_pushnil(L);
  } else {
    lua_pushstring(L, luv_runmodes[ctx->mode]);
  }
  return 1;
}

/* Lua binding for uv_loop_alive(): pushes true when the loop still has
 * referenced handles or pending requests. */
static int luv_loop_alive(lua_State* L) {
  int ret = uv_loop_alive(luv_loop(L));
  if (ret < 0) return luv_error(L, ret);
  lua_pushboolean(L, ret);
  return 1;
}

/* Lua binding for uv_stop(): asks the running loop to exit; returns nothing. */
static int luv_stop(lua_State* L) {
  uv_stop(luv_loop(L));
  return 0;
}

/* Lua binding for uv_backend_fd(): pushes the poll fd of the loop backend,
 * or nil where no such fd exists. */
static int luv_backend_fd(lua_State* L) {
  int ret = uv_backend_fd(luv_loop(L));
  // -1 is returned when there is no backend fd (like on Windows)
  if (ret == -1)
    lua_pushnil(L);
  else
    lua_pushinteger(L, ret);
  return 1;
}

/* Lua binding for uv_backend_timeout(): pushes the poll timeout in
 * milliseconds (may be -1, meaning "infinite"). */
static int luv_backend_timeout(lua_State* L) {
  int ret = uv_backend_timeout(luv_loop(L));
  lua_pushinteger(L, ret);
  return 1;
}

/* Lua binding for uv_now(): pushes the loop's cached timestamp in
 * milliseconds.
 * NOTE(review): uv_now returns uint64_t but the value is pushed with
 * lua_pushinteger; on builds where lua_Integer is narrower than 64 bits
 * this may truncate — confirm against the supported Lua versions. */
static int luv_now(lua_State* L) {
  uint64_t now = uv_now(luv_loop(L));
  lua_pushinteger(L, now);
  return 1;
}

/* Lua binding for uv_update_time(): refreshes the loop's cached timestamp;
 * returns nothing. */
static int luv_update_time(lua_State* L) {
  uv_update_time(luv_loop(L));
  return 0;
}

/* uv_walk callback: invokes the Lua function sitting at stack index 1 with
 * the handle's userdata as its single argument.  `arg` is the lua_State
 * passed through from luv_walk. */
static void luv_walk_cb(uv_handle_t* handle, void* arg) {
  lua_State* L = (lua_State*)arg;
  luv_handle_t* data = (luv_handle_t*)handle->data;

  // Sanity check
  // Most invalid values are large and refs are small, 0x1000000 is arbitrary.
  assert(data && data->ref < 0x1000000);

  lua_pushvalue(L, 1);                // Copy the function
  luv_find_handle(L, data);           // Get the userdata
  data->ctx->cb_pcall(L, 1, 0, 0);    // Call the function
}

/* Lua binding for uv_walk(): arg 1 must be a function; it is called once
 * per handle owned by the loop (see luv_walk_cb). */
static int luv_walk(lua_State* L) {
  luaL_checktype(L, 1, LUA_TFUNCTION);
  uv_walk(luv_loop(L), luv_walk_cb, L);
  return 0;
}

#if LUV_UV_VERSION_GEQ(1, 0, 2)
/* Option names accepted by loop_configure(); order must match the switch
 * in luv_loop_configure below. */
static const char *const luv_loop_configure_options[] = {
  "block_signal",
#if LUV_UV_VERSION_GEQ(1, 39, 0)
  "metrics_idle_time",
#endif
  NULL
};

/* Lua binding for uv_loop_configure(): arg 1 selects the option by name.
 * "block_signal" additionally requires arg 2, a signal given as a string
 * name or number; other options take no extra argument. */
static int luv_loop_configure(lua_State* L) {
  uv_loop_t* loop = luv_loop(L);
  uv_loop_option option = 0;
  int ret = 0;
  switch (luaL_checkoption(L, 1, NULL, luv_loop_configure_options)) {
  case 0: option = UV_LOOP_BLOCK_SIGNAL; break;
#if LUV_UV_VERSION_GEQ(1, 39, 0)
  case 1: option = UV_METRICS_IDLE_TIME; break;
#endif
  default: break; /* unreachable */
  }
  if (option == UV_LOOP_BLOCK_SIGNAL) {
    // lua_isstring checks for string or number
    int signal;
    luaL_argcheck(L, lua_isstring(L, 2), 2, "block_signal option: expected signal as string or number");
    signal = luv_parse_signal(L, 2);
    ret = uv_loop_configure(loop, UV_LOOP_BLOCK_SIGNAL, signal);
  }
  else {
    ret = uv_loop_configure(loop, option);
  }
  return luv_result(L, ret);
}
#endif
luvit/luv
src/loop.c
C
apache-2.0
3,853
// NOTE(review): C++ header generated by the FlatBuffers compiler from the
// reflection schema (reflection.fbs). It defines the reflection tables —
// Type, KeyValue, EnumVal, Enum, Field, Object, RPCCall, Service, Schema —
// each with the standard generated triple: a table accessor struct with a
// Verify() method, a *Builder helper, and inline Create*() / Create*Direct()
// factory functions. Do not edit by hand; regenerate with flatc instead.
// Only the fused comment/preprocessor lines below have been split back onto
// their own lines (the collapsed layout made them comment out / break the
// rest of the file); all code tokens are unchanged.
// automatically generated by the FlatBuffers compiler, do not modify
#ifndef FLATBUFFERS_GENERATED_REFLECTION_REFLECTION_H_
#define FLATBUFFERS_GENERATED_REFLECTION_REFLECTION_H_
#include "flatbuffers/flatbuffers.h"
namespace reflection { struct Type; struct KeyValue; struct EnumVal; struct Enum; struct Field; struct Object; struct RPCCall; struct Service; struct Schema; enum BaseType { None = 0, UType = 1, Bool = 2, Byte = 3, UByte = 4, Short = 5, UShort = 6, Int = 7, UInt = 8, Long = 9, ULong = 10, Float = 11, Double = 12, String = 13, Vector = 14, Obj = 15, Union = 16 }; inline const BaseType (&EnumValuesBaseType())[17] { static const BaseType values[] = { None, UType, Bool, Byte, UByte, Short, UShort, Int, UInt, Long, ULong, Float, Double, String, Vector, Obj, Union }; return values; } inline const char * const *EnumNamesBaseType() { static const char * const names[] = { "None", "UType", "Bool", "Byte", "UByte", "Short", "UShort", "Int", "UInt", "Long", "ULong", "Float", "Double", "String", "Vector", "Obj", "Union", nullptr }; return names; } inline const char *EnumNameBaseType(BaseType e) { const size_t index = static_cast<int>(e); return EnumNamesBaseType()[index]; } struct Type FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_BASE_TYPE = 4, VT_ELEMENT = 6, VT_INDEX = 8 }; BaseType base_type() const { return static_cast<BaseType>(GetField<int8_t>(VT_BASE_TYPE, 0)); } BaseType element() const { return static_cast<BaseType>(GetField<int8_t>(VT_ELEMENT, 0)); } int32_t index() const { return GetField<int32_t>(VT_INDEX, -1); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_BASE_TYPE) && VerifyField<int8_t>(verifier, VT_ELEMENT) && VerifyField<int32_t>(verifier, VT_INDEX) && verifier.EndTable(); } }; struct TypeBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_base_type(BaseType base_type) { fbb_.AddElement<int8_t>(Type::VT_BASE_TYPE,
static_cast<int8_t>(base_type), 0); } void add_element(BaseType element) { fbb_.AddElement<int8_t>(Type::VT_ELEMENT, static_cast<int8_t>(element), 0); } void add_index(int32_t index) { fbb_.AddElement<int32_t>(Type::VT_INDEX, index, -1); } explicit TypeBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } TypeBuilder &operator=(const TypeBuilder &); flatbuffers::Offset<Type> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<Type>(end); return o; } }; inline flatbuffers::Offset<Type> CreateType( flatbuffers::FlatBufferBuilder &_fbb, BaseType base_type = None, BaseType element = None, int32_t index = -1) { TypeBuilder builder_(_fbb); builder_.add_index(index); builder_.add_element(element); builder_.add_base_type(base_type); return builder_.Finish(); } struct KeyValue FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_KEY = 4, VT_VALUE = 6 }; const flatbuffers::String *key() const { return GetPointer<const flatbuffers::String *>(VT_KEY); } bool KeyCompareLessThan(const KeyValue *o) const { return *key() < *o->key(); } int KeyCompareWithValue(const char *val) const { return strcmp(key()->c_str(), val); } const flatbuffers::String *value() const { return GetPointer<const flatbuffers::String *>(VT_VALUE); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_KEY) && verifier.VerifyString(key()) && VerifyOffset(verifier, VT_VALUE) && verifier.VerifyString(value()) && verifier.EndTable(); } }; struct KeyValueBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_key(flatbuffers::Offset<flatbuffers::String> key) { fbb_.AddOffset(KeyValue::VT_KEY, key); } void add_value(flatbuffers::Offset<flatbuffers::String> value) { fbb_.AddOffset(KeyValue::VT_VALUE, value); } explicit KeyValueBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } KeyValueBuilder
&operator=(const KeyValueBuilder &); flatbuffers::Offset<KeyValue> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<KeyValue>(end); fbb_.Required(o, KeyValue::VT_KEY); return o; } }; inline flatbuffers::Offset<KeyValue> CreateKeyValue( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> key = 0, flatbuffers::Offset<flatbuffers::String> value = 0) { KeyValueBuilder builder_(_fbb); builder_.add_value(value); builder_.add_key(key); return builder_.Finish(); } inline flatbuffers::Offset<KeyValue> CreateKeyValueDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *key = nullptr, const char *value = nullptr) { return reflection::CreateKeyValue( _fbb, key ? _fbb.CreateString(key) : 0, value ? _fbb.CreateString(value) : 0); } struct EnumVal FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_NAME = 4, VT_VALUE = 6, VT_OBJECT = 8, VT_UNION_TYPE = 10, VT_DOCUMENTATION = 12 }; const flatbuffers::String *name() const { return GetPointer<const flatbuffers::String *>(VT_NAME); } int64_t value() const { return GetField<int64_t>(VT_VALUE, 0); } bool KeyCompareLessThan(const EnumVal *o) const { return value() < o->value(); } int KeyCompareWithValue(int64_t val) const { return static_cast<int>(value() > val) - static_cast<int>(value() < val); } const Object *object() const { return GetPointer<const Object *>(VT_OBJECT); } const Type *union_type() const { return GetPointer<const Type *>(VT_UNION_TYPE); } const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *documentation() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DOCUMENTATION); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_NAME) && verifier.VerifyString(name()) && VerifyField<int64_t>(verifier, VT_VALUE) && VerifyOffset(verifier, VT_OBJECT) && verifier.VerifyTable(object()) && VerifyOffset(verifier,
VT_UNION_TYPE) && verifier.VerifyTable(union_type()) && VerifyOffset(verifier, VT_DOCUMENTATION) && verifier.VerifyVector(documentation()) && verifier.VerifyVectorOfStrings(documentation()) && verifier.EndTable(); } }; struct EnumValBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_name(flatbuffers::Offset<flatbuffers::String> name) { fbb_.AddOffset(EnumVal::VT_NAME, name); } void add_value(int64_t value) { fbb_.AddElement<int64_t>(EnumVal::VT_VALUE, value, 0); } void add_object(flatbuffers::Offset<Object> object) { fbb_.AddOffset(EnumVal::VT_OBJECT, object); } void add_union_type(flatbuffers::Offset<Type> union_type) { fbb_.AddOffset(EnumVal::VT_UNION_TYPE, union_type); } void add_documentation(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation) { fbb_.AddOffset(EnumVal::VT_DOCUMENTATION, documentation); } explicit EnumValBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } EnumValBuilder &operator=(const EnumValBuilder &); flatbuffers::Offset<EnumVal> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<EnumVal>(end); fbb_.Required(o, EnumVal::VT_NAME); return o; } }; inline flatbuffers::Offset<EnumVal> CreateEnumVal( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> name = 0, int64_t value = 0, flatbuffers::Offset<Object> object = 0, flatbuffers::Offset<Type> union_type = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation = 0) { EnumValBuilder builder_(_fbb); builder_.add_value(value); builder_.add_documentation(documentation); builder_.add_union_type(union_type); builder_.add_object(object); builder_.add_name(name); return builder_.Finish(); } inline flatbuffers::Offset<EnumVal> CreateEnumValDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, int64_t value = 0, flatbuffers::Offset<Object> object = 0,
flatbuffers::Offset<Type> union_type = 0, const std::vector<flatbuffers::Offset<flatbuffers::String>> *documentation = nullptr) { return reflection::CreateEnumVal( _fbb, name ? _fbb.CreateString(name) : 0, value, object, union_type, documentation ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*documentation) : 0); } struct Enum FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_NAME = 4, VT_VALUES = 6, VT_IS_UNION = 8, VT_UNDERLYING_TYPE = 10, VT_ATTRIBUTES = 12, VT_DOCUMENTATION = 14 }; const flatbuffers::String *name() const { return GetPointer<const flatbuffers::String *>(VT_NAME); } bool KeyCompareLessThan(const Enum *o) const { return *name() < *o->name(); } int KeyCompareWithValue(const char *val) const { return strcmp(name()->c_str(), val); } const flatbuffers::Vector<flatbuffers::Offset<EnumVal>> *values() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<EnumVal>> *>(VT_VALUES); } bool is_union() const { return GetField<uint8_t>(VT_IS_UNION, 0) != 0; } const Type *underlying_type() const { return GetPointer<const Type *>(VT_UNDERLYING_TYPE); } const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *attributes() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *>(VT_ATTRIBUTES); } const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *documentation() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DOCUMENTATION); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_NAME) && verifier.VerifyString(name()) && VerifyOffsetRequired(verifier, VT_VALUES) && verifier.VerifyVector(values()) && verifier.VerifyVectorOfTables(values()) && VerifyField<uint8_t>(verifier, VT_IS_UNION) && VerifyOffsetRequired(verifier, VT_UNDERLYING_TYPE) && verifier.VerifyTable(underlying_type()) && VerifyOffset(verifier, VT_ATTRIBUTES) &&
verifier.VerifyVector(attributes()) && verifier.VerifyVectorOfTables(attributes()) && VerifyOffset(verifier, VT_DOCUMENTATION) && verifier.VerifyVector(documentation()) && verifier.VerifyVectorOfStrings(documentation()) && verifier.EndTable(); } }; struct EnumBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_name(flatbuffers::Offset<flatbuffers::String> name) { fbb_.AddOffset(Enum::VT_NAME, name); } void add_values(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<EnumVal>>> values) { fbb_.AddOffset(Enum::VT_VALUES, values); } void add_is_union(bool is_union) { fbb_.AddElement<uint8_t>(Enum::VT_IS_UNION, static_cast<uint8_t>(is_union), 0); } void add_underlying_type(flatbuffers::Offset<Type> underlying_type) { fbb_.AddOffset(Enum::VT_UNDERLYING_TYPE, underlying_type); } void add_attributes(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes) { fbb_.AddOffset(Enum::VT_ATTRIBUTES, attributes); } void add_documentation(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation) { fbb_.AddOffset(Enum::VT_DOCUMENTATION, documentation); } explicit EnumBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } EnumBuilder &operator=(const EnumBuilder &); flatbuffers::Offset<Enum> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<Enum>(end); fbb_.Required(o, Enum::VT_NAME); fbb_.Required(o, Enum::VT_VALUES); fbb_.Required(o, Enum::VT_UNDERLYING_TYPE); return o; } }; inline flatbuffers::Offset<Enum> CreateEnum( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> name = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<EnumVal>>> values = 0, bool is_union = false, flatbuffers::Offset<Type> underlying_type = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes = 0,
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation = 0) { EnumBuilder builder_(_fbb); builder_.add_documentation(documentation); builder_.add_attributes(attributes); builder_.add_underlying_type(underlying_type); builder_.add_values(values); builder_.add_name(name); builder_.add_is_union(is_union); return builder_.Finish(); } inline flatbuffers::Offset<Enum> CreateEnumDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const std::vector<flatbuffers::Offset<EnumVal>> *values = nullptr, bool is_union = false, flatbuffers::Offset<Type> underlying_type = 0, const std::vector<flatbuffers::Offset<KeyValue>> *attributes = nullptr, const std::vector<flatbuffers::Offset<flatbuffers::String>> *documentation = nullptr) { return reflection::CreateEnum( _fbb, name ? _fbb.CreateString(name) : 0, values ? _fbb.CreateVector<flatbuffers::Offset<EnumVal>>(*values) : 0, is_union, underlying_type, attributes ? _fbb.CreateVector<flatbuffers::Offset<KeyValue>>(*attributes) : 0, documentation ?
_fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*documentation) : 0); } struct Field FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_NAME = 4, VT_TYPE = 6, VT_ID = 8, VT_OFFSET = 10, VT_DEFAULT_INTEGER = 12, VT_DEFAULT_REAL = 14, VT_DEPRECATED = 16, VT_REQUIRED = 18, VT_KEY = 20, VT_ATTRIBUTES = 22, VT_DOCUMENTATION = 24 }; const flatbuffers::String *name() const { return GetPointer<const flatbuffers::String *>(VT_NAME); } bool KeyCompareLessThan(const Field *o) const { return *name() < *o->name(); } int KeyCompareWithValue(const char *val) const { return strcmp(name()->c_str(), val); } const Type *type() const { return GetPointer<const Type *>(VT_TYPE); } uint16_t id() const { return GetField<uint16_t>(VT_ID, 0); } uint16_t offset() const { return GetField<uint16_t>(VT_OFFSET, 0); } int64_t default_integer() const { return GetField<int64_t>(VT_DEFAULT_INTEGER, 0); } double default_real() const { return GetField<double>(VT_DEFAULT_REAL, 0.0); } bool deprecated() const { return GetField<uint8_t>(VT_DEPRECATED, 0) != 0; } bool required() const { return GetField<uint8_t>(VT_REQUIRED, 0) != 0; } bool key() const { return GetField<uint8_t>(VT_KEY, 0) != 0; } const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *attributes() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *>(VT_ATTRIBUTES); } const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *documentation() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DOCUMENTATION); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_NAME) && verifier.VerifyString(name()) && VerifyOffsetRequired(verifier, VT_TYPE) && verifier.VerifyTable(type()) && VerifyField<uint16_t>(verifier, VT_ID) && VerifyField<uint16_t>(verifier, VT_OFFSET) && VerifyField<int64_t>(verifier, VT_DEFAULT_INTEGER) && VerifyField<double>(verifier,
VT_DEFAULT_REAL) && VerifyField<uint8_t>(verifier, VT_DEPRECATED) && VerifyField<uint8_t>(verifier, VT_REQUIRED) && VerifyField<uint8_t>(verifier, VT_KEY) && VerifyOffset(verifier, VT_ATTRIBUTES) && verifier.VerifyVector(attributes()) && verifier.VerifyVectorOfTables(attributes()) && VerifyOffset(verifier, VT_DOCUMENTATION) && verifier.VerifyVector(documentation()) && verifier.VerifyVectorOfStrings(documentation()) && verifier.EndTable(); } }; struct FieldBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_name(flatbuffers::Offset<flatbuffers::String> name) { fbb_.AddOffset(Field::VT_NAME, name); } void add_type(flatbuffers::Offset<Type> type) { fbb_.AddOffset(Field::VT_TYPE, type); } void add_id(uint16_t id) { fbb_.AddElement<uint16_t>(Field::VT_ID, id, 0); } void add_offset(uint16_t offset) { fbb_.AddElement<uint16_t>(Field::VT_OFFSET, offset, 0); } void add_default_integer(int64_t default_integer) { fbb_.AddElement<int64_t>(Field::VT_DEFAULT_INTEGER, default_integer, 0); } void add_default_real(double default_real) { fbb_.AddElement<double>(Field::VT_DEFAULT_REAL, default_real, 0.0); } void add_deprecated(bool deprecated) { fbb_.AddElement<uint8_t>(Field::VT_DEPRECATED, static_cast<uint8_t>(deprecated), 0); } void add_required(bool required) { fbb_.AddElement<uint8_t>(Field::VT_REQUIRED, static_cast<uint8_t>(required), 0); } void add_key(bool key) { fbb_.AddElement<uint8_t>(Field::VT_KEY, static_cast<uint8_t>(key), 0); } void add_attributes(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes) { fbb_.AddOffset(Field::VT_ATTRIBUTES, attributes); } void add_documentation(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation) { fbb_.AddOffset(Field::VT_DOCUMENTATION, documentation); } explicit FieldBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } FieldBuilder &operator=(const FieldBuilder &);
flatbuffers::Offset<Field> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<Field>(end); fbb_.Required(o, Field::VT_NAME); fbb_.Required(o, Field::VT_TYPE); return o; } }; inline flatbuffers::Offset<Field> CreateField( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> name = 0, flatbuffers::Offset<Type> type = 0, uint16_t id = 0, uint16_t offset = 0, int64_t default_integer = 0, double default_real = 0.0, bool deprecated = false, bool required = false, bool key = false, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation = 0) { FieldBuilder builder_(_fbb); builder_.add_default_real(default_real); builder_.add_default_integer(default_integer); builder_.add_documentation(documentation); builder_.add_attributes(attributes); builder_.add_type(type); builder_.add_name(name); builder_.add_offset(offset); builder_.add_id(id); builder_.add_key(key); builder_.add_required(required); builder_.add_deprecated(deprecated); return builder_.Finish(); } inline flatbuffers::Offset<Field> CreateFieldDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, flatbuffers::Offset<Type> type = 0, uint16_t id = 0, uint16_t offset = 0, int64_t default_integer = 0, double default_real = 0.0, bool deprecated = false, bool required = false, bool key = false, const std::vector<flatbuffers::Offset<KeyValue>> *attributes = nullptr, const std::vector<flatbuffers::Offset<flatbuffers::String>> *documentation = nullptr) { return reflection::CreateField( _fbb, name ? _fbb.CreateString(name) : 0, type, id, offset, default_integer, default_real, deprecated, required, key, attributes ? _fbb.CreateVector<flatbuffers::Offset<KeyValue>>(*attributes) : 0, documentation ?
_fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*documentation) : 0); } struct Object FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_NAME = 4, VT_FIELDS = 6, VT_IS_STRUCT = 8, VT_MINALIGN = 10, VT_BYTESIZE = 12, VT_ATTRIBUTES = 14, VT_DOCUMENTATION = 16 }; const flatbuffers::String *name() const { return GetPointer<const flatbuffers::String *>(VT_NAME); } bool KeyCompareLessThan(const Object *o) const { return *name() < *o->name(); } int KeyCompareWithValue(const char *val) const { return strcmp(name()->c_str(), val); } const flatbuffers::Vector<flatbuffers::Offset<Field>> *fields() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Field>> *>(VT_FIELDS); } bool is_struct() const { return GetField<uint8_t>(VT_IS_STRUCT, 0) != 0; } int32_t minalign() const { return GetField<int32_t>(VT_MINALIGN, 0); } int32_t bytesize() const { return GetField<int32_t>(VT_BYTESIZE, 0); } const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *attributes() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *>(VT_ATTRIBUTES); } const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *documentation() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DOCUMENTATION); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_NAME) && verifier.VerifyString(name()) && VerifyOffsetRequired(verifier, VT_FIELDS) && verifier.VerifyVector(fields()) && verifier.VerifyVectorOfTables(fields()) && VerifyField<uint8_t>(verifier, VT_IS_STRUCT) && VerifyField<int32_t>(verifier, VT_MINALIGN) && VerifyField<int32_t>(verifier, VT_BYTESIZE) && VerifyOffset(verifier, VT_ATTRIBUTES) && verifier.VerifyVector(attributes()) && verifier.VerifyVectorOfTables(attributes()) && VerifyOffset(verifier, VT_DOCUMENTATION) && verifier.VerifyVector(documentation()) &&
verifier.VerifyVectorOfStrings(documentation()) && verifier.EndTable(); } }; struct ObjectBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_name(flatbuffers::Offset<flatbuffers::String> name) { fbb_.AddOffset(Object::VT_NAME, name); } void add_fields(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Field>>> fields) { fbb_.AddOffset(Object::VT_FIELDS, fields); } void add_is_struct(bool is_struct) { fbb_.AddElement<uint8_t>(Object::VT_IS_STRUCT, static_cast<uint8_t>(is_struct), 0); } void add_minalign(int32_t minalign) { fbb_.AddElement<int32_t>(Object::VT_MINALIGN, minalign, 0); } void add_bytesize(int32_t bytesize) { fbb_.AddElement<int32_t>(Object::VT_BYTESIZE, bytesize, 0); } void add_attributes(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes) { fbb_.AddOffset(Object::VT_ATTRIBUTES, attributes); } void add_documentation(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation) { fbb_.AddOffset(Object::VT_DOCUMENTATION, documentation); } explicit ObjectBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } ObjectBuilder &operator=(const ObjectBuilder &); flatbuffers::Offset<Object> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<Object>(end); fbb_.Required(o, Object::VT_NAME); fbb_.Required(o, Object::VT_FIELDS); return o; } }; inline flatbuffers::Offset<Object> CreateObject( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> name = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Field>>> fields = 0, bool is_struct = false, int32_t minalign = 0, int32_t bytesize = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation = 0) { ObjectBuilder builder_(_fbb); builder_.add_documentation(documentation);
builder_.add_attributes(attributes); builder_.add_bytesize(bytesize); builder_.add_minalign(minalign); builder_.add_fields(fields); builder_.add_name(name); builder_.add_is_struct(is_struct); return builder_.Finish(); } inline flatbuffers::Offset<Object> CreateObjectDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const std::vector<flatbuffers::Offset<Field>> *fields = nullptr, bool is_struct = false, int32_t minalign = 0, int32_t bytesize = 0, const std::vector<flatbuffers::Offset<KeyValue>> *attributes = nullptr, const std::vector<flatbuffers::Offset<flatbuffers::String>> *documentation = nullptr) { return reflection::CreateObject( _fbb, name ? _fbb.CreateString(name) : 0, fields ? _fbb.CreateVector<flatbuffers::Offset<Field>>(*fields) : 0, is_struct, minalign, bytesize, attributes ? _fbb.CreateVector<flatbuffers::Offset<KeyValue>>(*attributes) : 0, documentation ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*documentation) : 0); } struct RPCCall FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_NAME = 4, VT_REQUEST = 6, VT_RESPONSE = 8, VT_ATTRIBUTES = 10, VT_DOCUMENTATION = 12 }; const flatbuffers::String *name() const { return GetPointer<const flatbuffers::String *>(VT_NAME); } bool KeyCompareLessThan(const RPCCall *o) const { return *name() < *o->name(); } int KeyCompareWithValue(const char *val) const { return strcmp(name()->c_str(), val); } const Object *request() const { return GetPointer<const Object *>(VT_REQUEST); } const Object *response() const { return GetPointer<const Object *>(VT_RESPONSE); } const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *attributes() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *>(VT_ATTRIBUTES); } const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *documentation() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DOCUMENTATION); } bool
Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_NAME) && verifier.VerifyString(name()) && VerifyOffsetRequired(verifier, VT_REQUEST) && verifier.VerifyTable(request()) && VerifyOffsetRequired(verifier, VT_RESPONSE) && verifier.VerifyTable(response()) && VerifyOffset(verifier, VT_ATTRIBUTES) && verifier.VerifyVector(attributes()) && verifier.VerifyVectorOfTables(attributes()) && VerifyOffset(verifier, VT_DOCUMENTATION) && verifier.VerifyVector(documentation()) && verifier.VerifyVectorOfStrings(documentation()) && verifier.EndTable(); } }; struct RPCCallBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_name(flatbuffers::Offset<flatbuffers::String> name) { fbb_.AddOffset(RPCCall::VT_NAME, name); } void add_request(flatbuffers::Offset<Object> request) { fbb_.AddOffset(RPCCall::VT_REQUEST, request); } void add_response(flatbuffers::Offset<Object> response) { fbb_.AddOffset(RPCCall::VT_RESPONSE, response); } void add_attributes(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes) { fbb_.AddOffset(RPCCall::VT_ATTRIBUTES, attributes); } void add_documentation(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation) { fbb_.AddOffset(RPCCall::VT_DOCUMENTATION, documentation); } explicit RPCCallBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } RPCCallBuilder &operator=(const RPCCallBuilder &); flatbuffers::Offset<RPCCall> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<RPCCall>(end); fbb_.Required(o, RPCCall::VT_NAME); fbb_.Required(o, RPCCall::VT_REQUEST); fbb_.Required(o, RPCCall::VT_RESPONSE); return o; } }; inline flatbuffers::Offset<RPCCall> CreateRPCCall( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> name = 0, flatbuffers::Offset<Object> request = 0, flatbuffers::Offset<Object> response =
0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation = 0) { RPCCallBuilder builder_(_fbb); builder_.add_documentation(documentation); builder_.add_attributes(attributes); builder_.add_response(response); builder_.add_request(request); builder_.add_name(name); return builder_.Finish(); } inline flatbuffers::Offset<RPCCall> CreateRPCCallDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, flatbuffers::Offset<Object> request = 0, flatbuffers::Offset<Object> response = 0, const std::vector<flatbuffers::Offset<KeyValue>> *attributes = nullptr, const std::vector<flatbuffers::Offset<flatbuffers::String>> *documentation = nullptr) { return reflection::CreateRPCCall( _fbb, name ? _fbb.CreateString(name) : 0, request, response, attributes ? _fbb.CreateVector<flatbuffers::Offset<KeyValue>>(*attributes) : 0, documentation ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*documentation) : 0); } struct Service FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_NAME = 4, VT_CALLS = 6, VT_ATTRIBUTES = 8, VT_DOCUMENTATION = 10 }; const flatbuffers::String *name() const { return GetPointer<const flatbuffers::String *>(VT_NAME); } bool KeyCompareLessThan(const Service *o) const { return *name() < *o->name(); } int KeyCompareWithValue(const char *val) const { return strcmp(name()->c_str(), val); } const flatbuffers::Vector<flatbuffers::Offset<RPCCall>> *calls() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<RPCCall>> *>(VT_CALLS); } const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *attributes() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<KeyValue>> *>(VT_ATTRIBUTES); } const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *documentation() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>
*>(VT_DOCUMENTATION); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier, VT_NAME) && verifier.VerifyString(name()) && VerifyOffset(verifier, VT_CALLS) && verifier.VerifyVector(calls()) && verifier.VerifyVectorOfTables(calls()) && VerifyOffset(verifier, VT_ATTRIBUTES) && verifier.VerifyVector(attributes()) && verifier.VerifyVectorOfTables(attributes()) && VerifyOffset(verifier, VT_DOCUMENTATION) && verifier.VerifyVector(documentation()) && verifier.VerifyVectorOfStrings(documentation()) && verifier.EndTable(); } }; struct ServiceBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_name(flatbuffers::Offset<flatbuffers::String> name) { fbb_.AddOffset(Service::VT_NAME, name); } void add_calls(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<RPCCall>>> calls) { fbb_.AddOffset(Service::VT_CALLS, calls); } void add_attributes(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes) { fbb_.AddOffset(Service::VT_ATTRIBUTES, attributes); } void add_documentation(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> documentation) { fbb_.AddOffset(Service::VT_DOCUMENTATION, documentation); } explicit ServiceBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } ServiceBuilder &operator=(const ServiceBuilder &); flatbuffers::Offset<Service> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<Service>(end); fbb_.Required(o, Service::VT_NAME); return o; } }; inline flatbuffers::Offset<Service> CreateService( flatbuffers::FlatBufferBuilder &_fbb, flatbuffers::Offset<flatbuffers::String> name = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<RPCCall>>> calls = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<KeyValue>>> attributes = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>>
documentation = 0) { ServiceBuilder builder_(_fbb); builder_.add_documentation(documentation); builder_.add_attributes(attributes); builder_.add_calls(calls); builder_.add_name(name); return builder_.Finish(); } inline flatbuffers::Offset<Service> CreateServiceDirect( flatbuffers::FlatBufferBuilder &_fbb, const char *name = nullptr, const std::vector<flatbuffers::Offset<RPCCall>> *calls = nullptr, const std::vector<flatbuffers::Offset<KeyValue>> *attributes = nullptr, const std::vector<flatbuffers::Offset<flatbuffers::String>> *documentation = nullptr) { return reflection::CreateService( _fbb, name ? _fbb.CreateString(name) : 0, calls ? _fbb.CreateVector<flatbuffers::Offset<RPCCall>>(*calls) : 0, attributes ? _fbb.CreateVector<flatbuffers::Offset<KeyValue>>(*attributes) : 0, documentation ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*documentation) : 0); } struct Schema FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { enum { VT_OBJECTS = 4, VT_ENUMS = 6, VT_FILE_IDENT = 8, VT_FILE_EXT = 10, VT_ROOT_TABLE = 12, VT_SERVICES = 14 }; const flatbuffers::Vector<flatbuffers::Offset<Object>> *objects() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Object>> *>(VT_OBJECTS); } const flatbuffers::Vector<flatbuffers::Offset<Enum>> *enums() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Enum>> *>(VT_ENUMS); } const flatbuffers::String *file_ident() const { return GetPointer<const flatbuffers::String *>(VT_FILE_IDENT); } const flatbuffers::String *file_ext() const { return GetPointer<const flatbuffers::String *>(VT_FILE_EXT); } const Object *root_table() const { return GetPointer<const Object *>(VT_ROOT_TABLE); } const flatbuffers::Vector<flatbuffers::Offset<Service>> *services() const { return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Service>> *>(VT_SERVICES); } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyOffsetRequired(verifier,
VT_OBJECTS) && verifier.VerifyVector(objects()) && verifier.VerifyVectorOfTables(objects()) && VerifyOffsetRequired(verifier, VT_ENUMS) && verifier.VerifyVector(enums()) && verifier.VerifyVectorOfTables(enums()) && VerifyOffset(verifier, VT_FILE_IDENT) && verifier.VerifyString(file_ident()) && VerifyOffset(verifier, VT_FILE_EXT) && verifier.VerifyString(file_ext()) && VerifyOffset(verifier, VT_ROOT_TABLE) && verifier.VerifyTable(root_table()) && VerifyOffset(verifier, VT_SERVICES) && verifier.VerifyVector(services()) && verifier.VerifyVectorOfTables(services()) && verifier.EndTable(); } }; struct SchemaBuilder { flatbuffers::FlatBufferBuilder &fbb_; flatbuffers::uoffset_t start_; void add_objects(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Object>>> objects) { fbb_.AddOffset(Schema::VT_OBJECTS, objects); } void add_enums(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Enum>>> enums) { fbb_.AddOffset(Schema::VT_ENUMS, enums); } void add_file_ident(flatbuffers::Offset<flatbuffers::String> file_ident) { fbb_.AddOffset(Schema::VT_FILE_IDENT, file_ident); } void add_file_ext(flatbuffers::Offset<flatbuffers::String> file_ext) { fbb_.AddOffset(Schema::VT_FILE_EXT, file_ext); } void add_root_table(flatbuffers::Offset<Object> root_table) { fbb_.AddOffset(Schema::VT_ROOT_TABLE, root_table); } void add_services(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Service>>> services) { fbb_.AddOffset(Schema::VT_SERVICES, services); } explicit SchemaBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } SchemaBuilder &operator=(const SchemaBuilder &); flatbuffers::Offset<Schema> Finish() { const auto end = fbb_.EndTable(start_); auto o = flatbuffers::Offset<Schema>(end); fbb_.Required(o, Schema::VT_OBJECTS); fbb_.Required(o, Schema::VT_ENUMS); return o; } }; inline flatbuffers::Offset<Schema> CreateSchema( flatbuffers::FlatBufferBuilder &_fbb,
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Object>>> objects = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Enum>>> enums = 0, flatbuffers::Offset<flatbuffers::String> file_ident = 0, flatbuffers::Offset<flatbuffers::String> file_ext = 0, flatbuffers::Offset<Object> root_table = 0, flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Service>>> services = 0) { SchemaBuilder builder_(_fbb); builder_.add_services(services); builder_.add_root_table(root_table); builder_.add_file_ext(file_ext); builder_.add_file_ident(file_ident); builder_.add_enums(enums); builder_.add_objects(objects); return builder_.Finish(); } inline flatbuffers::Offset<Schema> CreateSchemaDirect( flatbuffers::FlatBufferBuilder &_fbb, const std::vector<flatbuffers::Offset<Object>> *objects = nullptr, const std::vector<flatbuffers::Offset<Enum>> *enums = nullptr, const char *file_ident = nullptr, const char *file_ext = nullptr, flatbuffers::Offset<Object> root_table = 0, const std::vector<flatbuffers::Offset<Service>> *services = nullptr) { return reflection::CreateSchema( _fbb, objects ? _fbb.CreateVector<flatbuffers::Offset<Object>>(*objects) : 0, enums ? _fbb.CreateVector<flatbuffers::Offset<Enum>>(*enums) : 0, file_ident ? _fbb.CreateString(file_ident) : 0, file_ext ? _fbb.CreateString(file_ext) : 0, root_table, services ?
_fbb.CreateVector<flatbuffers::Offset<Service>>(*services) : 0); } inline const reflection::Schema *GetSchema(const void *buf) { return flatbuffers::GetRoot<reflection::Schema>(buf); } inline const reflection::Schema *GetSizePrefixedSchema(const void *buf) { return flatbuffers::GetSizePrefixedRoot<reflection::Schema>(buf); } inline const char *SchemaIdentifier() { return "BFBS"; } inline bool SchemaBufferHasIdentifier(const void *buf) { return flatbuffers::BufferHasIdentifier( buf, SchemaIdentifier()); } inline bool VerifySchemaBuffer( flatbuffers::Verifier &verifier) { return verifier.VerifyBuffer<reflection::Schema>(SchemaIdentifier()); } inline bool VerifySizePrefixedSchemaBuffer( flatbuffers::Verifier &verifier) { return verifier.VerifySizePrefixedBuffer<reflection::Schema>(SchemaIdentifier()); } inline const char *SchemaExtension() { return "bfbs"; } inline void FinishSchemaBuffer( flatbuffers::FlatBufferBuilder &fbb, flatbuffers::Offset<reflection::Schema> root) { fbb.Finish(root, SchemaIdentifier()); } inline void FinishSizePrefixedSchemaBuffer( flatbuffers::FlatBufferBuilder &fbb, flatbuffers::Offset<reflection::Schema> root) { fbb.FinishSizePrefixed(root, SchemaIdentifier()); } } // namespace reflection #endif // FLATBUFFERS_GENERATED_REFLECTION_REFLECTION_H_
rw/flatbuffers
include/flatbuffers/reflection_generated.h
C
apache-2.0
41,277
<?php namespace app\widgets\jqGrid; /** * Created by PhpStorm. * User: Андрей * Date: 21.10.2015 * Time: 1:52 */ use yii\base\Event; class EventBeforeSave extends Event { public $model; public $get; public $post; }
andriell/yii2-gii
widgets/jqGrid/EventBeforeSave.php
PHP
apache-2.0
240
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.spark.testsuite.datacompaction import scala.collection.JavaConverters._ import org.scalatest.BeforeAndAfterAll import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.metadata.CarbonMetadata import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusManager} import org.apache.carbondata.core.util.CarbonProperties import org.apache.spark.sql.test.util.QueryTest /** * FT for compaction scenario where major segment should not be included in minor. 
*/ class MajorCompactionIgnoreInMinorTest extends QueryTest with BeforeAndAfterAll { val csvFilePath1 = s"$resourcesPath/compaction/compaction1.csv" val csvFilePath2 = s"$resourcesPath/compaction/compaction2.csv" val csvFilePath3 = s"$resourcesPath/compaction/compaction3.csv" override def beforeAll { } def createTableAndLoadData(): Unit = { CarbonProperties.getInstance().addProperty("carbon.compaction.level.threshold", "2,2") sql("drop table if exists ignoremajor") CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/yyyy") sql( "CREATE TABLE IF NOT EXISTS ignoremajor (country String, ID Int, date Timestamp, name " + "String, phonetype String, serialname String, salary Int) STORED AS carbondata" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE ignoremajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE ignoremajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) // compaction will happen here. sql("alter table ignoremajor compact 'major'") sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE ignoremajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE ignoremajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) sql("alter table ignoremajor compact 'minor'" ) } /** * Delete should not work on compacted segment. */ test("delete compacted segment and check status") { createTableAndLoadData() intercept[Throwable] { sql("delete from table ignoremajor where segment.id in (2)") } val carbonTable = CarbonMetadata.getInstance().getCarbonTable( CarbonCommonConstants.DATABASE_DEFAULT_NAME, "ignoremajor" ) val carbonTablePath = carbonTable.getMetadataPath val segs = SegmentStatusManager.readLoadMetadata(carbonTablePath) // status should remain as compacted. 
assertResult(SegmentStatus.COMPACTED)(segs(3).getSegmentStatus) } /** * Delete should not work on compacted segment. */ test("delete compacted segment by date and check status") { createTableAndLoadData() sql( "delete from table ignoremajor where segment.starttime before " + " '2222-01-01 19:35:01'" ) val carbonTable = CarbonMetadata.getInstance().getCarbonTable( CarbonCommonConstants.DATABASE_DEFAULT_NAME, "ignoremajor" ) val carbontablePath = carbonTable.getMetadataPath val segs = SegmentStatusManager.readLoadMetadata(carbontablePath) // status should remain as compacted for segment 2. assertResult(SegmentStatus.COMPACTED)(segs(3).getSegmentStatus) // for segment 0.1 . should get deleted assertResult(SegmentStatus.MARKED_FOR_DELETE)(segs(2).getSegmentStatus) } /** * Test whether major compaction is not included in minor compaction. */ test("delete merged folder and check segments") { createTableAndLoadData() // delete merged segments sql("clean files for table ignoremajor") val carbonTable = CarbonMetadata.getInstance().getCarbonTable( CarbonCommonConstants.DATABASE_DEFAULT_NAME, "ignoremajor" ) val absoluteTableIdentifier = carbonTable .getAbsoluteTableIdentifier val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager( absoluteTableIdentifier) // merged segment should not be there val segments = segmentStatusManager.getValidAndInvalidSegments.getValidSegments.asScala.map(_.getSegmentNo).toList assert(segments.contains("0.1")) assert(segments.contains("2.1")) assert(!segments.contains("2")) assert(!segments.contains("3")) } /** * Execute two major compactions sequentially */ test("Execute two major compactions sequentially") { sql("drop table if exists testmajor") sql( "CREATE TABLE IF NOT EXISTS testmajor (country String, ID Int, date Timestamp, name " + "String, phonetype String, serialname String, salary Int) STORED AS carbondata" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE testmajor OPTIONS" + "('DELIMITER'= ',', 
'QUOTECHAR'= '\"')" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE testmajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) // compaction will happen here. sql("alter table testmajor compact 'major'") sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE testmajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE testmajor OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) sql("alter table testmajor compact 'major'") val carbonTable = CarbonMetadata.getInstance().getCarbonTable( CarbonCommonConstants.DATABASE_DEFAULT_NAME, "testmajor" ) val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager( absoluteTableIdentifier) // merged segment should not be there val segments = segmentStatusManager.getValidAndInvalidSegments.getValidSegments.asScala.map(_.getSegmentNo).toList assert(!segments.contains("0.1")) assert(segments.contains("0.2")) assert(!segments.contains("2")) assert(!segments.contains("3")) } override def afterAll { CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT) sql("drop table if exists ignoremajor") sql("drop table if exists testmajor") } }
jackylk/incubator-carbondata
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
Scala
apache-2.0
7,171
#!/bin/bash SRCFILE=TestDFSIO.java HADOOPSRC=/home/weijia/ws/hadoop-crtc-src DSTFILE=${HADOOPSRC}/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java cp $SRCFILE $DSTFILE
songweijia/hdfsrs
experiments/snapshot/run/src/updateTestDFSIO.sh
Shell
apache-2.0
253
/** * */ package com.perforce.p4java.tests.dev.unit.bug.r111; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Test; import com.perforce.p4java.client.IClient; import com.perforce.p4java.core.CoreFactory; import com.perforce.p4java.core.IChangelist; import com.perforce.p4java.core.file.FileSpecBuilder; import com.perforce.p4java.core.file.FileStatAncilliaryOptions; import com.perforce.p4java.core.file.IExtendedFileSpec; import com.perforce.p4java.core.file.IFileSpec; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.option.client.EditFilesOptions; import com.perforce.p4java.option.client.RevertFilesOptions; import com.perforce.p4java.option.server.GetExtendedFilesOptions; import com.perforce.p4java.option.server.SetFileAttributesOptions; import com.perforce.p4java.server.IOptionsServer; import com.perforce.p4java.tests.dev.annotations.TestId; import com.perforce.p4java.tests.dev.unit.P4JavaTestCase; /** * Tests for file attribute visibility when a file is open * for edit (Job042258). 
*/ @TestId("Bugs111_Job042258Test") public class Job042258Test extends P4JavaTestCase { public Job042258Test() { } @Test public void testAttributeRetrieval() { IOptionsServer server = null; IClient client = null; IChangelist changelist = null; final String description = "Test changelist for test " + testId; final String testRoot = "//depot/111bugs/Bugs111_Job042258Test"; final String testFile = testRoot + "/test01.txt"; final List<IFileSpec> testFiles = FileSpecBuilder.makeFileSpecList(testFile); final String attrName = this.getRandomName("test1"); final String attrValue = this.getRandomName("value"); try { server = getServer(); client = getDefaultClient(server); assertNotNull("null client returned", client); server.setCurrentClient(client); List<IFileSpec> syncFiles = this.forceSyncFiles(client, testRoot + "/..."); assertNotNull("bad forced sync", syncFiles); changelist = CoreFactory.createChangelist(client, description, true); assertNotNull("changelist not created", changelist); List<IFileSpec> editFiles = client.editFiles(testFiles, new EditFilesOptions().setChangelistId(changelist.getId())); assertNotNull("null file list returned from edit", editFiles); assertEquals("edit error", 0, FileSpecBuilder.getInvalidFileSpecs(editFiles).size()); Map<String, String> attributes = new HashMap<String, String>(); attributes.put(attrName, attrValue); List<IFileSpec> attrSpecs = server.setFileAttributes(testFiles, attributes, new SetFileAttributesOptions()); assertNotNull("null filespecs from set attributes operation", attrSpecs); assertEquals("set attributes error", 0, FileSpecBuilder.getInvalidFileSpecs(attrSpecs).size()); FileStatAncilliaryOptions fsaOpts = new FileStatAncilliaryOptions(); fsaOpts.setShowAttributes(true); List<IExtendedFileSpec> statFiles = server.getExtendedFiles(testFiles, new GetExtendedFilesOptions().setAncilliaryOptions(fsaOpts)); assertNotNull("null return from fstat", statFiles); IExtendedFileSpec attrFile = statFiles.get(0); assertNotNull("null 
attribute file", attrFile); assertNotNull("null attributes in retrieved open file", attrFile.getAttributes()); assertNotNull("expected file attribute missing", attrFile.getAttributes().get(attrName)); assertEquals("expected attribute value wrong", attrValue, new String(attrFile.getAttributes().get(attrName))); } catch (Exception exc) { fail("Unexpected exception: " + exc.getLocalizedMessage()); } finally { if (server != null) { if ((client != null) && (changelist != null)) { try { client.revertFiles(testFiles, new RevertFilesOptions().setChangelistId(changelist.getId())); server.deletePendingChangelist(changelist.getId()); } catch (P4JavaException e) { // at least we tried... } } this.endServerSession(server); } } } }
groboclown/p4ic4idea
p4java/r18-1/src/test/java/com/perforce/p4java/tests/dev/unit/bug/r111/Job042258Test.java
Java
apache-2.0
4,146
#! /bin/bash DPLID=$(ctx deployment id) ctx logger info "disabling monitoring for deployment ${DPLID}" MONITORING_DIR="$BASE_DIR/${DPLID}" read PID < $MONITORING_DIR/pid_file sudo kill -9 $PID (crontab -l ; cat $MONITORING_DIR/policycron) 2>&1 | grep -v "no crontab" | grep -v $MONITORING_DIR/log | sort | uniq | crontab - ctx logger info "monitoring cron job for deployment ${DPLID}, nodes ${NTM} removed" rm -rf $MONITORING_DIR
victorkeophila/alien4cloud-cloudify3-provider
src/main/resources/recipe/monitor/scripts/policy-stop.sh
Shell
apache-2.0
435
/** * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.core.operator.docvaliterators; import com.linkedin.pinot.common.data.FieldSpec.DataType; import com.linkedin.pinot.core.common.BlockSingleValIterator; import com.linkedin.pinot.core.common.Constants; import com.linkedin.pinot.core.io.reader.ReaderContext; import com.linkedin.pinot.core.io.reader.impl.SortedForwardIndexReader; import com.linkedin.pinot.core.io.reader.impl.SortedValueReaderContext; public final class SortedSingleValueIterator extends BlockSingleValIterator { private int counter = 0; private SortedForwardIndexReader sVReader; SortedValueReaderContext context; public SortedSingleValueIterator(SortedForwardIndexReader sVReader) { this.sVReader = sVReader; this.context = sVReader.createContext(); } @Override public boolean skipTo(int docId) { if (docId >= sVReader.getLength()) { return false; } counter = docId; return true; } @Override public int size() { return sVReader.getLength(); } @Override public int nextIntVal() { if (counter >= sVReader.getLength()) { return Constants.EOF; } return sVReader.getInt(counter++, context); } @Override public boolean reset() { counter = 0; return true; } @Override public boolean next() { // TODO Auto-generated method stub return false; } @Override public boolean hasNext() { return (counter < sVReader.getLength()); } @Override public DataType getValueType() { // TODO Auto-generated method 
stub return null; } @Override public int currentDocId() { return counter; } }
pinotlytics/pinot
pinot-core/src/main/java/com/linkedin/pinot/core/operator/docvaliterators/SortedSingleValueIterator.java
Java
apache-2.0
2,253
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package aurelienribon.gdxsetupui; import aurelienribon.utils.ParseUtils; import java.util.List; /** * Skeleton for all the parameters related to a library definition. * @author Aurelien Ribon | http://www.aurelienribon.com/ */ public class LibraryDef { public final String name; public final String author; public final String authorWebsite; public final String description; public final String homepage; public final String logo; public final String gwtModuleName; public final String stableVersion; public final String stableUrl; public final String latestUrl; public final List<String> libsCommon; public final List<String> libsDesktop; public final List<String> libsAndroid; public final List<String> libsHtml; public final List<String> libsRobovm; public final List<String> data; /** * Creates a library definition by parsing the given text. If a parameter * block is not found, it is replaced by a standard content. 
*/ public LibraryDef(String content) { this.name = ParseUtils.parseBlock(content, "name", "<unknown>"); this.author = ParseUtils.parseBlock(content, "author", "<unknown>"); this.authorWebsite = ParseUtils.parseBlock(content, "author-website", null); this.description = ParseUtils.parseBlock(content, "description", "").replaceAll("\\s+", " "); this.homepage = ParseUtils.parseBlock(content, "homepage", null); this.logo = ParseUtils.parseBlock(content, "logo", null); this.gwtModuleName = ParseUtils.parseBlock(content, "gwt", null); this.stableVersion = ParseUtils.parseBlock(content, "stable-version", "<unknown>"); this.stableUrl = ParseUtils.parseBlock(content, "stable-url", null); this.latestUrl = ParseUtils.parseBlock(content, "latest-url", null); this.libsCommon = ParseUtils.parseBlockAsList(content, "libs-common"); this.libsDesktop = ParseUtils.parseBlockAsList(content, "libs-desktop"); this.libsAndroid = ParseUtils.parseBlockAsList(content, "libs-android"); this.libsHtml = ParseUtils.parseBlockAsList(content, "libs-html"); this.libsRobovm = ParseUtils.parseBlockAsList(content, "libs-robovm"); this.data = ParseUtils.parseBlockAsList(content, "data"); } }
ryoenji/libgdx
extensions/gdx-setup-ui/src/aurelienribon/gdxsetupui/LibraryDef.java
Java
apache-2.0
2,914
<?php final class PhabricatorRepositoryShortcut extends PhabricatorRepositoryDAO { protected $name; protected $href; protected $description; protected $sequence; public function getConfiguration() { return array( self::CONFIG_TIMESTAMPS => false, ) + parent::getConfiguration(); } }
apexstudios/phabricator
src/applications/repository/storage/PhabricatorRepositoryShortcut.php
PHP
apache-2.0
313
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("EntrySwell.UWP")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("EntrySwell.UWP")] [assembly: AssemblyCopyright("Copyright © 2015")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")] [assembly: ComVisible(false)]
xamarin/xamarin-forms-book-preview-2
Chapter23/EntrySwell/EntrySwell/EntrySwell.UWP/Properties/AssemblyInfo.cs
C#
apache-2.0
1,048
package org.hl7.fhir.instance.model; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Wed, Nov 11, 2015 10:54-0500 for FHIR v1.0.2 import java.util.*; import org.hl7.fhir.utilities.Utilities; import org.hl7.fhir.instance.model.Enumerations.*; import org.hl7.fhir.instance.model.annotations.ResourceDef; import org.hl7.fhir.instance.model.annotations.SearchParamDefinition; import org.hl7.fhir.instance.model.annotations.Child; import org.hl7.fhir.instance.model.annotations.Description; import org.hl7.fhir.instance.model.annotations.Block; import org.hl7.fhir.instance.model.api.*; /** * A definition of a FHIR structure. This resource is used to describe the underlying resources, data types defined in FHIR, and also for describing extensions, and constraints on resources and data types. */ @ResourceDef(name="StructureDefinition", profile="http://hl7.org/fhir/Profile/StructureDefinition") public class StructureDefinition extends DomainResource { public enum StructureDefinitionKind { /** * A data type - either a primitive or complex structure that defines a set of data elements. These can be used throughout Resource and extension definitions. */ DATATYPE, /** * A resource defined by the FHIR specification. */ RESOURCE, /** * A logical model - a conceptual package of data that will be mapped to resources for implementation. 
*/ LOGICAL, /** * added to help the parsers */ NULL; public static StructureDefinitionKind fromCode(String codeString) throws Exception { if (codeString == null || "".equals(codeString)) return null; if ("datatype".equals(codeString)) return DATATYPE; if ("resource".equals(codeString)) return RESOURCE; if ("logical".equals(codeString)) return LOGICAL; throw new Exception("Unknown StructureDefinitionKind code '"+codeString+"'"); } public String toCode() { switch (this) { case DATATYPE: return "datatype"; case RESOURCE: return "resource"; case LOGICAL: return "logical"; default: return "?"; } } public String getSystem() { switch (this) { case DATATYPE: return "http://hl7.org/fhir/structure-definition-kind"; case RESOURCE: return "http://hl7.org/fhir/structure-definition-kind"; case LOGICAL: return "http://hl7.org/fhir/structure-definition-kind"; default: return "?"; } } public String getDefinition() { switch (this) { case DATATYPE: return "A data type - either a primitive or complex structure that defines a set of data elements. 
These can be used throughout Resource and extension definitions."; case RESOURCE: return "A resource defined by the FHIR specification."; case LOGICAL: return "A logical model - a conceptual package of data that will be mapped to resources for implementation."; default: return "?"; } } public String getDisplay() { switch (this) { case DATATYPE: return "Data Type"; case RESOURCE: return "Resource"; case LOGICAL: return "Logical Model"; default: return "?"; } } } public static class StructureDefinitionKindEnumFactory implements EnumFactory<StructureDefinitionKind> { public StructureDefinitionKind fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) if (codeString == null || "".equals(codeString)) return null; if ("datatype".equals(codeString)) return StructureDefinitionKind.DATATYPE; if ("resource".equals(codeString)) return StructureDefinitionKind.RESOURCE; if ("logical".equals(codeString)) return StructureDefinitionKind.LOGICAL; throw new IllegalArgumentException("Unknown StructureDefinitionKind code '"+codeString+"'"); } public String toCode(StructureDefinitionKind code) { if (code == StructureDefinitionKind.DATATYPE) return "datatype"; if (code == StructureDefinitionKind.RESOURCE) return "resource"; if (code == StructureDefinitionKind.LOGICAL) return "logical"; return "?"; } } public enum ExtensionContext { /** * The context is all elements matching a particular resource element path. */ RESOURCE, /** * The context is all nodes matching a particular data type element path (root or repeating element) or all elements referencing a particular primitive data type (expressed as the datatype name). */ DATATYPE, /** * The context is all nodes whose mapping to a specified reference model corresponds to a particular mapping structure. The context identifies the mapping target. The mapping should clearly identify where such an extension could be used. 
*/ MAPPING, /** * The context is a particular extension from a particular profile, a uri that identifies the extension definition. */ EXTENSION, /** * added to help the parsers */ NULL; public static ExtensionContext fromCode(String codeString) throws Exception { if (codeString == null || "".equals(codeString)) return null; if ("resource".equals(codeString)) return RESOURCE; if ("datatype".equals(codeString)) return DATATYPE; if ("mapping".equals(codeString)) return MAPPING; if ("extension".equals(codeString)) return EXTENSION; throw new Exception("Unknown ExtensionContext code '"+codeString+"'"); } public String toCode() { switch (this) { case RESOURCE: return "resource"; case DATATYPE: return "datatype"; case MAPPING: return "mapping"; case EXTENSION: return "extension"; default: return "?"; } } public String getSystem() { switch (this) { case RESOURCE: return "http://hl7.org/fhir/extension-context"; case DATATYPE: return "http://hl7.org/fhir/extension-context"; case MAPPING: return "http://hl7.org/fhir/extension-context"; case EXTENSION: return "http://hl7.org/fhir/extension-context"; default: return "?"; } } public String getDefinition() { switch (this) { case RESOURCE: return "The context is all elements matching a particular resource element path."; case DATATYPE: return "The context is all nodes matching a particular data type element path (root or repeating element) or all elements referencing a particular primitive data type (expressed as the datatype name)."; case MAPPING: return "The context is all nodes whose mapping to a specified reference model corresponds to a particular mapping structure. The context identifies the mapping target. 
The mapping should clearly identify where such an extension could be used."; case EXTENSION: return "The context is a particular extension from a particular profile, a uri that identifies the extension definition."; default: return "?"; } } public String getDisplay() { switch (this) { case RESOURCE: return "Resource"; case DATATYPE: return "Datatype"; case MAPPING: return "Mapping"; case EXTENSION: return "Extension"; default: return "?"; } } } public static class ExtensionContextEnumFactory implements EnumFactory<ExtensionContext> { public ExtensionContext fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) if (codeString == null || "".equals(codeString)) return null; if ("resource".equals(codeString)) return ExtensionContext.RESOURCE; if ("datatype".equals(codeString)) return ExtensionContext.DATATYPE; if ("mapping".equals(codeString)) return ExtensionContext.MAPPING; if ("extension".equals(codeString)) return ExtensionContext.EXTENSION; throw new IllegalArgumentException("Unknown ExtensionContext code '"+codeString+"'"); } public String toCode(ExtensionContext code) { if (code == ExtensionContext.RESOURCE) return "resource"; if (code == ExtensionContext.DATATYPE) return "datatype"; if (code == ExtensionContext.MAPPING) return "mapping"; if (code == ExtensionContext.EXTENSION) return "extension"; return "?"; } } @Block() public static class StructureDefinitionContactComponent extends BackboneElement implements IBaseBackboneElement { /** * The name of an individual to contact regarding the structure definition. */ @Child(name = "name", type = {StringType.class}, order=1, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="Name of a individual to contact", formalDefinition="The name of an individual to contact regarding the structure definition." ) protected StringType name; /** * Contact details for individual (if a name was provided) or the publisher. 
*/ @Child(name = "telecom", type = {ContactPoint.class}, order=2, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Contact details for individual or publisher", formalDefinition="Contact details for individual (if a name was provided) or the publisher." ) protected List<ContactPoint> telecom; private static final long serialVersionUID = -1179697803L; /* * Constructor */ public StructureDefinitionContactComponent() { super(); } /** * @return {@link #name} (The name of an individual to contact regarding the structure definition.). This is the underlying object with id, value and extensions. The accessor "getName" gives direct access to the value */ public StringType getNameElement() { if (this.name == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinitionContactComponent.name"); else if (Configuration.doAutoCreate()) this.name = new StringType(); // bb return this.name; } public boolean hasNameElement() { return this.name != null && !this.name.isEmpty(); } public boolean hasName() { return this.name != null && !this.name.isEmpty(); } /** * @param value {@link #name} (The name of an individual to contact regarding the structure definition.). This is the underlying object with id, value and extensions. The accessor "getName" gives direct access to the value */ public StructureDefinitionContactComponent setNameElement(StringType value) { this.name = value; return this; } /** * @return The name of an individual to contact regarding the structure definition. */ public String getName() { return this.name == null ? null : this.name.getValue(); } /** * @param value The name of an individual to contact regarding the structure definition. 
*/ public StructureDefinitionContactComponent setName(String value) { if (Utilities.noString(value)) this.name = null; else { if (this.name == null) this.name = new StringType(); this.name.setValue(value); } return this; } /** * @return {@link #telecom} (Contact details for individual (if a name was provided) or the publisher.) */ public List<ContactPoint> getTelecom() { if (this.telecom == null) this.telecom = new ArrayList<ContactPoint>(); return this.telecom; } public boolean hasTelecom() { if (this.telecom == null) return false; for (ContactPoint item : this.telecom) if (!item.isEmpty()) return true; return false; } /** * @return {@link #telecom} (Contact details for individual (if a name was provided) or the publisher.) */ // syntactic sugar public ContactPoint addTelecom() { //3 ContactPoint t = new ContactPoint(); if (this.telecom == null) this.telecom = new ArrayList<ContactPoint>(); this.telecom.add(t); return t; } // syntactic sugar public StructureDefinitionContactComponent addTelecom(ContactPoint t) { //3 if (t == null) return this; if (this.telecom == null) this.telecom = new ArrayList<ContactPoint>(); this.telecom.add(t); return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("name", "string", "The name of an individual to contact regarding the structure definition.", 0, java.lang.Integer.MAX_VALUE, name)); childrenList.add(new Property("telecom", "ContactPoint", "Contact details for individual (if a name was provided) or the publisher.", 0, java.lang.Integer.MAX_VALUE, telecom)); } public StructureDefinitionContactComponent copy() { StructureDefinitionContactComponent dst = new StructureDefinitionContactComponent(); copyValues(dst); dst.name = name == null ? 
null : name.copy(); if (telecom != null) { dst.telecom = new ArrayList<ContactPoint>(); for (ContactPoint i : telecom) dst.telecom.add(i.copy()); }; return dst; } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof StructureDefinitionContactComponent)) return false; StructureDefinitionContactComponent o = (StructureDefinitionContactComponent) other; return compareDeep(name, o.name, true) && compareDeep(telecom, o.telecom, true); } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof StructureDefinitionContactComponent)) return false; StructureDefinitionContactComponent o = (StructureDefinitionContactComponent) other; return compareValues(name, o.name, true); } public boolean isEmpty() { return super.isEmpty() && (name == null || name.isEmpty()) && (telecom == null || telecom.isEmpty()) ; } } @Block() public static class StructureDefinitionMappingComponent extends BackboneElement implements IBaseBackboneElement { /** * An Internal id that is used to identify this mapping set when specific mappings are made. */ @Child(name = "identity", type = {IdType.class}, order=1, min=1, max=1, modifier=false, summary=false) @Description(shortDefinition="Internal id when this mapping is used", formalDefinition="An Internal id that is used to identify this mapping set when specific mappings are made." ) protected IdType identity; /** * An absolute URI that identifies the specification that this mapping is expressed to. */ @Child(name = "uri", type = {UriType.class}, order=2, min=0, max=1, modifier=false, summary=false) @Description(shortDefinition="Identifies what this mapping refers to", formalDefinition="An absolute URI that identifies the specification that this mapping is expressed to." ) protected UriType uri; /** * A name for the specification that is being mapped to. 
*/ @Child(name = "name", type = {StringType.class}, order=3, min=0, max=1, modifier=false, summary=false) @Description(shortDefinition="Names what this mapping refers to", formalDefinition="A name for the specification that is being mapped to." ) protected StringType name; /** * Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage. */ @Child(name = "comments", type = {StringType.class}, order=4, min=0, max=1, modifier=false, summary=false) @Description(shortDefinition="Versions, Issues, Scope limitations etc.", formalDefinition="Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage." ) protected StringType comments; private static final long serialVersionUID = 299630820L; /* * Constructor */ public StructureDefinitionMappingComponent() { super(); } /* * Constructor */ public StructureDefinitionMappingComponent(IdType identity) { super(); this.identity = identity; } /** * @return {@link #identity} (An Internal id that is used to identify this mapping set when specific mappings are made.). This is the underlying object with id, value and extensions. The accessor "getIdentity" gives direct access to the value */ public IdType getIdentityElement() { if (this.identity == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinitionMappingComponent.identity"); else if (Configuration.doAutoCreate()) this.identity = new IdType(); // bb return this.identity; } public boolean hasIdentityElement() { return this.identity != null && !this.identity.isEmpty(); } public boolean hasIdentity() { return this.identity != null && !this.identity.isEmpty(); } /** * @param value {@link #identity} (An Internal id that is used to identify this mapping set when specific mappings are made.). This is the underlying object with id, value and extensions. 
The accessor "getIdentity" gives direct access to the value */ public StructureDefinitionMappingComponent setIdentityElement(IdType value) { this.identity = value; return this; } /** * @return An Internal id that is used to identify this mapping set when specific mappings are made. */ public String getIdentity() { return this.identity == null ? null : this.identity.getValue(); } /** * @param value An Internal id that is used to identify this mapping set when specific mappings are made. */ public StructureDefinitionMappingComponent setIdentity(String value) { if (this.identity == null) this.identity = new IdType(); this.identity.setValue(value); return this; } /** * @return {@link #uri} (An absolute URI that identifies the specification that this mapping is expressed to.). This is the underlying object with id, value and extensions. The accessor "getUri" gives direct access to the value */ public UriType getUriElement() { if (this.uri == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinitionMappingComponent.uri"); else if (Configuration.doAutoCreate()) this.uri = new UriType(); // bb return this.uri; } public boolean hasUriElement() { return this.uri != null && !this.uri.isEmpty(); } public boolean hasUri() { return this.uri != null && !this.uri.isEmpty(); } /** * @param value {@link #uri} (An absolute URI that identifies the specification that this mapping is expressed to.). This is the underlying object with id, value and extensions. The accessor "getUri" gives direct access to the value */ public StructureDefinitionMappingComponent setUriElement(UriType value) { this.uri = value; return this; } /** * @return An absolute URI that identifies the specification that this mapping is expressed to. */ public String getUri() { return this.uri == null ? null : this.uri.getValue(); } /** * @param value An absolute URI that identifies the specification that this mapping is expressed to. 
*/ public StructureDefinitionMappingComponent setUri(String value) { if (Utilities.noString(value)) this.uri = null; else { if (this.uri == null) this.uri = new UriType(); this.uri.setValue(value); } return this; } /** * @return {@link #name} (A name for the specification that is being mapped to.). This is the underlying object with id, value and extensions. The accessor "getName" gives direct access to the value */ public StringType getNameElement() { if (this.name == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinitionMappingComponent.name"); else if (Configuration.doAutoCreate()) this.name = new StringType(); // bb return this.name; } public boolean hasNameElement() { return this.name != null && !this.name.isEmpty(); } public boolean hasName() { return this.name != null && !this.name.isEmpty(); } /** * @param value {@link #name} (A name for the specification that is being mapped to.). This is the underlying object with id, value and extensions. The accessor "getName" gives direct access to the value */ public StructureDefinitionMappingComponent setNameElement(StringType value) { this.name = value; return this; } /** * @return A name for the specification that is being mapped to. */ public String getName() { return this.name == null ? null : this.name.getValue(); } /** * @param value A name for the specification that is being mapped to. */ public StructureDefinitionMappingComponent setName(String value) { if (Utilities.noString(value)) this.name = null; else { if (this.name == null) this.name = new StringType(); this.name.setValue(value); } return this; } /** * @return {@link #comments} (Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage.). This is the underlying object with id, value and extensions. 
The accessor "getComments" gives direct access to the value */ public StringType getCommentsElement() { if (this.comments == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinitionMappingComponent.comments"); else if (Configuration.doAutoCreate()) this.comments = new StringType(); // bb return this.comments; } public boolean hasCommentsElement() { return this.comments != null && !this.comments.isEmpty(); } public boolean hasComments() { return this.comments != null && !this.comments.isEmpty(); } /** * @param value {@link #comments} (Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage.). This is the underlying object with id, value and extensions. The accessor "getComments" gives direct access to the value */ public StructureDefinitionMappingComponent setCommentsElement(StringType value) { this.comments = value; return this; } /** * @return Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage. */ public String getComments() { return this.comments == null ? null : this.comments.getValue(); } /** * @param value Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage. 
*/ public StructureDefinitionMappingComponent setComments(String value) { if (Utilities.noString(value)) this.comments = null; else { if (this.comments == null) this.comments = new StringType(); this.comments.setValue(value); } return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("identity", "id", "An Internal id that is used to identify this mapping set when specific mappings are made.", 0, java.lang.Integer.MAX_VALUE, identity)); childrenList.add(new Property("uri", "uri", "An absolute URI that identifies the specification that this mapping is expressed to.", 0, java.lang.Integer.MAX_VALUE, uri)); childrenList.add(new Property("name", "string", "A name for the specification that is being mapped to.", 0, java.lang.Integer.MAX_VALUE, name)); childrenList.add(new Property("comments", "string", "Comments about this mapping, including version notes, issues, scope limitations, and other important notes for usage.", 0, java.lang.Integer.MAX_VALUE, comments)); } public StructureDefinitionMappingComponent copy() { StructureDefinitionMappingComponent dst = new StructureDefinitionMappingComponent(); copyValues(dst); dst.identity = identity == null ? null : identity.copy(); dst.uri = uri == null ? null : uri.copy(); dst.name = name == null ? null : name.copy(); dst.comments = comments == null ? 
null : comments.copy(); return dst; } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof StructureDefinitionMappingComponent)) return false; StructureDefinitionMappingComponent o = (StructureDefinitionMappingComponent) other; return compareDeep(identity, o.identity, true) && compareDeep(uri, o.uri, true) && compareDeep(name, o.name, true) && compareDeep(comments, o.comments, true); } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof StructureDefinitionMappingComponent)) return false; StructureDefinitionMappingComponent o = (StructureDefinitionMappingComponent) other; return compareValues(identity, o.identity, true) && compareValues(uri, o.uri, true) && compareValues(name, o.name, true) && compareValues(comments, o.comments, true); } public boolean isEmpty() { return super.isEmpty() && (identity == null || identity.isEmpty()) && (uri == null || uri.isEmpty()) && (name == null || name.isEmpty()) && (comments == null || comments.isEmpty()); } } @Block() public static class StructureDefinitionSnapshotComponent extends BackboneElement implements IBaseBackboneElement { /** * Captures constraints on each element within the resource. */ @Child(name = "element", type = {ElementDefinition.class}, order=1, min=1, max=Child.MAX_UNLIMITED, modifier=false, summary=false) @Description(shortDefinition="Definition of elements in the resource (if no StructureDefinition)", formalDefinition="Captures constraints on each element within the resource." ) protected List<ElementDefinition> element; private static final long serialVersionUID = 53896641L; /* * Constructor */ public StructureDefinitionSnapshotComponent() { super(); } /** * @return {@link #element} (Captures constraints on each element within the resource.) 
*/ public List<ElementDefinition> getElement() { if (this.element == null) this.element = new ArrayList<ElementDefinition>(); return this.element; } public boolean hasElement() { if (this.element == null) return false; for (ElementDefinition item : this.element) if (!item.isEmpty()) return true; return false; } /** * @return {@link #element} (Captures constraints on each element within the resource.) */ // syntactic sugar public ElementDefinition addElement() { //3 ElementDefinition t = new ElementDefinition(); if (this.element == null) this.element = new ArrayList<ElementDefinition>(); this.element.add(t); return t; } // syntactic sugar public StructureDefinitionSnapshotComponent addElement(ElementDefinition t) { //3 if (t == null) return this; if (this.element == null) this.element = new ArrayList<ElementDefinition>(); this.element.add(t); return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("element", "ElementDefinition", "Captures constraints on each element within the resource.", 0, java.lang.Integer.MAX_VALUE, element)); } public StructureDefinitionSnapshotComponent copy() { StructureDefinitionSnapshotComponent dst = new StructureDefinitionSnapshotComponent(); copyValues(dst); if (element != null) { dst.element = new ArrayList<ElementDefinition>(); for (ElementDefinition i : element) dst.element.add(i.copy()); }; return dst; } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof StructureDefinitionSnapshotComponent)) return false; StructureDefinitionSnapshotComponent o = (StructureDefinitionSnapshotComponent) other; return compareDeep(element, o.element, true); } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof StructureDefinitionSnapshotComponent)) return false; StructureDefinitionSnapshotComponent o = 
(StructureDefinitionSnapshotComponent) other; return true; } public boolean isEmpty() { return super.isEmpty() && (element == null || element.isEmpty()); } } @Block() public static class StructureDefinitionDifferentialComponent extends BackboneElement implements IBaseBackboneElement { /** * Captures constraints on each element within the resource. */ @Child(name = "element", type = {ElementDefinition.class}, order=1, min=1, max=Child.MAX_UNLIMITED, modifier=false, summary=false) @Description(shortDefinition="Definition of elements in the resource (if no StructureDefinition)", formalDefinition="Captures constraints on each element within the resource." ) protected List<ElementDefinition> element; private static final long serialVersionUID = 53896641L; /* * Constructor */ public StructureDefinitionDifferentialComponent() { super(); } /** * @return {@link #element} (Captures constraints on each element within the resource.) */ public List<ElementDefinition> getElement() { if (this.element == null) this.element = new ArrayList<ElementDefinition>(); return this.element; } public boolean hasElement() { if (this.element == null) return false; for (ElementDefinition item : this.element) if (!item.isEmpty()) return true; return false; } /** * @return {@link #element} (Captures constraints on each element within the resource.) 
*/ // syntactic sugar public ElementDefinition addElement() { //3 ElementDefinition t = new ElementDefinition(); if (this.element == null) this.element = new ArrayList<ElementDefinition>(); this.element.add(t); return t; } // syntactic sugar public StructureDefinitionDifferentialComponent addElement(ElementDefinition t) { //3 if (t == null) return this; if (this.element == null) this.element = new ArrayList<ElementDefinition>(); this.element.add(t); return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("element", "ElementDefinition", "Captures constraints on each element within the resource.", 0, java.lang.Integer.MAX_VALUE, element)); } public StructureDefinitionDifferentialComponent copy() { StructureDefinitionDifferentialComponent dst = new StructureDefinitionDifferentialComponent(); copyValues(dst); if (element != null) { dst.element = new ArrayList<ElementDefinition>(); for (ElementDefinition i : element) dst.element.add(i.copy()); }; return dst; } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof StructureDefinitionDifferentialComponent)) return false; StructureDefinitionDifferentialComponent o = (StructureDefinitionDifferentialComponent) other; return compareDeep(element, o.element, true); } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof StructureDefinitionDifferentialComponent)) return false; StructureDefinitionDifferentialComponent o = (StructureDefinitionDifferentialComponent) other; return true; } public boolean isEmpty() { return super.isEmpty() && (element == null || element.isEmpty()); } } /** * An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. 
This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published. */ @Child(name = "url", type = {UriType.class}, order=0, min=1, max=1, modifier=false, summary=true) @Description(shortDefinition="Absolute URL used to reference this StructureDefinition", formalDefinition="An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published." ) protected UriType url; /** * Formal identifier that is used to identify this StructureDefinition when it is represented in other formats, or referenced in a specification, model, design or an instance (should be globally unique OID, UUID, or URI), (if it's not possible to use the literal URI). */ @Child(name = "identifier", type = {Identifier.class}, order=1, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Other identifiers for the StructureDefinition", formalDefinition="Formal identifier that is used to identify this StructureDefinition when it is represented in other formats, or referenced in a specification, model, design or an instance (should be globally unique OID, UUID, or URI), (if it's not possible to use the literal URI)." ) protected List<Identifier> identifier; /** * The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. This is an arbitrary value managed by the StructureDefinition author manually. 
     */
    @Child(name = "version", type = {StringType.class}, order=2, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Logical id for this version of the StructureDefinition", formalDefinition="The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. This is an arbitrary value managed by the StructureDefinition author manually." )
    protected StringType version;

    /**
     * A free text natural language name identifying the StructureDefinition.
     */
    @Child(name = "name", type = {StringType.class}, order=3, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Informal name for this StructureDefinition", formalDefinition="A free text natural language name identifying the StructureDefinition." )
    protected StringType name;

    /**
     * Defined so that applications can use this name when displaying the value of the extension to the user.
     */
    @Child(name = "display", type = {StringType.class}, order=4, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Use this name when displaying the value", formalDefinition="Defined so that applications can use this name when displaying the value of the extension to the user." )
    protected StringType display;

    /**
     * The status of the StructureDefinition.
     */
    // modifier=true: status is a modifier element in the FHIR metadata (e.g. a
    // retired definition changes how the resource should be interpreted).
    @Child(name = "status", type = {CodeType.class}, order=5, min=1, max=1, modifier=true, summary=true)
    @Description(shortDefinition="draft | active | retired", formalDefinition="The status of the StructureDefinition." )
    protected Enumeration<ConformanceResourceStatus> status;

    /**
     * This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage.
     */
    @Child(name = "experimental", type = {BooleanType.class}, order=6, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="If for testing purposes, not real usage", formalDefinition="This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage." )
    protected BooleanType experimental;

    /**
     * The name of the individual or organization that published the structure definition.
     */
    @Child(name = "publisher", type = {StringType.class}, order=7, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Name of the publisher (Organization or individual)", formalDefinition="The name of the individual or organization that published the structure definition." )
    protected StringType publisher;

    /**
     * Contacts to assist a user in finding and communicating with the publisher.
     */
    // type = {} : backing type is the locally declared StructureDefinitionContactComponent.
    @Child(name = "contact", type = {}, order=8, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
    @Description(shortDefinition="Contact details of the publisher", formalDefinition="Contacts to assist a user in finding and communicating with the publisher." )
    protected List<StructureDefinitionContactComponent> contact;

    /**
     * The date this version of the structure definition was published. The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes.
     */
    @Child(name = "date", type = {DateTimeType.class}, order=9, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Date for this version of the StructureDefinition", formalDefinition="The date this version of the structure definition was published. The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes."
 )
    protected DateTimeType date;

    /**
     * A free text natural language description of the StructureDefinition and its use.
     */
    @Child(name = "description", type = {StringType.class}, order=10, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Natural language description of the StructureDefinition", formalDefinition="A free text natural language description of the StructureDefinition and its use." )
    protected StringType description;

    /**
     * The content was developed with a focus and intent of supporting the contexts that are listed. These terms may be used to assist with indexing and searching of structure definitions.
     */
    @Child(name = "useContext", type = {CodeableConcept.class}, order=11, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
    @Description(shortDefinition="Content intends to support these contexts", formalDefinition="The content was developed with a focus and intent of supporting the contexts that are listed. These terms may be used to assist with indexing and searching of structure definitions." )
    protected List<CodeableConcept> useContext;

    /**
     * Explains why this structure definition is needed and why it's been constrained as it has.
     */
    // summary=false: not part of the resource summary view.
    @Child(name = "requirements", type = {StringType.class}, order=12, min=0, max=1, modifier=false, summary=false)
    @Description(shortDefinition="Scope and Usage this structure definition is for", formalDefinition="Explains why this structure definition is needed and why it's been constrained as it has." )
    protected StringType requirements;

    /**
     * A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings.
     */
    @Child(name = "copyright", type = {StringType.class}, order=13, min=0, max=1, modifier=false, summary=false)
    @Description(shortDefinition="Use and/or publishing restrictions", formalDefinition="A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings." )
    protected StringType copyright;

    /**
     * A set of terms from external terminologies that may be used to assist with indexing and searching of templates.
     */
    @Child(name = "code", type = {Coding.class}, order=14, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
    @Description(shortDefinition="Assist with indexing and finding", formalDefinition="A set of terms from external terminologies that may be used to assist with indexing and searching of templates." )
    protected List<Coding> code;

    /**
     * The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version.
     */
    @Child(name = "fhirVersion", type = {IdType.class}, order=15, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="FHIR Version this StructureDefinition targets", formalDefinition="The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version." )
    protected IdType fhirVersion;

    /**
     * An external specification that the content is mapped to.
     */
    @Child(name = "mapping", type = {}, order=16, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=false)
    @Description(shortDefinition="External specification that the content is mapped to", formalDefinition="An external specification that the content is mapped to."
 )
    protected List<StructureDefinitionMappingComponent> mapping;

    /**
     * Defines the kind of structure that this definition is describing.
     */
    @Child(name = "kind", type = {CodeType.class}, order=17, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="datatype | resource | logical", formalDefinition="Defines the kind of structure that this definition is describing." )
    protected Enumeration<StructureDefinitionKind> kind;

    /**
     * The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure.
     */
    @Child(name = "constrainedType", type = {CodeType.class}, order=18, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Any datatype or resource, including abstract ones", formalDefinition="The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure." )
    protected CodeType constrainedType;

    /**
     * Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type.
     */
    // Trailing underscore because 'abstract' is a reserved Java keyword.
    @Child(name = "abstract", type = {BooleanType.class}, order=19, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Whether the structure is abstract", formalDefinition="Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type." )
    protected BooleanType abstract_;

    /**
     * If this is an extension, Identifies the context within FHIR resources where the extension can be used.
     */
    @Child(name = "contextType", type = {CodeType.class}, order=20, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="resource | datatype | mapping | extension", formalDefinition="If this is an extension, Identifies the context within FHIR resources where the extension can be used." )
    protected Enumeration<ExtensionContext> contextType;

    /**
     * Identifies the types of resource or data type elements to which the extension can be applied.
     */
    @Child(name = "context", type = {StringType.class}, order=21, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
    @Description(shortDefinition="Where the extension can be used in instances", formalDefinition="Identifies the types of resource or data type elements to which the extension can be applied." )
    protected List<StringType> context;

    /**
     * An absolute URI that is the base structure from which this set of constraints is derived.
     */
    @Child(name = "base", type = {UriType.class}, order=22, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Structure that this set of constraints applies to", formalDefinition="An absolute URI that is the base structure from which this set of constraints is derived." )
    protected UriType base;

    /**
     * A snapshot view is expressed in a stand alone form that can be used and interpreted without considering the base StructureDefinition.
     */
    @Child(name = "snapshot", type = {}, order=23, min=0, max=1, modifier=false, summary=false)
    @Description(shortDefinition="Snapshot view of the structure", formalDefinition="A snapshot view is expressed in a stand alone form that can be used and interpreted without considering the base StructureDefinition." )
    protected StructureDefinitionSnapshotComponent snapshot;

    /**
     * A differential view is expressed relative to the base StructureDefinition - a statement of differences that it applies.
*/ @Child(name = "differential", type = {}, order=24, min=0, max=1, modifier=false, summary=false) @Description(shortDefinition="Differential view of the structure", formalDefinition="A differential view is expressed relative to the base StructureDefinition - a statement of differences that it applies." ) protected StructureDefinitionDifferentialComponent differential; private static final long serialVersionUID = -580779569L; /* * Constructor */ public StructureDefinition() { super(); } /* * Constructor */ public StructureDefinition(UriType url, StringType name, Enumeration<ConformanceResourceStatus> status, Enumeration<StructureDefinitionKind> kind, BooleanType abstract_) { super(); this.url = url; this.name = name; this.status = status; this.kind = kind; this.abstract_ = abstract_; } /** * @return {@link #url} (An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published.). This is the underlying object with id, value and extensions. The accessor "getUrl" gives direct access to the value */ public UriType getUrlElement() { if (this.url == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.url"); else if (Configuration.doAutoCreate()) this.url = new UriType(); // bb return this.url; } public boolean hasUrlElement() { return this.url != null && !this.url.isEmpty(); } public boolean hasUrl() { return this.url != null && !this.url.isEmpty(); } /** * @param value {@link #url} (An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published.). This is the underlying object with id, value and extensions. 
The accessor "getUrl" gives direct access to the value */ public StructureDefinition setUrlElement(UriType value) { this.url = value; return this; } /** * @return An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published. */ public String getUrl() { return this.url == null ? null : this.url.getValue(); } /** * @param value An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published. */ public StructureDefinition setUrl(String value) { if (this.url == null) this.url = new UriType(); this.url.setValue(value); return this; } /** * @return {@link #identifier} (Formal identifier that is used to identify this StructureDefinition when it is represented in other formats, or referenced in a specification, model, design or an instance (should be globally unique OID, UUID, or URI), (if it's not possible to use the literal URI).) */ public List<Identifier> getIdentifier() { if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); return this.identifier; } public boolean hasIdentifier() { if (this.identifier == null) return false; for (Identifier item : this.identifier) if (!item.isEmpty()) return true; return false; } /** * @return {@link #identifier} (Formal identifier that is used to identify this StructureDefinition when it is represented in other formats, or referenced in a specification, model, design or an instance (should be globally unique OID, UUID, or URI), (if it's not possible to use the literal URI).) 
*/ // syntactic sugar public Identifier addIdentifier() { //3 Identifier t = new Identifier(); if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); this.identifier.add(t); return t; } // syntactic sugar public StructureDefinition addIdentifier(Identifier t) { //3 if (t == null) return this; if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); this.identifier.add(t); return this; } /** * @return {@link #version} (The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. This is an arbitrary value managed by the StructureDefinition author manually.). This is the underlying object with id, value and extensions. The accessor "getVersion" gives direct access to the value */ public StringType getVersionElement() { if (this.version == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.version"); else if (Configuration.doAutoCreate()) this.version = new StringType(); // bb return this.version; } public boolean hasVersionElement() { return this.version != null && !this.version.isEmpty(); } public boolean hasVersion() { return this.version != null && !this.version.isEmpty(); } /** * @param value {@link #version} (The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. This is an arbitrary value managed by the StructureDefinition author manually.). This is the underlying object with id, value and extensions. The accessor "getVersion" gives direct access to the value */ public StructureDefinition setVersionElement(StringType value) { this.version = value; return this; } /** * @return The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. 
This is an arbitrary value managed by the StructureDefinition author manually. */ public String getVersion() { return this.version == null ? null : this.version.getValue(); } /** * @param value The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. This is an arbitrary value managed by the StructureDefinition author manually. */ public StructureDefinition setVersion(String value) { if (Utilities.noString(value)) this.version = null; else { if (this.version == null) this.version = new StringType(); this.version.setValue(value); } return this; } /** * @return {@link #name} (A free text natural language name identifying the StructureDefinition.). This is the underlying object with id, value and extensions. The accessor "getName" gives direct access to the value */ public StringType getNameElement() { if (this.name == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.name"); else if (Configuration.doAutoCreate()) this.name = new StringType(); // bb return this.name; } public boolean hasNameElement() { return this.name != null && !this.name.isEmpty(); } public boolean hasName() { return this.name != null && !this.name.isEmpty(); } /** * @param value {@link #name} (A free text natural language name identifying the StructureDefinition.). This is the underlying object with id, value and extensions. The accessor "getName" gives direct access to the value */ public StructureDefinition setNameElement(StringType value) { this.name = value; return this; } /** * @return A free text natural language name identifying the StructureDefinition. */ public String getName() { return this.name == null ? null : this.name.getValue(); } /** * @param value A free text natural language name identifying the StructureDefinition. 
*/ public StructureDefinition setName(String value) { if (this.name == null) this.name = new StringType(); this.name.setValue(value); return this; } /** * @return {@link #display} (Defined so that applications can use this name when displaying the value of the extension to the user.). This is the underlying object with id, value and extensions. The accessor "getDisplay" gives direct access to the value */ public StringType getDisplayElement() { if (this.display == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.display"); else if (Configuration.doAutoCreate()) this.display = new StringType(); // bb return this.display; } public boolean hasDisplayElement() { return this.display != null && !this.display.isEmpty(); } public boolean hasDisplay() { return this.display != null && !this.display.isEmpty(); } /** * @param value {@link #display} (Defined so that applications can use this name when displaying the value of the extension to the user.). This is the underlying object with id, value and extensions. The accessor "getDisplay" gives direct access to the value */ public StructureDefinition setDisplayElement(StringType value) { this.display = value; return this; } /** * @return Defined so that applications can use this name when displaying the value of the extension to the user. */ public String getDisplay() { return this.display == null ? null : this.display.getValue(); } /** * @param value Defined so that applications can use this name when displaying the value of the extension to the user. */ public StructureDefinition setDisplay(String value) { if (Utilities.noString(value)) this.display = null; else { if (this.display == null) this.display = new StringType(); this.display.setValue(value); } return this; } /** * @return {@link #status} (The status of the StructureDefinition.). This is the underlying object with id, value and extensions. 
The accessor "getStatus" gives direct access to the value */ public Enumeration<ConformanceResourceStatus> getStatusElement() { if (this.status == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.status"); else if (Configuration.doAutoCreate()) this.status = new Enumeration<ConformanceResourceStatus>(new ConformanceResourceStatusEnumFactory()); // bb return this.status; } public boolean hasStatusElement() { return this.status != null && !this.status.isEmpty(); } public boolean hasStatus() { return this.status != null && !this.status.isEmpty(); } /** * @param value {@link #status} (The status of the StructureDefinition.). This is the underlying object with id, value and extensions. The accessor "getStatus" gives direct access to the value */ public StructureDefinition setStatusElement(Enumeration<ConformanceResourceStatus> value) { this.status = value; return this; } /** * @return The status of the StructureDefinition. */ public ConformanceResourceStatus getStatus() { return this.status == null ? null : this.status.getValue(); } /** * @param value The status of the StructureDefinition. */ public StructureDefinition setStatus(ConformanceResourceStatus value) { if (this.status == null) this.status = new Enumeration<ConformanceResourceStatus>(new ConformanceResourceStatusEnumFactory()); this.status.setValue(value); return this; } /** * @return {@link #experimental} (This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage.). This is the underlying object with id, value and extensions. 
The accessor "getExperimental" gives direct access to the value */ public BooleanType getExperimentalElement() { if (this.experimental == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.experimental"); else if (Configuration.doAutoCreate()) this.experimental = new BooleanType(); // bb return this.experimental; } public boolean hasExperimentalElement() { return this.experimental != null && !this.experimental.isEmpty(); } public boolean hasExperimental() { return this.experimental != null && !this.experimental.isEmpty(); } /** * @param value {@link #experimental} (This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage.). This is the underlying object with id, value and extensions. The accessor "getExperimental" gives direct access to the value */ public StructureDefinition setExperimentalElement(BooleanType value) { this.experimental = value; return this; } /** * @return This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage. */ public boolean getExperimental() { return this.experimental == null || this.experimental.isEmpty() ? false : this.experimental.getValue(); } /** * @param value This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage. */ public StructureDefinition setExperimental(boolean value) { if (this.experimental == null) this.experimental = new BooleanType(); this.experimental.setValue(value); return this; } /** * @return {@link #publisher} (The name of the individual or organization that published the structure definition.). This is the underlying object with id, value and extensions. 
The accessor "getPublisher" gives direct access to the value */ public StringType getPublisherElement() { if (this.publisher == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.publisher"); else if (Configuration.doAutoCreate()) this.publisher = new StringType(); // bb return this.publisher; } public boolean hasPublisherElement() { return this.publisher != null && !this.publisher.isEmpty(); } public boolean hasPublisher() { return this.publisher != null && !this.publisher.isEmpty(); } /** * @param value {@link #publisher} (The name of the individual or organization that published the structure definition.). This is the underlying object with id, value and extensions. The accessor "getPublisher" gives direct access to the value */ public StructureDefinition setPublisherElement(StringType value) { this.publisher = value; return this; } /** * @return The name of the individual or organization that published the structure definition. */ public String getPublisher() { return this.publisher == null ? null : this.publisher.getValue(); } /** * @param value The name of the individual or organization that published the structure definition. */ public StructureDefinition setPublisher(String value) { if (Utilities.noString(value)) this.publisher = null; else { if (this.publisher == null) this.publisher = new StringType(); this.publisher.setValue(value); } return this; } /** * @return {@link #contact} (Contacts to assist a user in finding and communicating with the publisher.) 
*/ public List<StructureDefinitionContactComponent> getContact() { if (this.contact == null) this.contact = new ArrayList<StructureDefinitionContactComponent>(); return this.contact; } public boolean hasContact() { if (this.contact == null) return false; for (StructureDefinitionContactComponent item : this.contact) if (!item.isEmpty()) return true; return false; } /** * @return {@link #contact} (Contacts to assist a user in finding and communicating with the publisher.) */ // syntactic sugar public StructureDefinitionContactComponent addContact() { //3 StructureDefinitionContactComponent t = new StructureDefinitionContactComponent(); if (this.contact == null) this.contact = new ArrayList<StructureDefinitionContactComponent>(); this.contact.add(t); return t; } // syntactic sugar public StructureDefinition addContact(StructureDefinitionContactComponent t) { //3 if (t == null) return this; if (this.contact == null) this.contact = new ArrayList<StructureDefinitionContactComponent>(); this.contact.add(t); return this; } /** * @return {@link #date} (The date this version of the structure definition was published. The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes.). This is the underlying object with id, value and extensions. The accessor "getDate" gives direct access to the value */ public DateTimeType getDateElement() { if (this.date == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.date"); else if (Configuration.doAutoCreate()) this.date = new DateTimeType(); // bb return this.date; } public boolean hasDateElement() { return this.date != null && !this.date.isEmpty(); } public boolean hasDate() { return this.date != null && !this.date.isEmpty(); } /** * @param value {@link #date} (The date this version of the structure definition was published. 
The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes.). This is the underlying object with id, value and extensions. The accessor "getDate" gives direct access to the value */ public StructureDefinition setDateElement(DateTimeType value) { this.date = value; return this; } /** * @return The date this version of the structure definition was published. The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes. */ public Date getDate() { return this.date == null ? null : this.date.getValue(); } /** * @param value The date this version of the structure definition was published. The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes. */ public StructureDefinition setDate(Date value) { if (value == null) this.date = null; else { if (this.date == null) this.date = new DateTimeType(); this.date.setValue(value); } return this; } /** * @return {@link #description} (A free text natural language description of the StructureDefinition and its use.). This is the underlying object with id, value and extensions. 
The accessor "getDescription" gives direct access to the value */ public StringType getDescriptionElement() { if (this.description == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.description"); else if (Configuration.doAutoCreate()) this.description = new StringType(); // bb return this.description; } public boolean hasDescriptionElement() { return this.description != null && !this.description.isEmpty(); } public boolean hasDescription() { return this.description != null && !this.description.isEmpty(); } /** * @param value {@link #description} (A free text natural language description of the StructureDefinition and its use.). This is the underlying object with id, value and extensions. The accessor "getDescription" gives direct access to the value */ public StructureDefinition setDescriptionElement(StringType value) { this.description = value; return this; } /** * @return A free text natural language description of the StructureDefinition and its use. */ public String getDescription() { return this.description == null ? null : this.description.getValue(); } /** * @param value A free text natural language description of the StructureDefinition and its use. */ public StructureDefinition setDescription(String value) { if (Utilities.noString(value)) this.description = null; else { if (this.description == null) this.description = new StringType(); this.description.setValue(value); } return this; } /** * @return {@link #useContext} (The content was developed with a focus and intent of supporting the contexts that are listed. These terms may be used to assist with indexing and searching of structure definitions.) 
*/ public List<CodeableConcept> getUseContext() { if (this.useContext == null) this.useContext = new ArrayList<CodeableConcept>(); return this.useContext; } public boolean hasUseContext() { if (this.useContext == null) return false; for (CodeableConcept item : this.useContext) if (!item.isEmpty()) return true; return false; } /** * @return {@link #useContext} (The content was developed with a focus and intent of supporting the contexts that are listed. These terms may be used to assist with indexing and searching of structure definitions.) */ // syntactic sugar public CodeableConcept addUseContext() { //3 CodeableConcept t = new CodeableConcept(); if (this.useContext == null) this.useContext = new ArrayList<CodeableConcept>(); this.useContext.add(t); return t; } // syntactic sugar public StructureDefinition addUseContext(CodeableConcept t) { //3 if (t == null) return this; if (this.useContext == null) this.useContext = new ArrayList<CodeableConcept>(); this.useContext.add(t); return this; } /** * @return {@link #requirements} (Explains why this structure definition is needed and why it's been constrained as it has.). This is the underlying object with id, value and extensions. The accessor "getRequirements" gives direct access to the value */ public StringType getRequirementsElement() { if (this.requirements == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.requirements"); else if (Configuration.doAutoCreate()) this.requirements = new StringType(); // bb return this.requirements; } public boolean hasRequirementsElement() { return this.requirements != null && !this.requirements.isEmpty(); } public boolean hasRequirements() { return this.requirements != null && !this.requirements.isEmpty(); } /** * @param value {@link #requirements} (Explains why this structure definition is needed and why it's been constrained as it has.). This is the underlying object with id, value and extensions. 
The accessor "getRequirements" gives direct access to the value */
    public StructureDefinition setRequirementsElement(StringType value) { 
      this.requirements = value;
      return this;
    }

    /**
     * @return Explains why this structure definition is needed and why it's been constrained as it has.
     */
    public String getRequirements() { 
      return this.requirements == null ? null : this.requirements.getValue();
    }

    /**
     * @param value Explains why this structure definition is needed and why it's been constrained as it has.
     *   An empty/null value clears the element entirely.
     */
    public StructureDefinition setRequirements(String value) { 
      if (Utilities.noString(value))
        this.requirements = null;
      else {
        if (this.requirements == null)
          this.requirements = new StringType();
        this.requirements.setValue(value);
      }
      return this;
    }

    /**
     * @return {@link #copyright} (A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings.). This is the underlying object with id, value and extensions. The accessor "getCopyright" gives direct access to the value
     */
    public StringType getCopyrightElement() { 
      // Lazily auto-creates the element when Configuration allows it.
      if (this.copyright == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.copyright");
        else if (Configuration.doAutoCreate())
          this.copyright = new StringType(); // bb
      return this.copyright;
    }

    public boolean hasCopyrightElement() { 
      return this.copyright != null && !this.copyright.isEmpty();
    }

    public boolean hasCopyright() { 
      return this.copyright != null && !this.copyright.isEmpty();
    }

    /**
     * @param value {@link #copyright} (A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings.). This is the underlying object with id, value and extensions.
The accessor "getCopyright" gives direct access to the value */
    public StructureDefinition setCopyrightElement(StringType value) { 
      this.copyright = value;
      return this;
    }

    /**
     * @return A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings.
     */
    public String getCopyright() { 
      return this.copyright == null ? null : this.copyright.getValue();
    }

    /**
     * @param value A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings.
     *   An empty/null value clears the element entirely.
     */
    public StructureDefinition setCopyright(String value) { 
      if (Utilities.noString(value))
        this.copyright = null;
      else {
        if (this.copyright == null)
          this.copyright = new StringType();
        this.copyright.setValue(value);
      }
      return this;
    }

    /**
     * @return {@link #code} (A set of terms from external terminologies that may be used to assist with indexing and searching of templates.)
     */
    public List<Coding> getCode() { 
      // Lazily initialize the backing list so callers always get a live, mutable list.
      if (this.code == null)
        this.code = new ArrayList<Coding>();
      return this.code;
    }

    // True when at least one non-empty code entry exists.
    public boolean hasCode() { 
      if (this.code == null)
        return false;
      for (Coding item : this.code)
        if (!item.isEmpty())
          return true;
      return false;
    }

    /**
     * @return {@link #code} (A set of terms from external terminologies that may be used to assist with indexing and searching of templates.)
*/
    // syntactic sugar
    public Coding addCode() { //3
      Coding t = new Coding();
      if (this.code == null)
        this.code = new ArrayList<Coding>();
      this.code.add(t);
      return t;
    }

    // syntactic sugar
    public StructureDefinition addCode(Coding t) { //3
      if (t == null)
        return this;
      if (this.code == null)
        this.code = new ArrayList<Coding>();
      this.code.add(t);
      return this;
    }

    /**
     * @return {@link #fhirVersion} (The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version.). This is the underlying object with id, value and extensions. The accessor "getFhirVersion" gives direct access to the value
     */
    public IdType getFhirVersionElement() { 
      // Lazily auto-creates the element when Configuration allows it.
      if (this.fhirVersion == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.fhirVersion");
        else if (Configuration.doAutoCreate())
          this.fhirVersion = new IdType(); // bb
      return this.fhirVersion;
    }

    public boolean hasFhirVersionElement() { 
      return this.fhirVersion != null && !this.fhirVersion.isEmpty();
    }

    public boolean hasFhirVersion() { 
      return this.fhirVersion != null && !this.fhirVersion.isEmpty();
    }

    /**
     * @param value {@link #fhirVersion} (The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version.). This is the underlying object with id, value and extensions. The accessor "getFhirVersion" gives direct access to the value
     */
    public StructureDefinition setFhirVersionElement(IdType value) { 
      this.fhirVersion = value;
      return this;
    }

    /**
     * @return The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version.
*/
    public String getFhirVersion() { 
      return this.fhirVersion == null ? null : this.fhirVersion.getValue();
    }

    /**
     * @param value The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version.
     *   An empty/null value clears the element entirely.
     */
    public StructureDefinition setFhirVersion(String value) { 
      if (Utilities.noString(value))
        this.fhirVersion = null;
      else {
        if (this.fhirVersion == null)
          this.fhirVersion = new IdType();
        this.fhirVersion.setValue(value);
      }
      return this;
    }

    /**
     * @return {@link #mapping} (An external specification that the content is mapped to.)
     */
    public List<StructureDefinitionMappingComponent> getMapping() { 
      // Lazily initialize the backing list so callers always get a live, mutable list.
      if (this.mapping == null)
        this.mapping = new ArrayList<StructureDefinitionMappingComponent>();
      return this.mapping;
    }

    // True when at least one non-empty mapping entry exists.
    public boolean hasMapping() { 
      if (this.mapping == null)
        return false;
      for (StructureDefinitionMappingComponent item : this.mapping)
        if (!item.isEmpty())
          return true;
      return false;
    }

    /**
     * @return {@link #mapping} (An external specification that the content is mapped to.)
     */
    // syntactic sugar
    public StructureDefinitionMappingComponent addMapping() { //3
      StructureDefinitionMappingComponent t = new StructureDefinitionMappingComponent();
      if (this.mapping == null)
        this.mapping = new ArrayList<StructureDefinitionMappingComponent>();
      this.mapping.add(t);
      return t;
    }

    // syntactic sugar
    public StructureDefinition addMapping(StructureDefinitionMappingComponent t) { //3
      if (t == null)
        return this;
      if (this.mapping == null)
        this.mapping = new ArrayList<StructureDefinitionMappingComponent>();
      this.mapping.add(t);
      return this;
    }

    /**
     * @return {@link #kind} (Defines the kind of structure that this definition is describing.). This is the underlying object with id, value and extensions.
The accessor "getKind" gives direct access to the value */
    public Enumeration<StructureDefinitionKind> getKindElement() { 
      // Lazily auto-creates the enumeration element when Configuration allows it.
      if (this.kind == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.kind");
        else if (Configuration.doAutoCreate())
          this.kind = new Enumeration<StructureDefinitionKind>(new StructureDefinitionKindEnumFactory()); // bb
      return this.kind;
    }

    public boolean hasKindElement() { 
      return this.kind != null && !this.kind.isEmpty();
    }

    public boolean hasKind() { 
      return this.kind != null && !this.kind.isEmpty();
    }

    /**
     * @param value {@link #kind} (Defines the kind of structure that this definition is describing.). This is the underlying object with id, value and extensions. The accessor "getKind" gives direct access to the value
     */
    public StructureDefinition setKindElement(Enumeration<StructureDefinitionKind> value) { 
      this.kind = value;
      return this;
    }

    /**
     * @return Defines the kind of structure that this definition is describing.
     */
    public StructureDefinitionKind getKind() { 
      return this.kind == null ? null : this.kind.getValue();
    }

    /**
     * @param value Defines the kind of structure that this definition is describing.
     *   Note: unlike the string setters, this never clears the element (kind is a required element).
     */
    public StructureDefinition setKind(StructureDefinitionKind value) { 
      if (this.kind == null)
        this.kind = new Enumeration<StructureDefinitionKind>(new StructureDefinitionKindEnumFactory());
      this.kind.setValue(value);
      return this;
    }

    /**
     * @return {@link #constrainedType} (The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure.). This is the underlying object with id, value and extensions.
The accessor "getConstrainedType" gives direct access to the value */
    public CodeType getConstrainedTypeElement() { 
      // Lazily auto-creates the element when Configuration allows it.
      if (this.constrainedType == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.constrainedType");
        else if (Configuration.doAutoCreate())
          this.constrainedType = new CodeType(); // bb
      return this.constrainedType;
    }

    public boolean hasConstrainedTypeElement() { 
      return this.constrainedType != null && !this.constrainedType.isEmpty();
    }

    public boolean hasConstrainedType() { 
      return this.constrainedType != null && !this.constrainedType.isEmpty();
    }

    /**
     * @param value {@link #constrainedType} (The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure.). This is the underlying object with id, value and extensions. The accessor "getConstrainedType" gives direct access to the value
     */
    public StructureDefinition setConstrainedTypeElement(CodeType value) { 
      this.constrainedType = value;
      return this;
    }

    /**
     * @return The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure.
     */
    public String getConstrainedType() { 
      return this.constrainedType == null ? null : this.constrainedType.getValue();
    }

    /**
     * @param value The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure.
*/
    public StructureDefinition setConstrainedType(String value) { 
      // An empty/null value clears the element entirely.
      if (Utilities.noString(value))
        this.constrainedType = null;
      else {
        if (this.constrainedType == null)
          this.constrainedType = new CodeType();
        this.constrainedType.setValue(value);
      }
      return this;
    }

    /**
     * @return {@link #abstract_} (Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type.). This is the underlying object with id, value and extensions. The accessor "getAbstract" gives direct access to the value
     */
    public BooleanType getAbstractElement() { 
      // Lazily auto-creates the element when Configuration allows it.
      if (this.abstract_ == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.abstract_");
        else if (Configuration.doAutoCreate())
          this.abstract_ = new BooleanType(); // bb
      return this.abstract_;
    }

    public boolean hasAbstractElement() { 
      return this.abstract_ != null && !this.abstract_.isEmpty();
    }

    public boolean hasAbstract() { 
      return this.abstract_ != null && !this.abstract_.isEmpty();
    }

    /**
     * @param value {@link #abstract_} (Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type.). This is the underlying object with id, value and extensions. The accessor "getAbstract" gives direct access to the value
     */
    public StructureDefinition setAbstractElement(BooleanType value) { 
      this.abstract_ = value;
      return this;
    }

    /**
     * @return Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type.
     *   Returns false when the element is absent or empty.
     */
    public boolean getAbstract() { 
      return this.abstract_ == null || this.abstract_.isEmpty() ? false : this.abstract_.getValue();
    }

    /**
     * @param value Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type.
*/
    public StructureDefinition setAbstract(boolean value) { 
      if (this.abstract_ == null)
        this.abstract_ = new BooleanType();
      this.abstract_.setValue(value);
      return this;
    }

    /**
     * @return {@link #contextType} (If this is an extension, Identifies the context within FHIR resources where the extension can be used.). This is the underlying object with id, value and extensions. The accessor "getContextType" gives direct access to the value
     */
    public Enumeration<ExtensionContext> getContextTypeElement() { 
      // Lazily auto-creates the enumeration element when Configuration allows it.
      if (this.contextType == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.contextType");
        else if (Configuration.doAutoCreate())
          this.contextType = new Enumeration<ExtensionContext>(new ExtensionContextEnumFactory()); // bb
      return this.contextType;
    }

    public boolean hasContextTypeElement() { 
      return this.contextType != null && !this.contextType.isEmpty();
    }

    public boolean hasContextType() { 
      return this.contextType != null && !this.contextType.isEmpty();
    }

    /**
     * @param value {@link #contextType} (If this is an extension, Identifies the context within FHIR resources where the extension can be used.). This is the underlying object with id, value and extensions. The accessor "getContextType" gives direct access to the value
     */
    public StructureDefinition setContextTypeElement(Enumeration<ExtensionContext> value) { 
      this.contextType = value;
      return this;
    }

    /**
     * @return If this is an extension, Identifies the context within FHIR resources where the extension can be used.
     */
    public ExtensionContext getContextType() { 
      return this.contextType == null ? null : this.contextType.getValue();
    }

    /**
     * @param value If this is an extension, Identifies the context within FHIR resources where the extension can be used.
*/ public StructureDefinition setContextType(ExtensionContext value) { if (value == null) this.contextType = null; else { if (this.contextType == null) this.contextType = new Enumeration<ExtensionContext>(new ExtensionContextEnumFactory()); this.contextType.setValue(value); } return this; } /** * @return {@link #context} (Identifies the types of resource or data type elements to which the extension can be applied.) */ public List<StringType> getContext() { if (this.context == null) this.context = new ArrayList<StringType>(); return this.context; } public boolean hasContext() { if (this.context == null) return false; for (StringType item : this.context) if (!item.isEmpty()) return true; return false; } /** * @return {@link #context} (Identifies the types of resource or data type elements to which the extension can be applied.) */ // syntactic sugar public StringType addContextElement() {//2 StringType t = new StringType(); if (this.context == null) this.context = new ArrayList<StringType>(); this.context.add(t); return t; } /** * @param value {@link #context} (Identifies the types of resource or data type elements to which the extension can be applied.) */ public StructureDefinition addContext(String value) { //1 StringType t = new StringType(); t.setValue(value); if (this.context == null) this.context = new ArrayList<StringType>(); this.context.add(t); return this; } /** * @param value {@link #context} (Identifies the types of resource or data type elements to which the extension can be applied.) */ public boolean hasContext(String value) { if (this.context == null) return false; for (StringType v : this.context) if (v.equals(value)) // string return true; return false; } /** * @return {@link #base} (An absolute URI that is the base structure from which this set of constraints is derived.). This is the underlying object with id, value and extensions. 
The accessor "getBase" gives direct access to the value */ public UriType getBaseElement() { if (this.base == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create StructureDefinition.base"); else if (Configuration.doAutoCreate()) this.base = new UriType(); // bb return this.base; } public boolean hasBaseElement() { return this.base != null && !this.base.isEmpty(); } public boolean hasBase() { return this.base != null && !this.base.isEmpty(); } /** * @param value {@link #base} (An absolute URI that is the base structure from which this set of constraints is derived.). This is the underlying object with id, value and extensions. The accessor "getBase" gives direct access to the value */ public StructureDefinition setBaseElement(UriType value) { this.base = value; return this; } /** * @return An absolute URI that is the base structure from which this set of constraints is derived. */ public String getBase() { return this.base == null ? null : this.base.getValue(); } /** * @param value An absolute URI that is the base structure from which this set of constraints is derived. */ public StructureDefinition setBase(String value) { if (Utilities.noString(value)) this.base = null; else { if (this.base == null) this.base = new UriType(); this.base.setValue(value); } return this; } /** * @return {@link #snapshot} (A snapshot view is expressed in a stand alone form that can be used and interpreted without considering the base StructureDefinition.) 
*/
    public StructureDefinitionSnapshotComponent getSnapshot() { 
      // Lazily auto-creates the component when Configuration allows it.
      if (this.snapshot == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.snapshot");
        else if (Configuration.doAutoCreate())
          this.snapshot = new StructureDefinitionSnapshotComponent(); // cc
      return this.snapshot;
    }

    public boolean hasSnapshot() { 
      return this.snapshot != null && !this.snapshot.isEmpty();
    }

    /**
     * @param value {@link #snapshot} (A snapshot view is expressed in a stand alone form that can be used and interpreted without considering the base StructureDefinition.)
     */
    public StructureDefinition setSnapshot(StructureDefinitionSnapshotComponent value) { 
      this.snapshot = value;
      return this;
    }

    /**
     * @return {@link #differential} (A differential view is expressed relative to the base StructureDefinition - a statement of differences that it applies.)
     */
    public StructureDefinitionDifferentialComponent getDifferential() { 
      // Lazily auto-creates the component when Configuration allows it.
      if (this.differential == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create StructureDefinition.differential");
        else if (Configuration.doAutoCreate())
          this.differential = new StructureDefinitionDifferentialComponent(); // cc
      return this.differential;
    }

    public boolean hasDifferential() { 
      return this.differential != null && !this.differential.isEmpty();
    }

    /**
     * @param value {@link #differential} (A differential view is expressed relative to the base StructureDefinition - a statement of differences that it applies.)
     */
    public StructureDefinition setDifferential(StructureDefinitionDifferentialComponent value) { 
      this.differential = value;
      return this;
    }

    // Registers this resource's child element metadata, one Property per element.
    protected void listChildren(List<Property> childrenList) {
        super.listChildren(childrenList);
        childrenList.add(new Property("url", "uri", "An absolute URL that is used to identify this structure definition when it is referenced in a specification, model, design or an instance. This SHALL be a URL, SHOULD be globally unique, and SHOULD be an address at which this structure definition is (or will be) published.", 0, java.lang.Integer.MAX_VALUE, url));
        childrenList.add(new Property("identifier", "Identifier", "Formal identifier that is used to identify this StructureDefinition when it is represented in other formats, or referenced in a specification, model, design or an instance (should be globally unique OID, UUID, or URI), (if it's not possible to use the literal URI).", 0, java.lang.Integer.MAX_VALUE, identifier));
        childrenList.add(new Property("version", "string", "The identifier that is used to identify this version of the StructureDefinition when it is referenced in a specification, model, design or instance. This is an arbitrary value managed by the StructureDefinition author manually.", 0, java.lang.Integer.MAX_VALUE, version));
        childrenList.add(new Property("name", "string", "A free text natural language name identifying the StructureDefinition.", 0, java.lang.Integer.MAX_VALUE, name));
        childrenList.add(new Property("display", "string", "Defined so that applications can use this name when displaying the value of the extension to the user.", 0, java.lang.Integer.MAX_VALUE, display));
        childrenList.add(new Property("status", "code", "The status of the StructureDefinition.", 0, java.lang.Integer.MAX_VALUE, status));
        childrenList.add(new Property("experimental", "boolean", "This StructureDefinition was authored for testing purposes (or education/evaluation/marketing), and is not intended to be used for genuine usage.", 0, java.lang.Integer.MAX_VALUE, experimental));
        childrenList.add(new Property("publisher", "string", "The name of the individual or organization that published the structure definition.", 0, java.lang.Integer.MAX_VALUE, publisher));
        childrenList.add(new Property("contact", "", "Contacts to assist a user in finding and communicating with the publisher.", 0, java.lang.Integer.MAX_VALUE, contact));
        childrenList.add(new Property("date", "dateTime", "The date this version of the structure definition was published. The date must change when the business version changes, if it does, and it must change if the status code changes. In addition, it should change when the substantive content of the structure definition changes.", 0, java.lang.Integer.MAX_VALUE, date));
        childrenList.add(new Property("description", "string", "A free text natural language description of the StructureDefinition and its use.", 0, java.lang.Integer.MAX_VALUE, description));
        childrenList.add(new Property("useContext", "CodeableConcept", "The content was developed with a focus and intent of supporting the contexts that are listed. These terms may be used to assist with indexing and searching of structure definitions.", 0, java.lang.Integer.MAX_VALUE, useContext));
        childrenList.add(new Property("requirements", "string", "Explains why this structure definition is needed and why it's been constrained as it has.", 0, java.lang.Integer.MAX_VALUE, requirements));
        childrenList.add(new Property("copyright", "string", "A copyright statement relating to the structure definition and/or its contents. Copyright statements are generally legal restrictions on the use and publishing of the details of the constraints and mappings.", 0, java.lang.Integer.MAX_VALUE, copyright));
        childrenList.add(new Property("code", "Coding", "A set of terms from external terminologies that may be used to assist with indexing and searching of templates.", 0, java.lang.Integer.MAX_VALUE, code));
        childrenList.add(new Property("fhirVersion", "id", "The version of the FHIR specification on which this StructureDefinition is based - this is the formal version of the specification, without the revision number, e.g. [publication].[major].[minor], which is 1.0.2 for this version.", 0, java.lang.Integer.MAX_VALUE, fhirVersion));
        childrenList.add(new Property("mapping", "", "An external specification that the content is mapped to.", 0, java.lang.Integer.MAX_VALUE, mapping));
        childrenList.add(new Property("kind", "code", "Defines the kind of structure that this definition is describing.", 0, java.lang.Integer.MAX_VALUE, kind));
        childrenList.add(new Property("constrainedType", "code", "The type of type that is being constrained - a data type, an extension, a resource, including abstract ones. If this field is present, it indicates that the structure definition is a constraint. If it is not present, then the structure definition is the definition of a base structure.", 0, java.lang.Integer.MAX_VALUE, constrainedType));
        childrenList.add(new Property("abstract", "boolean", "Whether structure this definition describes is abstract or not - that is, whether an actual exchanged item can ever be of this type.", 0, java.lang.Integer.MAX_VALUE, abstract_));
        childrenList.add(new Property("contextType", "code", "If this is an extension, Identifies the context within FHIR resources where the extension can be used.", 0, java.lang.Integer.MAX_VALUE, contextType));
        childrenList.add(new Property("context", "string", "Identifies the types of resource or data type elements to which the extension can be applied.", 0, java.lang.Integer.MAX_VALUE, context));
        childrenList.add(new Property("base", "uri", "An absolute URI that is the base structure from which this set of constraints is derived.", 0, java.lang.Integer.MAX_VALUE, base));
        childrenList.add(new Property("snapshot", "", "A snapshot view is expressed in a stand alone form that can be used and interpreted without considering the base StructureDefinition.", 0, java.lang.Integer.MAX_VALUE, snapshot));
        childrenList.add(new Property("differential", "", "A differential view is expressed relative to the base StructureDefinition - a statement of differences that it applies.", 0, java.lang.Integer.MAX_VALUE, differential));
      }

      // Produces a deep copy: every element and list entry is copied, never shared.
      public StructureDefinition copy() {
        StructureDefinition dst = new StructureDefinition();
        copyValues(dst);
        dst.url = url == null ? null : url.copy();
        if (identifier != null) {
          dst.identifier = new ArrayList<Identifier>();
          for (Identifier i : identifier)
            dst.identifier.add(i.copy());
        };
        dst.version = version == null ? null : version.copy();
        dst.name = name == null ? null : name.copy();
        dst.display = display == null ? null : display.copy();
        dst.status = status == null ? null : status.copy();
        dst.experimental = experimental == null ? null : experimental.copy();
        dst.publisher = publisher == null ? null : publisher.copy();
        if (contact != null) {
          dst.contact = new ArrayList<StructureDefinitionContactComponent>();
          for (StructureDefinitionContactComponent i : contact)
            dst.contact.add(i.copy());
        };
        dst.date = date == null ? null : date.copy();
        dst.description = description == null ? null : description.copy();
        if (useContext != null) {
          dst.useContext = new ArrayList<CodeableConcept>();
          for (CodeableConcept i : useContext)
            dst.useContext.add(i.copy());
        };
        dst.requirements = requirements == null ? null : requirements.copy();
        dst.copyright = copyright == null ? null : copyright.copy();
        if (code != null) {
          dst.code = new ArrayList<Coding>();
          for (Coding i : code)
            dst.code.add(i.copy());
        };
        dst.fhirVersion = fhirVersion == null ? null : fhirVersion.copy();
        if (mapping != null) {
          dst.mapping = new ArrayList<StructureDefinitionMappingComponent>();
          for (StructureDefinitionMappingComponent i : mapping)
            dst.mapping.add(i.copy());
        };
        dst.kind = kind == null ? null : kind.copy();
        dst.constrainedType = constrainedType == null ? null : constrainedType.copy();
        dst.abstract_ = abstract_ == null ? null : abstract_.copy();
        dst.contextType = contextType == null ?
null : contextType.copy();
        if (context != null) {
          dst.context = new ArrayList<StringType>();
          for (StringType i : context)
            dst.context.add(i.copy());
        };
        dst.base = base == null ? null : base.copy();
        dst.snapshot = snapshot == null ? null : snapshot.copy();
        dst.differential = differential == null ? null : differential.copy();
        return dst;
      }

      protected StructureDefinition typedCopy() {
        return copy();
      }

      // Deep equality: recursively compares every element, including child components.
      @Override
      public boolean equalsDeep(Base other) {
        if (!super.equalsDeep(other))
          return false;
        if (!(other instanceof StructureDefinition))
          return false;
        StructureDefinition o = (StructureDefinition) other;
        return compareDeep(url, o.url, true) && compareDeep(identifier, o.identifier, true) && compareDeep(version, o.version, true)
           && compareDeep(name, o.name, true) && compareDeep(display, o.display, true) && compareDeep(status, o.status, true)
           && compareDeep(experimental, o.experimental, true) && compareDeep(publisher, o.publisher, true) && compareDeep(contact, o.contact, true)
           && compareDeep(date, o.date, true) && compareDeep(description, o.description, true) && compareDeep(useContext, o.useContext, true)
           && compareDeep(requirements, o.requirements, true) && compareDeep(copyright, o.copyright, true) && compareDeep(code, o.code, true)
           && compareDeep(fhirVersion, o.fhirVersion, true) && compareDeep(mapping, o.mapping, true) && compareDeep(kind, o.kind, true)
           && compareDeep(constrainedType, o.constrainedType, true) && compareDeep(abstract_, o.abstract_, true) && compareDeep(contextType, o.contextType, true)
           && compareDeep(context, o.context, true) && compareDeep(base, o.base, true) && compareDeep(snapshot, o.snapshot, true)
           && compareDeep(differential, o.differential, true);
      }

      // Shallow equality: compares primitive values only, ignoring child components.
      @Override
      public boolean equalsShallow(Base other) {
        if (!super.equalsShallow(other))
          return false;
        if (!(other instanceof StructureDefinition))
          return false;
        StructureDefinition o = (StructureDefinition) other;
        return compareValues(url, o.url, true) && compareValues(version, o.version, true) &&
compareValues(name, o.name, true) && compareValues(display, o.display, true) && compareValues(status, o.status, true)
           && compareValues(experimental, o.experimental, true) && compareValues(publisher, o.publisher, true) && compareValues(date, o.date, true)
           && compareValues(description, o.description, true) && compareValues(requirements, o.requirements, true) && compareValues(copyright, o.copyright, true)
           && compareValues(fhirVersion, o.fhirVersion, true) && compareValues(kind, o.kind, true) && compareValues(constrainedType, o.constrainedType, true)
           && compareValues(abstract_, o.abstract_, true) && compareValues(contextType, o.contextType, true) && compareValues(context, o.context, true)
           && compareValues(base, o.base, true);
      }

      // True when no element of this resource carries a value or extension.
      public boolean isEmpty() {
        return super.isEmpty() && (url == null || url.isEmpty()) && (identifier == null || identifier.isEmpty())
           && (version == null || version.isEmpty()) && (name == null || name.isEmpty()) && (display == null || display.isEmpty())
           && (status == null || status.isEmpty()) && (experimental == null || experimental.isEmpty())
           && (publisher == null || publisher.isEmpty()) && (contact == null || contact.isEmpty())
           && (date == null || date.isEmpty()) && (description == null || description.isEmpty())
           && (useContext == null || useContext.isEmpty()) && (requirements == null || requirements.isEmpty())
           && (copyright == null || copyright.isEmpty()) && (code == null || code.isEmpty())
           && (fhirVersion == null || fhirVersion.isEmpty()) && (mapping == null || mapping.isEmpty())
           && (kind == null || kind.isEmpty()) && (constrainedType == null || constrainedType.isEmpty())
           && (abstract_ == null || abstract_.isEmpty()) && (contextType == null || contextType.isEmpty())
           && (context == null || context.isEmpty()) && (base == null || base.isEmpty())
           && (snapshot == null || snapshot.isEmpty()) && (differential == null || differential.isEmpty());
      }

      @Override
      public ResourceType getResourceType() {
        return ResourceType.StructureDefinition;
      }
// Search parameter constants: each @SearchParamDefinition declares a server-side
  // search parameter for StructureDefinition; the SP_* constant is its wire name.
  @SearchParamDefinition(name="date", path="StructureDefinition.date", description="The profile publication date", type="date" )
  public static final String SP_DATE = "date";
  @SearchParamDefinition(name="identifier", path="StructureDefinition.identifier", description="The identifier of the profile", type="token" )
  public static final String SP_IDENTIFIER = "identifier";
  @SearchParamDefinition(name="code", path="StructureDefinition.code", description="A code for the profile", type="token" )
  public static final String SP_CODE = "code";
  @SearchParamDefinition(name="valueset", path="StructureDefinition.snapshot.element.binding.valueSet[x]", description="A vocabulary binding reference", type="reference" )
  public static final String SP_VALUESET = "valueset";
  @SearchParamDefinition(name="kind", path="StructureDefinition.kind", description="datatype | resource | logical", type="token" )
  public static final String SP_KIND = "kind";
  @SearchParamDefinition(name="display", path="StructureDefinition.display", description="Use this name when displaying the value", type="string" )
  public static final String SP_DISPLAY = "display";
  @SearchParamDefinition(name="description", path="StructureDefinition.description", description="Text search in the description of the profile", type="string" )
  public static final String SP_DESCRIPTION = "description";
  @SearchParamDefinition(name="experimental", path="StructureDefinition.experimental", description="If for testing purposes, not real usage", type="token" )
  public static final String SP_EXPERIMENTAL = "experimental";
  @SearchParamDefinition(name="context-type", path="StructureDefinition.contextType", description="resource | datatype | mapping | extension", type="token" )
  public static final String SP_CONTEXTTYPE = "context-type";
  @SearchParamDefinition(name="abstract", path="StructureDefinition.abstract", description="Whether the structure is abstract", type="token" )
  public static final String SP_ABSTRACT = "abstract";
@SearchParamDefinition(name="type", path="StructureDefinition.constrainedType", description="Any datatype or resource, including abstract ones", type="token" ) public static final String SP_TYPE = "type"; @SearchParamDefinition(name="version", path="StructureDefinition.version", description="The version identifier of the profile", type="token" ) public static final String SP_VERSION = "version"; @SearchParamDefinition(name="url", path="StructureDefinition.url", description="Absolute URL used to reference this StructureDefinition", type="uri" ) public static final String SP_URL = "url"; @SearchParamDefinition(name="path", path="StructureDefinition.snapshot.element.path|StructureDefinition.differential.element.path", description="A path that is constrained in the profile", type="token" ) public static final String SP_PATH = "path"; @SearchParamDefinition(name="ext-context", path="StructureDefinition.context", description="Where the extension can be used in instances", type="string" ) public static final String SP_EXTCONTEXT = "ext-context"; @SearchParamDefinition(name="name", path="StructureDefinition.name", description="Name of the profile", type="string" ) public static final String SP_NAME = "name"; @SearchParamDefinition(name="context", path="StructureDefinition.useContext", description="A use context assigned to the structure", type="token" ) public static final String SP_CONTEXT = "context"; @SearchParamDefinition(name="base-path", path="StructureDefinition.snapshot.element.base.path|StructureDefinition.differential.element.base.path", description="Path that identifies the base element", type="token" ) public static final String SP_BASEPATH = "base-path"; @SearchParamDefinition(name="publisher", path="StructureDefinition.publisher", description="Name of the publisher of the profile", type="string" ) public static final String SP_PUBLISHER = "publisher"; @SearchParamDefinition(name="status", path="StructureDefinition.status", description="The current status of 
the profile", type="token" ) public static final String SP_STATUS = "status"; @SearchParamDefinition(name="base", path="StructureDefinition.base", description="Structure that this set of constraints applies to", type="uri" ) public static final String SP_BASE = "base"; }
bjornna/hapi-fhir
hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/StructureDefinition.java
Java
apache-2.0
117,624
/* * Copyright 1999-2019 Seata.io Group. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.seata.sqlparser.antlr.mysql.listener; import io.seata.sqlparser.antlr.mysql.MySqlContext; import io.seata.sqlparser.antlr.mysql.parser.MySqlParser; import io.seata.sqlparser.antlr.mysql.parser.MySqlParserBaseListener; import io.seata.sqlparser.antlr.mysql.visit.StatementSqlVisitor; /** * @author houzhi */ public class UpdateSpecificationSqlListener extends MySqlParserBaseListener { private MySqlContext sqlQueryContext; public UpdateSpecificationSqlListener(MySqlContext sqlQueryContext) { this.sqlQueryContext = sqlQueryContext; } @Override public void enterTableName(MySqlParser.TableNameContext ctx) { sqlQueryContext.setTableName(ctx.getText()); super.enterTableName(ctx); } @Override public void enterConstantExpressionAtomForUpdate(MySqlParser.ConstantExpressionAtomForUpdateContext ctx) { sqlQueryContext.addUpdateWhereValColumnNames(ctx.getText()); super.enterConstantExpressionAtomForUpdate(ctx); } @Override public void enterFullColumnNameExpressionAtomForUpdate(MySqlParser.FullColumnNameExpressionAtomForUpdateContext ctx) { sqlQueryContext.addUpdateWhereColumnNames(ctx.getText()); super.enterFullColumnNameExpressionAtomForUpdate(ctx); } @Override public void enterSingleUpdateStatement(MySqlParser.SingleUpdateStatementContext ctx) { MySqlParser.ExpressionForUpdateContext expressionForUpdateContext = ctx.expressionForUpdate(); StatementSqlVisitor statementSqlVisitor = new 
StatementSqlVisitor(); String text = statementSqlVisitor.visit(expressionForUpdateContext).toString(); sqlQueryContext.setWhereCondition(text); MySqlParser.UidContext uid = ctx.uid(); if (uid != null) { String alias = uid.getText(); if (!text.isEmpty()) { sqlQueryContext.setTableAlias(alias); } } super.enterSingleUpdateStatement(ctx); } @Override public void enterUpdatedElement(MySqlParser.UpdatedElementContext ctx) { MySqlParser.ExpressionContext expression = ctx.expression(); sqlQueryContext.addUpdateValues(expression.getText()); MySqlParser.FullColumnNameContext fullColumnNameContext = ctx.fullColumnName(); sqlQueryContext.addUpdateColumnNames(fullColumnNameContext.getText()); super.enterUpdatedElement(ctx); } }
seata/seata
sqlparser/seata-sqlparser-antlr/src/main/java/io/seata/sqlparser/antlr/mysql/listener/UpdateSpecificationSqlListener.java
Java
apache-2.0
3,049
"""Entity-id constants shared by the Z-Wave JS component tests."""

# --- sensor platform ----------------------------------------------------
AIR_TEMPERATURE_SENSOR = "sensor.multisensor_6_air_temperature"
HUMIDITY_SENSOR = "sensor.multisensor_6_humidity"
ENERGY_SENSOR = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed_2"
POWER_SENSOR = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed"
NOTIFICATION_MOTION_SENSOR = "sensor.multisensor_6_home_security_motion_sensor_status"
ID_LOCK_CONFIG_PARAMETER_SENSOR = (
    "sensor.z_wave_module_for_id_lock_150_and_101_config_parameter_door_lock_mode"
)

# --- binary_sensor platform ---------------------------------------------
LOW_BATTERY_BINARY_SENSOR = "binary_sensor.multisensor_6_low_battery_level"
ENABLED_LEGACY_BINARY_SENSOR = "binary_sensor.z_wave_door_window_sensor_any"
DISABLED_LEGACY_BINARY_SENSOR = "binary_sensor.multisensor_6_any"
NOTIFICATION_MOTION_BINARY_SENSOR = (
    "binary_sensor.multisensor_6_home_security_motion_detection"
)
PROPERTY_DOOR_STATUS_BINARY_SENSOR = (
    "binary_sensor.august_smart_lock_pro_3rd_gen_the_current_status_of_the_door"
)

# --- switch platform ----------------------------------------------------
SWITCH_ENTITY = "switch.smart_plug_with_two_usb_ports"

# --- climate platform ---------------------------------------------------
CLIMATE_RADIO_THERMOSTAT_ENTITY = "climate.z_wave_thermostat"
CLIMATE_DANFOSS_LC13_ENTITY = "climate.living_connect_z_thermostat"
CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY = "climate.thermostatic_valve"
CLIMATE_FLOOR_THERMOSTAT_ENTITY = "climate.floor_thermostat"
CLIMATE_MAIN_HEAT_ACTIONNER = "climate.main_heat_actionner"

# --- light platform -----------------------------------------------------
BULB_6_MULTI_COLOR_LIGHT_ENTITY = "light.bulb_6_multi_color"
EATON_RF9640_ENTITY = "light.allloaddimmer"
AEON_SMART_SWITCH_LIGHT_ENTITY = "light.smart_switch_6"
w1ll1am23/home-assistant
tests/components/zwave_js/common.py
Python
apache-2.0
1,508
+++
date = "2015-03-19T14:27:51-04:00"
title = "Authenticating"
[menu.main]
  parent = "Connecting"
  identifier = "Authenticating"
  weight = 20
  pre = "<i class='fa'></i>"
+++

# Authentication

The Node.js driver supports all MongoDB [authentication mechanisms](http://docs.mongodb.org/manual/core/authentication/), including those only available in the MongoDB [Enterprise Edition](http://docs.mongodb.org/manual/administration/install-enterprise/).

{{% note %}}
MongoDB 3.0 changed the default authentication mechanism from [MONGODB-CR](http://docs.mongodb.org/manual/core/authentication/#mongodb-cr-authentication) to [SCRAM-SHA-1](http://docs.mongodb.org/manual/core/authentication/#scram-sha-1-authentication).
{{% /note %}}

## DEFAULT

If no authentication mechanism is specified or the mechanism DEFAULT is specified, the driver will attempt to authenticate using the SCRAM-SHA-1 authentication method if it is available on the MongoDB server. If the server does not support SCRAM-SHA-1 the driver will authenticate using MONGODB-CR.

```js
var MongoClient = require('mongodb').MongoClient,
  f = require('util').format,
  assert = require('assert');

// Connection URL
var url = 'mongodb://dave:password@localhost:27017?authMechanism=DEFAULT&authSource=db';

// Use connect method to connect to the Server
MongoClient.connect(url, function(err, db) {
  assert.equal(null, err);
  console.log("Connected correctly to server");

  db.close();
});
```

## SCRAM-SHA-1

To explicitly connect to MongoDB using [SCRAM-SHA-1](http://docs.mongodb.org/manual/core/authentication/#scram-sha-1-authentication), we pass the following parameters to the driver over the connection URI.
```js var MongoClient = require('mongodb').MongoClient, f = require('util').format, assert = require('assert'); // Connection URL var url = 'mongodb://dave:password@localhost:27017?authMechanism=SCRAM-SHA-1&authSource=db'; // Use connect method to connect to the Server MongoClient.connect(url, function(err, db) { assert.equal(null, err); console.log("Connected correctly to server"); db.close(); }); ``` The URI uses the authMechanism `SCRAM-SHA-1` with the user `dave` and password `password` against the database `db`. ## MONGODB-CR To explicitly create a credential of type [MONGODB-CR](http://docs.mongodb.org/manual/core/authentication/#mongodb-cr-authentication), we pass the following parameters to the driver over the connection URI. ```js var MongoClient = require('mongodb').MongoClient, f = require('util').format, assert = require('assert'); // Connection URL var url = 'mongodb://dave:password@localhost:27017?authMechanism=MONGODB-CR&authSource=db'; // Use connect method to connect to the Server MongoClient.connect(url, function(err, db) { assert.equal(null, err); console.log("Connected correctly to server"); db.close(); }); ``` The URI uses the authMechanism `MONGODB-CR` with the user `dave` and password `password` against the database `db`. {{% note class="important" %}} If you specify the `MONGODB-CR` authMechanism the authentication might fail once you upgrade MongoDB to 3.0 or higher due to new users only being created using the `SCRAM-SHA-1` mechanism. {{% /note %}} ## X509 The [x.509](http://docs.mongodb.org/manual/core/authentication/#x-509-certificate-authentication) mechanism authenticates a user whose name is derived from the distinguished subject name of the X.509 certificate presented by the driver during SSL negotiation. This authentication method requires the use of SSL connections with certificate validation and is available in MongoDB 2.6 and newer. The example below shows how you connect using a X509 certificate using `MongoClient`. 
We assume that the `client.pem` file here is a valid X509 certificate and that the MongoDB server is correctly configured. ```js var MongoClient = require('mongodb').MongoClient, f = require('util').format, assert = require('assert'); // Read the cert and key var cert = fs.readFileSync(__dirname + "/ssl/x509/client.pem"); var key = fs.readFileSync(__dirname + "/ssl/x509/client.pem"); // User name var userName = encodeURIComponent("CN=client,OU=kerneluser,O=10Gen,L=New York City,ST=New York,C=US"); // Connect using X509 authentication MongoClient.connect(f('mongodb://%s@server:27017/test?authMechanism=MONGODB-X509&ssl=true', userName), { server: { sslKey:key , sslCert:cert , sslValidate:false } }, function(err, db) { assert.equal(null, err); console.log("Connected correctly to server"); db.close(); }); ``` See the MongoDB server [x.509 tutorial](http://docs.mongodb.org/manual/tutorial/configure-x509-client-authentication/#add-x-509-certificate-subject-as-a-user) for more information about determining the subject name from the certificate. ## Against The Specified Database ## Kerberos (GSSAPI/SSPI) [MongoDB Enterprise](http://www.mongodb.com/products/mongodb-enterprise) supports proxy authentication through a Kerberos service. The Node.js driver supports Kerberos on UNIX via the MIT Kerberos library and on Windows via the SSPI API. Below is an example on how to connect to MongoDB using Kerberos for UNIX. 
```js var MongoClient = require('mongodb').MongoClient, f = require('util').format, assert = require('assert'); // KDC Server var server = "kerberos.example.com"; var principal = "drivers@KERBEROS.EXAMPLE.COM"; var urlEncodedPrincipal = encodeURIComponent(principal); // Let's write the actual connection code MongoClient.connect(format("mongodb://%s@%s/kerberos?authMechanism=GSSAPI&gssapiServiceName=mongodb", urlEncodedPrincipal, server), function(err, db) { assert.equal(null, err); db.close(); test.done(); }); ``` {{% note %}} The method refers to the `GSSAPI` authentication mechanism instead of `Kerberos` because technically the driver is authenticating via the [GSSAPI](https://tools.ietf.org/html/rfc4752) SASL mechanism. {{% /note %}} ## LDAP (PLAIN) [MongoDB Enterprise](http://www.mongodb.com/products/mongodb-enterprise) supports proxy authentication through a Lightweight Directory Access Protocol (LDAP) service. ```js var MongoClient = require('mongodb').MongoClient, f = require('util').format, assert = require('assert'); // LDAP Server var server = "ldap.example.com"; var user = "ldap-user"; var pass = "ldap-password"; // Url var url = format("mongodb://%s:%s@%s/test?authMechanism=PLAIN&maxPoolSize=1", user, pass, server); // Let's write the actual connection code MongoClient.connect(url, function(err, db) { test.equal(null, err); db.close(); test.done(); }); ``` {{% note %}} The method refers to the `plain` authentication mechanism instead of `LDAP` because technically the driver is authenticating via the [PLAIN](https://www.ietf.org/rfc/rfc4616.txt) SASL mechanism. {{% /note %}}
flyingfisher/node-mongodb-native
docs/reference/content/reference/connecting/authenticating.md
Markdown
apache-2.0
6,820
/*
 * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

#include <aws/elastictranscoder/model/Warning.h>
#include <aws/core/utils/json/JsonSerializer.h>

#include <utility>

using namespace Aws::Utils::Json;
using namespace Aws::Utils;

namespace Aws {
namespace ElasticTranscoder {
namespace Model {

// Default-construct with neither field marked as set.
Warning::Warning() :
    m_codeHasBeenSet(false),
    m_messageHasBeenSet(false) {
}

// Deserializing constructor: delegate to the default constructor to clear
// the "has been set" flags, then populate from JSON via operator=.
Warning::Warning(JsonView jsonValue) :
    Warning() {
  *this = jsonValue;
}

// Populate fields from a JSON document. Only keys present in the document
// are read; the corresponding flags record which fields were supplied.
Warning& Warning::operator =(JsonView jsonValue) {
  if (jsonValue.ValueExists("Code")) {
    m_code = jsonValue.GetString("Code");
    m_codeHasBeenSet = true;
  }

  if (jsonValue.ValueExists("Message")) {
    m_message = jsonValue.GetString("Message");
    m_messageHasBeenSet = true;
  }

  return *this;
}

// Serialize back to JSON, emitting only the fields that were set.
JsonValue Warning::Jsonize() const {
  JsonValue payload;

  if (m_codeHasBeenSet) {
    payload.WithString("Code", m_code);
  }

  if (m_messageHasBeenSet) {
    payload.WithString("Message", m_message);
  }

  return payload;
}

} // namespace Model
} // namespace ElasticTranscoder
} // namespace Aws
cedral/aws-sdk-cpp
aws-cpp-sdk-elastictranscoder/source/model/Warning.cpp
C++
apache-2.0
1,657
/* global describe, it, element, by, expect */
'use strict';

// Shared e2e helper modules.
var common = require('../../common/common');
var setup = require('../../common/setup');
var authentication = require('../../authentication/authentication');
var topologyEditorCommon = require('../../topology/topology_editor_common');
var xedit = require('../../common/xedit');
var applications = require('../../applications/applications');

// Fixture data indexed into the backend before the scenario runs.
var csarsData = require(__dirname + '/_data/application_topology_suggestions_property/csars.json');
var applicationsData = require(__dirname + '/_data/application_topology_suggestions_property/applications.json');
var applicationversionData = require(__dirname + '/_data/application_topology_suggestions_property/applicationversions.json');
var applicationenvironmentsData = require(__dirname + '/_data/application_topology_suggestions_property/applicationenvironments.json');
var indexedartifacttypesData = require(__dirname + '/_data/application_topology_suggestions_property/indexedartifacttypes.json');
var indexedcapabilitytypesData = require(__dirname + '/_data/application_topology_suggestions_property/indexedcapabilitytypes.json');
var indexeddatatypesData = require(__dirname + '/_data/application_topology_suggestions_property/indexeddatatypes.json');
var indexednodetypesData = require(__dirname + '/_data/application_topology_suggestions_property/indexednodetypes.json');
var indexedrelationshiptypesData = require(__dirname + '/_data/application_topology_suggestions_property/indexedrelationshiptypes.json');
var suggestionentryData = require(__dirname + '/_data/application_topology_suggestions_property/suggestionentry.json');
var topologiesData = require(__dirname + '/_data/application_topology_suggestions_property/topologies.json');

// End-to-end scenario: typing into a property that has a suggestion entry
// shows a typeahead list; submitting an unknown value opens a confirmation
// modal that lets the user add the value to the suggestions or pick an
// existing one.
describe('Suggestion on property definition tests', function() {
  var appName = 'AlienUI-SuggestionEntry';

  // Edits property `propertyName` of node template `nodeTemplateName` in the
  // topology editor and checks the suggestion behavior.
  //   propertyValue    - text typed into the inline-edit input
  //   componentType    - key into topologyEditorCommon.nodeDetailsBlocsIds
  //                      (e.g. 'cap' for the capabilities bloc)
  //   count            - expected number of typeahead matches shown
  //   isModalDisplay   - whether the confirmation modal is expected to open
  //   save             - when the modal is shown: true clicks "create",
  //                      false clicks "cancel"
  //   selectUbuntuValue - when true, selects the radio for the existing
  //                      suggestion "ubuntu" before confirming
  var editNodePropertySuggestionAndCheck = function(nodeTemplateName, propertyName, propertyValue, componentType, count, isModalDisplay, save, selectUbuntuValue) {
    topologyEditorCommon.showComponentsTab();
    topologyEditorCommon.selectNodeAndGoToDetailBloc(nodeTemplateName, topologyEditorCommon.nodeDetailsBlocsIds[componentType]);
    // Scope the search to the property row inside the detail bloc panel.
    var propertyElement = common.element(by.id(topologyEditorCommon.nodeDetailsBlocsIds[componentType] + '-panel'));
    propertyElement = common.element(by.id('p_' + propertyName), propertyElement);
    var editForm;
    // Open the inline editor and type the new value.
    common.click(by.css('span[editable-text]'), propertyElement);
    editForm = common.element(by.tagName('form'), propertyElement);
    var inputValue = common.element(by.tagName('input'), editForm);
    inputValue.clear();
    inputValue.sendKeys(propertyValue);
    // The typeahead dropdown should show the expected number of matches.
    expect(element.all(by.repeater('match in matches')).count()).toEqual(count);
    editForm.submit();
    // Modal presence depends on whether the value is already a suggestion.
    expect(element(by.className('modal-dialog')).isPresent()).toBe(isModalDisplay);
    element(by.className('modal-dialog')).isPresent().then(function (isVisible) {
      if (isVisible) {
        if (selectUbuntuValue) {
          // Pick the existing "ubuntu" suggestion instead of the typed value.
          element(by.css('input[value="ubuntu"]')).click();
        }
        if (save) {
          common.click(by.id('btn-create'));
        } else {
          common.click(by.id('btn-cancel'));
        }
      }
    });
  };

  // One-time setup: reset and index all fixture data, then log in.
  it('beforeAll', function() {
    setup.setup();
    setup.index('csar', 'csar', csarsData);
    setup.index('application', 'application', applicationsData);
    setup.index('applicationversion', 'applicationversion', applicationversionData);
    setup.index('applicationenvironment', 'applicationenvironment', applicationenvironmentsData);
    setup.index('toscaelement', 'indexedartifacttype', indexedartifacttypesData);
    setup.index('toscaelement', 'indexedcapabilitytype', indexedcapabilitytypesData);
    setup.index('toscaelement', 'indexeddatatype', indexeddatatypesData);
    setup.index('toscaelement', 'indexednodetype', indexednodetypesData);
    setup.index('toscaelement', 'indexedrelationshiptype', indexedrelationshiptypesData);
    setup.index('suggestionentry', 'suggestionentry', suggestionentryData);
    setup.index('topology', 'topology', topologiesData);
    common.home();
    authentication.login('admin');
  });

  // Unknown value + save: "xubuntu" is added to the suggestion entry
  // (6 matches after typing, modal shown, create clicked).
  it('should set the distribution to xubuntu and add it to the suggestionentry', function() {
    applications.goToApplicationTopologyPage(appName);
    editNodePropertySuggestionAndCheck('Compute', 'distribution', 'xubuntu', 'cap', 6, true, true);
    xedit.expect('div_distribution', 'xubuntu');
  });

  // Known value: "ubuntu" is already a suggestion, so no modal appears.
  it('should set the distribution to ubuntu, no modal should be present', function() {
    applications.goToApplicationTopologyPage(appName);
    editNodePropertySuggestionAndCheck('Compute', 'distribution', 'ubuntu', 'cap', 5, false);
    xedit.expect('div_distribution', 'ubuntu');
  });

  // "xubuntu" was added by the first test, so it is now a known suggestion.
  it('should set the distribution to xubuntu, no modal should be present', function() {
    applications.goToApplicationTopologyPage(appName);
    editNodePropertySuggestionAndCheck('Compute', 'distribution', 'xubuntu', 'cap', 5, false);
    xedit.expect('div_distribution', 'xubuntu');
  });

  // Unknown value, but the user picks the existing "ubuntu" suggestion in
  // the modal instead of adding "lubuntu".
  it('should set the distribution to lubuntu and select the selection with ubuntu', function() {
    applications.goToApplicationTopologyPage(appName);
    editNodePropertySuggestionAndCheck('Compute', 'distribution', 'lubuntu', 'cap', 6, true, true, true);
    xedit.expect('div_distribution', 'ubuntu');
  });

  // Unknown value + cancel: the edit is aborted with a 'Cancelled' error.
  it('should set the distribution to lubuntu and cancel the modal', function() {
    applications.goToApplicationTopologyPage(appName);
    editNodePropertySuggestionAndCheck('Compute', 'distribution', 'lubuntu', 'cap', 6, true, false);
    topologyEditorCommon.checkPropertyEditionError('Compute', 'distribution', 'Cancelled');
  });

  it('afterAll', function() {
    authentication.logout();
  });
});
broly-git/alien4cloud
alien4cloud-ui/src/test/webapp/e2e/scenarios/application_topology/application_topology_suggestions_property.js
JavaScript
apache-2.0
5,797
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.packages.Aspect; import com.google.devtools.build.lib.packages.AspectDefinition; import com.google.devtools.build.lib.packages.AspectDescriptor; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; /** * Represents aspects that should be applied to a target as part of {@link Dependency}. * * {@link Dependency} encapsulates all information that is needed to analyze an edge between * an AspectValue or a ConfiguredTargetValue and their direct dependencies, and * {@link AspectCollection} represents an aspect-related part of this information. * * Analysis arrives to a particular node in target graph with an ordered list of aspects that need * to be applied. Some of those aspects should visible to the node in question; some of them * are not directly visible, but are visible to other aspects, as specified by * {@link AspectDefinition#getRequiredProvidersForAspects()}. 
* * As an example, of all these things in interplay, consider android_binary rule depending * on java_proto_library rule depending on proto_library rule; consider further that * we analyze the android_binary with some ide_info aspect: * <pre> * proto_library(name = "pl") + ide_info_aspect * ^ * | [java_proto_aspect] * java_proto_library(name = "jpl") + ide_info_aspect * ^ * | [DexArchiveAspect] * android_binary(name = "ab") + ide_info_aspect * </pre> * ide_info_aspect is interested in java_proto_aspect, but not in DexArchiveAspect. * * Let's look is the {@link AspectCollection} for a Dependency representing a jpl->pl edge * for ide_info_aspect application to target <code>jpl</code>: * <ul> * <li>the full list of aspects is [java_proto_aspect, DexArchiveAspect, ide_info_aspect] * in this order (the order is determined by the order in which aspects originate on * <code>ab->...->pl</code> path. * </li> * <li>however, DexArchiveAspect is not visible to either ide_info_aspect or java_proto_aspect, * so the reduced list(and a result of {@link #getAllAspects()}) will be * [java_proto_aspect, ide_info_aspect] * </li> * <li>both java_proto_aspect and ide_info_aspect will be visible to * <code>jpl + ide_info_aspect</code> node: the former because java_proto_library * originates java_proto_aspect, and the aspect applied to the node sees the same * dependencies; and the latter because the aspect sees itself on all targets it * propagates to. So {@link #getVisibleAspects()} will return both of them. * </li> * <li>Since ide_info_aspect declared its interest in java_proto_aspect and the latter * comes before it in the order, {@link AspectDeps} for ide_info_aspect will * contain java_proto_aspect (so the application of ide_info_aspect to <code>pl</code> * target will see java_proto_aspect as well). * </li> * </ul> * * More details on members of {@link AspectCollection} follow, as well as more examples * of aspect visibility rules. 
* * * <p>{@link AspectDeps} is a class that represents an aspect and all aspects that are directly * visible to it.</p> * * <p>{@link #getVisibleAspects()} returns aspects that should be visible to the node in question. * </p> * * <p>{@link #getAllAspects()} return all aspects that should be applied to the target, * in topological order.</p> * * <p>In the following scenario, consider rule r<sub>i</sub> sending an aspect a<sub>i</sub> * to its dependency: * <pre> * [r0] * ^ * (a1) | * [r1] * (a2) | * [r2] * (a3) | * [r3] * </pre> * * When a3 is propagated to target r0, the analysis arrives there with a path [a1, a2, a3]. * Since we analyse the propagation of aspect a3, the only visible aspect is a3. * * <p>Let's first assume that aspect a3 wants to see aspects a1 and a2, but aspects a1 and a2 are * not interested in each other (according to their * {@link AspectDefinition#getRequiredProvidersForAspects()}). * * Since a3 is interested in all aspects, the result of {@link #getAllAspects()} will be * [a1, a2, a3], and {@link AspectCollection} will be: * <ul> * <li>a3 -> [a1, a2], a3 is visible</li> * <li>a2 -> []</li> * <li>a1 -> []</li> * </ul> * * <p>Now what happens if a3 is interested in a2 but not a1, and a2 is interested in a1? * Again, all aspects are transitively interesting to a visible a3, so {@link #getAllAspects()} * will be [a1, a2, a3], but {@link AspectCollection} will now be: * <ul> * <li>a3 -> [a2], a3 is visible</li> * <li>a2 -> [a1]</li> * <li>a1 -> []</li> * </ul> * * <p>As a final example, what happens if a3 is interested in a1, and a1 is interested in a2, but * a3 is not interested in a2? Now the result of {@link #getAllAspects()} will be [a1, a3]. * a1 is interested in a2, but a2 comes later in the path than a1, so a1 does not see it (a1 only * started propagating on r1 -> r0 edge, and there is now a2 originating on that path). 
* And {@link AspectCollection} will now be:
* <ul>
* <li>a3 -> [a1], a3 is visible</li>
* <li>a1 -> []</li>
* </ul>
* Note that it does not matter if a2 is interested in a1 or not - since no one after it
* in the path is interested in it, a2 is filtered out.
* </p>
*/
@Immutable
public final class AspectCollection {
  /** All aspects in the path; transitively visible to {@link #visibleAspects}. */
  private final ImmutableSet<AspectDescriptor> aspectPath;

  /** Aspects that should be visible to a dependency. */
  private final ImmutableSet<AspectDeps> visibleAspects;

  /** The empty collection: no aspects on the path, none visible. */
  public static final AspectCollection EMPTY =
      new AspectCollection(
          ImmutableSet.<AspectDescriptor>of(),
          ImmutableSet.<AspectDeps>of());

  private AspectCollection(
      ImmutableSet<AspectDescriptor> allAspects,
      ImmutableSet<AspectDeps> visibleAspects) {
    this.aspectPath = allAspects;
    this.visibleAspects = visibleAspects;
  }

  /** Returns every aspect on the path (needed aspects only, in path order). */
  public Iterable<AspectDescriptor> getAllAspects() {
    return aspectPath;
  }

  /** Returns the aspects visible to a dependency, each with its own dependency tree. */
  public ImmutableSet<AspectDeps> getVisibleAspects() {
    return visibleAspects;
  }

  /** Returns true if there are no aspects on the path. */
  public boolean isEmpty() {
    return aspectPath.isEmpty();
  }

  @Override
  public int hashCode() {
    return aspectPath.hashCode();
  }

  // NOTE(review): equality is defined on aspectPath only; visibleAspects is
  // ignored — presumably it is fully determined by the path, but confirm
  // before relying on equals() to distinguish visibility.
  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof AspectCollection)) {
      return false;
    }
    AspectCollection that = (AspectCollection) obj;
    return Objects.equal(aspectPath, that.aspectPath);
  }

  /**
   * Represents an aspect with all the aspects it depends on
   * (within an {@link AspectCollection}).
   *
   * <p>We preserve the order of aspects to correspond to the order in the
   * original {@link AspectCollection#aspectPath}, although that is not
   * strictly needed semantically.
   */
  @Immutable
  public static final class AspectDeps {
    private final AspectDescriptor aspect;
    private final ImmutableList<AspectDeps> dependentAspects;

    private AspectDeps(AspectDescriptor aspect, ImmutableList<AspectDeps> dependentAspects) {
      this.aspect = aspect;
      this.dependentAspects = dependentAspects;
    }

    public AspectDescriptor getAspect() {
      return aspect;
    }

    public ImmutableList<AspectDeps> getDependentAspects() {
      return dependentAspects;
    }
  }

  /** Test-only factory: every descriptor becomes a visible aspect with no dependencies. */
  public static AspectCollection createForTests(AspectDescriptor... descriptors) {
    return createForTests(ImmutableSet.copyOf(descriptors));
  }

  /** Test-only factory: every descriptor becomes a visible aspect with no dependencies. */
  public static AspectCollection createForTests(ImmutableSet<AspectDescriptor> descriptors) {
    ImmutableSet.Builder<AspectDeps> depsBuilder = ImmutableSet.builder();
    for (AspectDescriptor descriptor : descriptors) {
      depsBuilder.add(new AspectDeps(descriptor, ImmutableList.<AspectDeps>of()));
    }
    return new AspectCollection(descriptors, depsBuilder.build());
  }

  /**
   * Creates an {@link AspectCollection} from an ordered list of aspects and
   * a set of visible aspects.
   *
   * <p>The order of aspects is reverse to the order in which they originated, with
   * the earliest originating occurring last in the list.
   *
   * @throws AspectCycleOnPathException if an aspect occurs twice on the path with
   *     inconsistent visibility (see {@link #deduplicateAspects}).
   */
  public static AspectCollection create(
      Iterable<Aspect> aspectPath,
      Set<AspectDescriptor> visibleAspects) throws AspectCycleOnPathException {

    LinkedHashMap<AspectDescriptor, Aspect> aspectMap = deduplicateAspects(aspectPath);
    LinkedHashMap<AspectDescriptor, ArrayList<AspectDescriptor>> deps = new LinkedHashMap<>();

    // Calculate all needed aspects (either visible from outside or visible to
    // other needed aspects). Already discovered needed aspects are in key set of deps.
    // 1) Start from the end of the path. The aspect only sees other aspects that are
    //    before it.
    // 2) If the 'aspect' is visible from outside, it is needed.
    // 3) Otherwise, check whether 'aspect' is visible to any already needed aspects:
    //    if it is visible to a needed 'depAspect',
    //    add the 'aspect' to a list of aspects visible to 'depAspect';
    //    if 'aspect' is needed, add it to 'deps'.
    // At the end of this algorithm, key set of 'deps' contains a subset of original
    // aspect list consisting only of needed aspects, in reverse (since we iterate
    // the original list in reverse).
    //
    // deps[aspect] contains all aspects that 'aspect' needs, in reverse order.
    for (Map.Entry<AspectDescriptor, Aspect> aspect :
        ImmutableList.copyOf(aspectMap.entrySet()).reverse()) {
      boolean needed = visibleAspects.contains(aspect.getKey());
      for (AspectDescriptor depAspectDescriptor : deps.keySet()) {
        // An aspect never depends on itself.
        if (depAspectDescriptor.equals(aspect.getKey())) {
          continue;
        }
        Aspect depAspect = aspectMap.get(depAspectDescriptor);
        // 'aspect' is visible to 'depAspect' when its advertised providers satisfy
        // what 'depAspect' requires from aspects.
        if (depAspect.getDefinition().getRequiredProvidersForAspects()
            .isSatisfiedBy(aspect.getValue().getDefinition().getAdvertisedProviders())) {
          deps.get(depAspectDescriptor).add(aspect.getKey());
          needed = true;
        }
      }
      if (needed && !deps.containsKey(aspect.getKey())) {
        deps.put(aspect.getKey(), new ArrayList<AspectDescriptor>());
      }
    }

    // Record only the needed aspects from all aspects, in correct order.
    ImmutableList<AspectDescriptor> neededAspects = ImmutableList.copyOf(deps.keySet()).reverse();

    // Calculate visible aspect paths; 'aspectPaths' memoizes subtrees shared
    // between several visible aspects.
    HashMap<AspectDescriptor, AspectDeps> aspectPaths = new HashMap<>();
    ImmutableSet.Builder<AspectDeps> visibleAspectPaths = ImmutableSet.builder();
    for (AspectDescriptor visibleAspect : visibleAspects) {
      visibleAspectPaths.add(buildAspectDeps(visibleAspect, aspectPaths, deps));
    }
    return new AspectCollection(ImmutableSet.copyOf(neededAspects), visibleAspectPaths.build());
  }

  /**
   * Deduplicate aspects in path.
   *
   * @throws AspectCycleOnPathException if an aspect occurs twice on the path and
   *     the second occurrence sees a different set of aspects.
   */
  private static LinkedHashMap<AspectDescriptor, Aspect> deduplicateAspects(
      Iterable<Aspect> aspectPath) throws AspectCycleOnPathException {
    LinkedHashMap<AspectDescriptor, Aspect> aspectMap = new LinkedHashMap<>();
    ArrayList<Aspect> seenAspects = new ArrayList<>();
    for (Aspect aspect : aspectPath) {
      if (!aspectMap.containsKey(aspect.getDescriptor())) {
        // First occurrence: keep it.
        aspectMap.put(aspect.getDescriptor(), aspect);
        seenAspects.add(aspect);
      } else {
        // Duplicate: only legal when no intervening aspect is visible to it.
        validateDuplicateAspect(aspect, seenAspects);
      }
    }
    return aspectMap;
  }

  /**
   * Detect inconsistent duplicate occurrence of an aspect on the path.
   * There is a previous occurrence of {@code aspect} in {@code seenAspects}.
   *
   * <p>If in between that previous occurrence and the newly discovered occurrence
   * there is an aspect that is visible to {@code aspect}, then the second occurrence
   * is inconsistent - the set of aspects it sees is different from the first one.
   */
  private static void validateDuplicateAspect(Aspect aspect, ArrayList<Aspect> seenAspects)
      throws AspectCycleOnPathException {
    // Walk backwards from the most recently seen aspect toward the original occurrence.
    for (int i = seenAspects.size() - 1; i >= 0; i--) {
      Aspect seenAspect = seenAspects.get(i);
      if (aspect.getDescriptor().equals(seenAspect.getDescriptor())) {
        // This is a previous occurrence of the same aspect; nothing in between
        // was visible to it, so the duplicate is consistent.
        return;
      }

      if (aspect.getDefinition().getRequiredProvidersForAspects()
          .isSatisfiedBy(seenAspect.getDefinition().getAdvertisedProviders())) {
        // 'seenAspect' sits between the two occurrences and is visible to
        // 'aspect' - the two occurrences see different aspect sets.
        throw new AspectCycleOnPathException(aspect.getDescriptor(), seenAspect.getDescriptor());
      }
    }
  }

  // Recursively builds the AspectDeps tree for 'descriptor', memoizing results
  // in 'aspectPaths' so shared subtrees are constructed only once.
  private static AspectDeps buildAspectDeps(AspectDescriptor descriptor,
      HashMap<AspectDescriptor, AspectDeps> aspectPaths,
      LinkedHashMap<AspectDescriptor, ArrayList<AspectDescriptor>> deps) {
    if (aspectPaths.containsKey(descriptor)) {
      return aspectPaths.get(descriptor);
    }

    ImmutableList.Builder<AspectDeps> aspectPathBuilder = ImmutableList.builder();
    ArrayList<AspectDescriptor> depList = deps.get(descriptor);

    // deps[aspect] contains all aspects visible to 'aspect' in reverse order,
    // so iterate backwards to restore the original path order.
    for (int i = depList.size() - 1; i >= 0; i--) {
      aspectPathBuilder.add(buildAspectDeps(depList.get(i), aspectPaths, deps));
    }
    AspectDeps aspectPath = new AspectDeps(descriptor, aspectPathBuilder.build());
    aspectPaths.put(descriptor, aspectPath);
    return aspectPath;
  }

  /**
   * Signals an inconsistency on aspect path: an aspect occurs twice on the path and
   * the second occurrence sees a different set of aspects.
   *
   * <p>{@link #getAspect()} is the aspect occurring twice, and {@link #getPreviousAspect()}
   * is the aspect that the second occurrence sees but the first does not.
   */
  public static class AspectCycleOnPathException extends Exception {
    private final AspectDescriptor aspect;
    private final AspectDescriptor previousAspect;

    public AspectCycleOnPathException(AspectDescriptor aspect, AspectDescriptor previousAspect) {
      super(String.format("Aspect %s is applied twice, both before and after aspect %s",
          aspect.getDescription(),
          previousAspect.getDescription()
      ));
      this.aspect = aspect;
      this.previousAspect = previousAspect;
    }

    public AspectDescriptor getAspect() {
      return aspect;
    }

    public AspectDescriptor getPreviousAspect() {
      return previousAspect;
    }
  }
}
ButterflyNetwork/bazel
src/main/java/com/google/devtools/build/lib/analysis/AspectCollection.java
Java
apache-2.0
15,413
'use strict'; // ========================================================================= // // Policies for users // // ========================================================================= var acl = require('acl'); acl = new acl(new acl.memoryBackend()); var helpers = require (require('path').resolve('./modules/core/server/controllers/core.helpers.controller')); exports.invokeRolesPolicies = function () { // empty means that ONLY admin can use these paths helpers.setPathPermissions (acl, [ [ 'guest', 'user', '/api/new/user' ], ['', 'user', '/api/onboardUser'] ]); }; exports.isAllowed = helpers.isAllowed (acl);
logancodes/esm-server
modules/users/server/policies/admin.server.policy.js
JavaScript
apache-2.0
641
package # hide package from pause
  DBIx::Class::PK::Auto::Pg;

use strict;
use warnings;

use base qw/DBIx::Class/;

# Deprecated compatibility shim: auto-increment primary keys are now handled
# by the PK::Auto component (via Storage) for all databases, so this
# Pg-specific class only needs to load that component.
__PACKAGE__->load_components(qw/PK::Auto/);

1;

__END__

=head1 NAME

DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg

=head1 SYNOPSIS

Just load PK::Auto instead; auto-inc is now handled by Storage.

=head1 FURTHER QUESTIONS?

Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.

=head1 COPYRIGHT AND LICENSE

This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
ray66rus/vndrv
local/lib/perl5/DBIx/Class/PK/Auto/Pg.pm
Perl
apache-2.0
748
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.intercept; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.testng.Assert.assertEquals; import com.google.common.collect.Maps; import io.netty.buffer.ByteBuf; import org.apache.bookkeeper.mledger.Entry; import org.apache.pulsar.broker.PulsarService; import org.apache.pulsar.broker.service.Consumer; import org.apache.pulsar.broker.service.Producer; import org.apache.pulsar.broker.service.ServerCnx; import org.apache.pulsar.broker.service.Subscription; import org.apache.pulsar.broker.service.Topic; import org.apache.pulsar.common.api.proto.BaseCommand; import org.apache.pulsar.common.api.proto.CommandAck; import org.apache.pulsar.common.api.proto.MessageMetadata; import org.apache.pulsar.common.intercept.InterceptException; import org.apache.pulsar.common.nar.NarClassLoader; import org.testng.annotations.Test; import javax.servlet.FilterChain; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import java.util.Map; /** * Unit test {@link BrokerInterceptorWithClassLoader}. 
*/ @Test(groups = "broker") public class BrokerInterceptorWithClassLoaderTest { @Test public void testWrapper() throws Exception { BrokerInterceptor h = mock(BrokerInterceptor.class); NarClassLoader loader = mock(NarClassLoader.class); BrokerInterceptorWithClassLoader wrapper = new BrokerInterceptorWithClassLoader(h, loader); PulsarService pulsarService = mock(PulsarService.class); wrapper.initialize(pulsarService); verify(h, times(1)).initialize(same(pulsarService)); } @Test public void testClassLoaderSwitcher() throws Exception { NarClassLoader narLoader = mock(NarClassLoader.class); BrokerInterceptor interceptor = new BrokerInterceptor() { @Override public void beforeSendMessage(Subscription subscription, Entry entry, long[] ackSet, MessageMetadata msgMetadata) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void onConnectionCreated(ServerCnx cnx) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void producerCreated(ServerCnx cnx, Producer producer, Map<String, String> metadata) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void consumerCreated(ServerCnx cnx, Consumer consumer, Map<String, String> metadata) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void messageProduced(ServerCnx cnx, Producer producer, long startTimeNs, long ledgerId, long entryId, Topic.PublishContext publishContext) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void messageDispatched(ServerCnx cnx, Consumer consumer, long ledgerId, long entryId, ByteBuf headersAndPayload) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void messageAcked(ServerCnx cnx, Consumer consumer, CommandAck ackCmd) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void onPulsarCommand(BaseCommand 
command, ServerCnx cnx) throws InterceptException { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void onConnectionClosed(ServerCnx cnx) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void onWebserviceRequest(ServletRequest request) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void onWebserviceResponse(ServletRequest request, ServletResponse response) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void onFilter(ServletRequest request, ServletResponse response, FilterChain chain) { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void initialize(PulsarService pulsarService) throws Exception { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } @Override public void close() { assertEquals(Thread.currentThread().getContextClassLoader(), narLoader); } }; BrokerInterceptorWithClassLoader brokerInterceptorWithClassLoader = new BrokerInterceptorWithClassLoader(interceptor, narLoader); ClassLoader curClassLoader = Thread.currentThread().getContextClassLoader(); // test class loader assertEquals(brokerInterceptorWithClassLoader.getClassLoader(), narLoader); // test initialize brokerInterceptorWithClassLoader.initialize(mock(PulsarService.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test onFilter brokerInterceptorWithClassLoader.onFilter(mock(ServletRequest.class) , mock(ServletResponse.class), mock(FilterChain.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test onWebserviceResponse brokerInterceptorWithClassLoader.onWebserviceResponse(mock(ServletRequest.class) , mock(ServletResponse.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test onWebserviceRequest 
brokerInterceptorWithClassLoader.onWebserviceRequest(mock(ServletRequest.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test onConnectionClosed brokerInterceptorWithClassLoader.onConnectionClosed(mock(ServerCnx.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test onPulsarCommand brokerInterceptorWithClassLoader.onPulsarCommand(null, mock(ServerCnx.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test messageAcked brokerInterceptorWithClassLoader .messageAcked(mock(ServerCnx.class), mock(Consumer.class), null); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test messageDispatched brokerInterceptorWithClassLoader .messageDispatched(mock(ServerCnx.class), mock(Consumer.class), 1, 1, null); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test messageProduced brokerInterceptorWithClassLoader .messageProduced(mock(ServerCnx.class), mock(Producer.class), 1, 1, 1, null); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test consumerCreated brokerInterceptorWithClassLoader .consumerCreated(mock(ServerCnx.class), mock(Consumer.class), Maps.newHashMap()); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test producerCreated brokerInterceptorWithClassLoader .producerCreated(mock(ServerCnx.class), mock(Producer.class), Maps.newHashMap()); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test onConnectionCreated brokerInterceptorWithClassLoader .onConnectionCreated(mock(ServerCnx.class)); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test beforeSendMessage brokerInterceptorWithClassLoader .beforeSendMessage(mock(Subscription.class), mock(Entry.class), null, null); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); // test close 
brokerInterceptorWithClassLoader.close(); assertEquals(Thread.currentThread().getContextClassLoader(), curClassLoader); } }
massakam/pulsar
pulsar-broker/src/test/java/org/apache/pulsar/broker/intercept/BrokerInterceptorWithClassLoaderTest.java
Java
apache-2.0
9,510
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hyracks.storage.am.lsm.btree.impls; import java.util.Set; import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilter; import org.apache.hyracks.storage.am.btree.impls.BTree; import org.apache.hyracks.storage.am.lsm.common.api.AbstractLSMWithBloomFilterDiskComponent; import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter; import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex; import org.apache.hyracks.storage.common.buffercache.IBufferCache; public class LSMBTreeWithBloomFilterDiskComponent extends AbstractLSMWithBloomFilterDiskComponent { private final BTree btree; private final BloomFilter bloomFilter; public LSMBTreeWithBloomFilterDiskComponent(AbstractLSMIndex lsmIndex, BTree btree, BloomFilter bloomFilter, ILSMComponentFilter filter) { super(lsmIndex, LSMBTreeDiskComponent.getMetadataPageManager(btree), filter); this.btree = btree; this.bloomFilter = bloomFilter; } @Override public BloomFilter getBloomFilter() { return bloomFilter; } @Override public IBufferCache getBloomFilterBufferCache() { return getMetadataHolder().getBufferCache(); } @Override public long getComponentSize() { return 
LSMBTreeDiskComponent.getComponentSize(btree) + getComponentSize(bloomFilter); } @Override public Set<String> getLSMComponentPhysicalFiles() { Set<String> files = LSMBTreeDiskComponent.getFiles(btree); addFiles(files, bloomFilter); return files; } static void addFiles(Set<String> files, BloomFilter bloomFilter) { files.add(bloomFilter.getFileReference().getFile().getAbsolutePath()); } @Override public int getFileReferenceCount() { return LSMBTreeDiskComponent.getFileReferenceCount(btree); } @Override public BTree getMetadataHolder() { return btree; } @Override public BTree getIndex() { return btree; } static long getComponentSize(BloomFilter bloomFilter) { return bloomFilter.getFileReference().getFile().length(); } }
apache/incubator-asterixdb
hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBloomFilterDiskComponent.java
Java
apache-2.0
2,959
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.marcosbarbero.cloud.autoconfigure.zuul.ratelimit.config.properties.validators;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import javax.validation.Constraint;
import javax.validation.Payload;

/**
 * Bean Validation constraint requiring each rate-limit policy to declare a
 * limit, a quota, or both; validated by {@link PoliciesValidator}.
 *
 * @author Liel Chayoun
 */
@Constraint(validatedBy = PoliciesValidator.class)
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface Policies {

    Class<?>[] groups() default {};

    Class<? extends Payload>[] payload() default {};

    String message() default "Policy must contain limit, quota or both";
}
marcosbarbero/spring-cloud-zuul-ratelimit
spring-cloud-zuul-ratelimit-core/src/main/java/com/marcosbarbero/cloud/autoconfigure/zuul/ratelimit/config/properties/validators/Policies.java
Java
apache-2.0
1,289
package com.perforce.p4java.tests.dev.unit.feature.client; import com.perforce.p4java.core.file.IntegrationOptions; import com.perforce.p4java.tests.dev.annotations.TestId; import com.perforce.p4java.tests.dev.unit.P4JavaRshTestCase; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * * This class lightly exercises the ClientIntegration tasks in p4Java starting with * IntegrationOptions. This will be added to as we run across opportunities to add tests. */ @TestId("ClientIntegrationTest01") public class ClientIntegrationTest extends P4JavaRshTestCase { /** * IntegrationOptions default constructor. Ensures all values * are initially false. */ @Test public void testDefaultIntegrationOptions() throws Exception { try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); boolean baselessMergeVal = intOptions.isBaselessMerge(); boolean bidirectionalIntegVal = intOptions.isBidirectionalInteg(); boolean displayBaseDetailsVal = intOptions.isDisplayBaseDetails(); boolean dontCopyToClientVal = intOptions.isDontCopyToClient(); boolean forceVal = intOptions.isForce(); boolean propagateTypeVal = intOptions.isPropagateType(); boolean reverseMapping = intOptions.isReverseMapping(); boolean useHaveRevVal = intOptions.isUseHaveRev(); assertFalse("Default value expected to be false.", baselessMergeVal); assertFalse("Default value expected to be false.", bidirectionalIntegVal); assertFalse("Default value expected to be false.", displayBaseDetailsVal); assertFalse("Default value expected to be false.", dontCopyToClientVal); assertFalse("Default value expected to be false.", forceVal); assertFalse("Default value expected to be false.", propagateTypeVal); assertFalse("Default value expected to be false.", reverseMapping); assertFalse("Default value expected to be false.", 
useHaveRevVal); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * IntegrationOptions Explicit Value constructor. Individually set each value to * true and verify that it returns true. */ @Test public void testIntegrationOptionsSettersGetters() throws Exception { try { debugPrintTestName(); String [] deletedOptions = { "d" }; IntegrationOptions intOptions = new IntegrationOptions(); intOptions.setBaselessMerge(true); intOptions.setBidirectionalInteg(true); intOptions.setDisplayBaseDetails(true); intOptions.setDontCopyToClient(true); intOptions.setForce(true); intOptions.setPropagateType(true); intOptions.setReverseMapping(true); intOptions.setUseHaveRev(true); intOptions.setDeletedOptions(deletedOptions); assertTrue("Value expected to be true.", intOptions.isBaselessMerge()); assertTrue("Value expected to be true.", intOptions.isBidirectionalInteg()); assertTrue("Value expected to be true.", intOptions.isDisplayBaseDetails()); assertTrue("Value expected to be true.", intOptions.isDontCopyToClient()); assertTrue("Value expected to be true.", intOptions.isForce()); assertTrue("Value expected to be true.", intOptions.isPropagateType()); assertTrue("Value expected to be true.", intOptions.isReverseMapping()); assertTrue("Value expected to be true.", intOptions.isUseHaveRev()); assertEquals("Array lengths for deletedOptions should match.", deletedOptions.length, intOptions.getDeletedOptions().length); String [] actDeletedOptions = intOptions.getDeletedOptions(); assertEquals("Array values for deletedOptions should match.", deletedOptions[0], actDeletedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * IntegrationOptions Explicit Value constructor. Individually set each value to * true and verify that it returns true. 
*/ @Test public void testIntegrationOptionsExplicitValueConstructor() throws Exception { try { debugPrintTestName(); String [] deletedOptions = { "d" }; IntegrationOptions intOptions = new IntegrationOptions(true, true, true, true, true, true, true, true, deletedOptions); assertTrue("Value expected to be true.", intOptions.isBaselessMerge()); assertTrue("Value expected to be true.", intOptions.isBidirectionalInteg()); assertTrue("Value expected to be true.", intOptions.isDisplayBaseDetails()); assertTrue("Value expected to be true.", intOptions.isDontCopyToClient()); assertTrue("Value expected to be true.", intOptions.isForce()); assertTrue("Value expected to be true.", intOptions.isPropagateType()); assertTrue("Value expected to be true.", intOptions.isReverseMapping()); assertTrue("Value expected to be true.", intOptions.isUseHaveRev()); assertEquals("Array lengths for deletedOptions should match.", deletedOptions.length, intOptions.getDeletedOptions().length); String [] actDeletedOptions = intOptions.getDeletedOptions(); assertEquals("Array values for deletedOptions should match.", deletedOptions[0], actDeletedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * FIXME Tests getDeletedOptions(). Need to verify something here. */ @Test public void testGetDeletedOptions() throws Exception { String [] deletedOptions; try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); deletedOptions = intOptions.getDeletedOptions(); debugPrint("DeletedOptions: " + deletedOptions); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * Tests setDeletedOptions() to make sure values remain set. * deletedOptions - if non-null, must contain zero or more non-null entries with * individual values "d", "Dt", "Ds", or "Di"; null, inconsistent, or conflicting * option values here will have unspecified and potentially incorrect effects. 
*/ @Test public void testSetDeletedOptionsLittleD() throws Exception { final String [] newOptions = { "d" }; String [] returnedOptions; try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); intOptions.setDeletedOptions(newOptions); returnedOptions = intOptions.getDeletedOptions(); assertEquals("Wrong number of DeletedOptions returned.", newOptions.length, returnedOptions.length); debugPrint("Num DeletedOptions: " + newOptions.length, "Num returnedOptions: " + returnedOptions.length); assertNotNull("DeletedOptions should not be Null.", returnedOptions); debugPrint("DeletedOptions: ", newOptions[0], returnedOptions[0]); assertEquals("DeletedOptions should be " + newOptions[0], newOptions[0], returnedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * Tests setDeletedOptions() to make sure values remain set. * deletedOptions - if non-null, must contain zero or more non-null entries with * individual values "d", "Dt", "Ds", or "Di"; null, inconsistent, or conflicting * option values here will have unspecified and potentially incorrect effects. 
*/ @Test public void testSetDeletedOptionsDt() throws Exception { final String [] newOptions = { "Dt" }; String [] returnedOptions; try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); intOptions.setDeletedOptions(newOptions); returnedOptions = intOptions.getDeletedOptions(); assertEquals("Wrong number of DeletedOptions returned.", newOptions.length, returnedOptions.length); debugPrint("Num DeletedOptions: " + newOptions.length, "Num returnedOptions: " + returnedOptions.length); assertNotNull("DeletedOptions should not be Null.", returnedOptions); debugPrint("DeletedOptions: ", newOptions[0], returnedOptions[0]); assertEquals("DeletedOptions should be " + newOptions[0], newOptions[0], returnedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * Tests setDeletedOptions() to make sure values remain set. * deletedOptions - if non-null, must contain zero or more non-null entries with * individual values "d", "Dt", "Ds", or "Di"; null, inconsistent, or conflicting * option values here will have unspecified and potentially incorrect effects. 
*/ @Test public void testSetDeletedOptionsLittleDs() throws Exception { final String [] newOptions = { "Ds" }; String [] returnedOptions; try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); intOptions.setDeletedOptions(newOptions); returnedOptions = intOptions.getDeletedOptions(); assertEquals("Wrong number of DeletedOptions returned.", newOptions.length, returnedOptions.length); debugPrint("Num DeletedOptions: " + newOptions.length, "Num returnedOptions: " + returnedOptions.length); assertNotNull("DeletedOptions should not be Null.", returnedOptions); debugPrint("DeletedOptions: ", newOptions[0], returnedOptions[0]); assertEquals("DeletedOptions should be " + newOptions[0], newOptions[0], returnedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * Tests setDeletedOptions() to make sure values remain set. * deletedOptions - if non-null, must contain zero or more non-null entries with * individual values "d", "Dt", "Ds", or "Di"; null, inconsistent, or conflicting * option values here will have unspecified and potentially incorrect effects. 
*/ @Test public void testSetDeletedOptionsDi() throws Exception { final String [] newOptions = { "Di" }; String [] returnedOptions; try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); intOptions.setDeletedOptions(newOptions); returnedOptions = intOptions.getDeletedOptions(); assertEquals("Wrong number of DeletedOptions returned.", newOptions.length, returnedOptions.length); debugPrint("Num DeletedOptions: " + newOptions.length, "Num returnedOptions: " + returnedOptions.length); assertNotNull("DeletedOptions should not be Null.", returnedOptions); debugPrint("DeletedOptions: ", newOptions[0], returnedOptions[0]); assertEquals("DeletedOptions should be " + newOptions[0], newOptions[0], returnedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } /** * Tests setDeletedOptions() to make sure values remain set. * deletedOptions - if non-null, must contain zero or more non-null entries with * individual values "d", "Dt", "Ds", or "Di"; null, inconsistent, or conflicting * option values here will have unspecified and potentially incorrect effects. 
*/ @Test public void testSetDeletedOptionsDsDtDi() throws Exception { final String [] newOptions = { "Ds Dt Di" }; String [] returnedOptions; try { debugPrintTestName(); IntegrationOptions intOptions = new IntegrationOptions(); intOptions.setDeletedOptions(newOptions); returnedOptions = intOptions.getDeletedOptions(); assertEquals("Wrong number of DeletedOptions returned.", newOptions.length, returnedOptions.length); debugPrint("Num DeletedOptions: " + newOptions.length, "Num returnedOptions: " + returnedOptions.length); assertNotNull("DeletedOptions should not be Null.", returnedOptions); debugPrint("DeletedOptions: ", newOptions[0], returnedOptions[0]); assertEquals("DeletedOptions should be " + newOptions[0], newOptions[0], returnedOptions[0]); } catch (Exception exc) { fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage()); } } }
groboclown/p4ic4idea
p4java/src/test/java/com/perforce/p4java/tests/dev/unit/feature/client/ClientIntegrationTest.java
Java
apache-2.0
12,211
/* %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % % % % % % % DDDD RRRR AAA W W % % D D R R A A W W % % D D RRRR AAAAA W W % % D D R R A A W W W % % DDDD R R A A W W % % % % TTTTT EEEEE SSSSS TTTTT % % T E SS T % % T EEE SSS T % % T E SS T % % T EEEEE SSSSS T % % % % % % MagickWand Drawing Tests % % % % Software Design % % Cristy % % Bob Friesenhahn % % March 2002 % % % % % % Copyright 1999-2014 ImageMagick Studio LLC, a non-profit organization % % dedicated to making software imaging solutions freely available. % % % % You may not use this file except in compliance with the License. You may % % obtain a copy of the License at % % % % http://www.imagemagick.org/script/license.php % % % % Unless required by applicable law or agreed to in writing, software % % distributed under the License is distributed on an "AS IS" BASIS, % % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. % % See the License for the specific language governing permissions and % % limitations under the License. 
% % % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % % % */ #include <stdio.h> #include <stdlib.h> #include <wand/MagickWand.h> #define ThrowWandException(wand) \ { \ char \ *description; \ \ ExceptionType \ severity; \ \ description=MagickGetException(wand,&severity); \ (void) FormatLocaleFile(stderr,"%s %s %lu %s\n",GetMagickModule(), \ description); \ description=(char *) MagickRelinquishMemory(description); \ exit(-1); \ } static MagickBooleanType ScribbleImage (MagickWand *canvas) { DrawingWand *picasso; PixelWand *color; picasso=NewDrawingWand(); color=NewPixelWand(); (void) PushDrawingWand(picasso); { DrawSetViewbox(picasso,0,0,(ssize_t) MagickGetImageWidth(canvas), (ssize_t) MagickGetImageHeight(canvas)); DrawScale(picasso,1.101,1.08); DrawTranslate(picasso,-23.69,-22.97); DrawRotate(picasso,0); (void) PixelSetColor(color,"#ffffff"); DrawSetFillColor(picasso,color); DrawRectangle(picasso,23.69,22.97,564.6,802.2); DrawSetFillOpacity(picasso,1.0); (void) PixelSetColor(color,"none"); DrawSetFillColor(picasso,color); DrawSetStrokeColor(picasso,color); DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); DrawPushDefs(picasso); { DrawPushClipPath(picasso,"clip_1"); { (void) PushDrawingWand(picasso); { DrawRectangle(picasso,0,0,595.3,841.9); } (void) PopDrawingWand(picasso); } DrawPopClipPath(picasso); } DrawPopDefs(picasso); (void) PushDrawingWand(picasso); { (void) DrawSetClipPath(picasso, "url(#clip_1)"); (void) PushDrawingWand(picasso); { DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,4.032); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#ff0000"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#ff00ff"); DrawSetFillColor(picasso,color); 
DrawRectangle(picasso,72,72,144,144); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,9); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#00ff00"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#0080ff"); DrawSetFillColor(picasso,color); DrawRoundRectangle(picasso,72,216,360,432,9,9); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { const PointInfo points[37] = { { 378.1,81.72 }, { 381.1,79.56 }, { 384.3,78.12 }, { 387.6,77.33 }, { 391.1,77.11 }, { 394.6,77.62 }, { 397.8,78.77 }, { 400.9,80.57 }, { 403.6,83.02 }, { 523.9,216.8 }, { 526.2,219.7 }, { 527.6,223 }, { 528.4,226.4 }, { 528.6,229.8 }, { 528,233.3 }, { 526.9,236.5 }, { 525.1,239.5 }, { 522.6,242.2 }, { 495.9,266.3 }, { 493,268.5 }, { 489.7,269.9 }, { 486.4,270.8 }, { 482.9,270.9 }, { 479.5,270.4 }, { 476.2,269.3 }, { 473.2,267.5 }, { 470.4,265 }, { 350,131.2 }, { 347.8,128.3 }, { 346.4,125.1 }, { 345.6,121.7 }, {345.4,118.2 }, { 346,114.8 }, { 347.1,111.5 }, { 348.9,108.5 }, { 351.4,105.8 }, { 378.1,81.72 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,2.016); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#000080"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#c2c280"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,37,points); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,3.024); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const 
double *)NULL); (void) PixelSetColor(color,"#000080"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#000080"); DrawSetFillColor(picasso,color); DrawEllipse(picasso,489.6,424.8,72,129.6,0,360); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { const PointInfo points[48] = { { 213.8,25.13}, { 216.7,24.48 }, {219.8,24.55 }, { 223.1,25.42 }, { 226.7,27 }, { 230.3,29.3 }, { 234.1,32.26 }, { 237.9,35.86 }, { 241.8,40.03 }, { 249.7,50.11 }, { 257.4,62.14 }, { 264.8,75.89 }, { 271.6,91.15 }, { 277.3,106.8 }, { 281.6,121.8 }, { 284.4,135.9 }, { 285.7,148.5 }, { 285.6,159.6 }, { 284.9,164.3 }, { 283.8,168.5 }, { 282.5,172.1 }, { 280.7,175 }, { 278.5,177.3 }, { 275.9,178.7 }, { 273,179.4 }, { 269.9,179.3 }, { 266.6,178.4 }, { 263.1,176.8 }, { 259.5,174.5}, { 255.7,171.6 }, { 251.9,168 }, { 248,163.8 }, { 244.1,159 }, { 240.1,153.7 }, { 232.3,141.7 }, { 225,127.9 }, { 218.2,112.7 }, { 212.5,97.06 }, { 208.2,82.01 }, { 205.4,67.97 }, { 204,55.3 }, { 204.3,44.35 }, { 204.9,39.6 }, { 205.9,35.42 }, { 207.4,31.82 }, { 209.2,28.87 }, { 211.3,26.64}, { 213.8,25.13 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,3.024); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#ff8000"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#00ffff"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,48,points); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,12.02); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); (void) PixelSetColor(color,"none"); 
DrawSetFillColor(picasso,color); DrawArc(picasso,360,554.4,187.2,237.6,0,90); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,9); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#4000c2"); DrawSetFillColor(picasso,color); DrawEllipse(picasso,388.8,626.4,100.8,122.4,0,90); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { const PointInfo points[6] = { { 180,504 }, { 282.7,578.6 }, { 243.5,699.4 }, { 116.5,699.4 }, { 77.26,578.6 }, { 180,504 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,9); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#800000"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,6,points); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { const PointInfo points[11] = { { 180,504 }, { 211.8,568.3 }, { 282.7,578.6 }, { 231.3,628.7 }, { 243.5,699.4 }, { 180,666 }, { 116.5,699.4 }, { 128.7,628.7 }, { 77.26,578.6 }, { 148.2,568.3 }, { 180,504 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,9); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#800000"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,11,points); } (void) PopDrawingWand(picasso); (void) 
PushDrawingWand(picasso); { const PointInfo points[15] = { { 540,288 }, { 561.6,216 }, { 547.2,43.2 }, { 280.8,36 }, { 302.4,194.4 }, { 331.2,64.8 }, { 504,64.8 }, { 475.2,115.2 }, { 525.6,93.6 }, { 496.8,158.4 }, { 532.8,136.8 }, { 518.4,180 }, { 540,172.8 }, { 540,223.2 }, { 540,288 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,5.976); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#ffff00"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,15,points); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { const PointInfo points[7] = { { 57.6,640.8 }, { 57.6,784.8 }, { 194.4,799.2 }, { 259.2,777.6 }, { 151.2,756 }, { 86.4,748.8 }, { 57.6,640.8 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,5.976); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#ffff00"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,7,points); } (void) PopDrawingWand(picasso); (void) PushDrawingWand(picasso); { const PointInfo points[193] = { { 27.86,565.3 }, { 29.66,550.8 }, { 31.97,538.1 }, { 34.85,527.1 }, { 38.09,517.7 }, { 41.83,509.8 }, { 45.86,503.1 }, { 50.33,497.6 }, { 55.08,493.2 }, { 60.19,489.8 }, { 65.45,487.3 }, { 70.92,485.4 }, { 76.61,484.2 }, { 88.42,483 }, { 100.4,482.9 }, { 108.4,482.2 }, { 119.8,480.3 }, { 150.8,474.1 }, { 189.4,466.6 }, { 210.3,463 }, { 231.5,459.9 }, { 252.4,457.8 }, { 272.7,456.6 }, { 291.8,456.9 }, { 300.7,457.7 }, { 309.1,458.9 }, { 316.9,460.6 }, { 324.1,462.8 }, { 330.7,465.6 }, { 336.4,469 }, { 
341.3,473 }, { 345.3,477.7 }, { 348.4,483.1 }, { 350.4,489.2}, { 352.4,495.4 }, { 355.2,500.9 }, { 358.8,505.8 }, { 363,510 }, { 367.8,513.6 }, { 373,516.8 }, { 378.6,519.6 }, { 384.3,521.8 }, { 396.4,525.4 }, { 408.2,527.9 }, { 428,531.2 }, { 434.6,532.9 }, { 436.7,533.8 }, { 437.8,534.9 }, { 437.8,536.2 }, { 436.8,537.8 }, { 434.5,539.6 }, { 430.9,541.8 }, { 419.3,547.6 }, { 401.3,555.2 }, { 342.4,577.9 }, {325.2,584.9 }, { 311,591.3 }, { 300,597.3 }, { 291.6,602.8 }, { 285.8,607.8 }, { 282.3,612.3 }, { 281.4,614.4 }, { 280.9,616.2 }, { 281.2,619.6 }, { 282.1,621.2 }, { 283.3,622.6 }, { 286.8,624.9 }, { 291.5,626.6 }, { 297.1,627.8 }, { 303.6,628.3 }, { 310.5,628.3 }, { 317.9,627.6 }, { 325.2,626.3 }, { 332.6,624.3 }, { 339.5,621.7 }, { 345.9,618.4 }, { 351.4,614.4 }, { 353.9,612.2 }, { 356,609.8 }, { 357.9,607.1 }, { 359.4,604.3 }, { 360.6,601.3 }, { 361.4,598.2 }, { 361.7,594.9 }, { 361.7,591.3 }, { 361.2,587.7 }, { 360.1,583.7 }, { 358.6,579.7 }, { 356.4,575.4 }, { 353.7,570.9 }, { 350.4,566.2 }, { 346.4,561.3 }, { 341.8,556.2 }, { 336.5,550.9 }, { 330.6,545.5 }, { 323.8,539.8 }, { 316.2,533.9 }, { 298.7,521.5 }, { 277.8,508.2 }, { 256.1,495.5 }, { 236,484.5 }, { 217.7,475.1 }, { 200.8,467.1 }, { 185.6,460.7 }, { 171.9,455.5 }, { 159.6,451.6 }, { 148.6,448.8 }, { 139,447 }, { 130.5,446.2 }, { 123.3,446.2 }, { 117.1,446.9 }, { 112,448.3 }, { 107.9,450.2 }, { 104.8,452.5 }, { 102.5,455.2 }, { 101,458.1 }, { 100.2,461.2 }, { 100.2,464.3 }, { 100.7,467.4 }, { 101.8,470.3 }, { 103.4,473 }, { 105.4,475.3 }, { 107.8,477.1 }, { 110.5,478.4 }, { 113.4,479.1 }, { 116.5,478.9 }, { 119.7,478 }, { 123,476.2 }, { 126.4,473.3 }, { 129.6,469.2 }, { 132.7,463.9 }, { 135.2,458.4 }, { 136.6,453.7 }, { 137,449.9 }, { 136.6,446.8 }, { 135.4,444.5 }, { 133.3,442.9 }, { 130.8,441.9 }, { 127.5,441.4 }, { 123.9,441.6 }, { 119.8,442.3 }, { 110.7,445.1 }, { 101.1,449.5 }, { 91.37,455.2 }, { 82.37,461.9 }, { 74.66,469.2 }, { 71.57,473 }, { 68.98,476.8 }, { 67.03,480.7 }, { 65.81,484.4 }, 
{ 65.45,488.2 }, { 65.95,491.7 }, { 67.46,495.1 }, { 69.98,498.3 }, { 73.66,501.3 }, { 78.55,503.9 }, { 84.82,506.3 }, { 92.38,508.2 }, { 107.1,511.6 }, { 118.2,514.8 }, { 125.9,517.8 }, { 130.7,520.4 }, { 132.1,521.7 }, { 132.8,522.9 }, { 133,524.2 }, { 132.6,525.3 }, { 131.8,526.5 }, { 130.5,527.5 }, { 126.6,529.6 }, { 121.5,531.7 }, { 115.3,533.7 }, { 101.4,537.6 }, { 87.55,541.8 }, { 81.36,544 }, { 76.25,546.3 }, { 71.64,549.5 }, { 66.89,554.1 }, { 62.14,559.8 }, { 57.38,566.1 }, { 48.17,579.6 }, { 39.96,591.4 }, { 36.43,595.9 }, { 34.78,597.6 }, { 33.26,598.8 }, { 31.9,599.6 }, { 30.67,599.9 }, { 29.59,599.7 }, { 28.66,598.8 }, { 27.86,597.4 }, { 27.29,595.2 }, { 26.64,588.7 }, { 26.86,578.8 }, { 27.86,565.3 } }; DrawSetStrokeAntialias(picasso,MagickTrue); DrawSetStrokeWidth(picasso,5.904); DrawSetStrokeLineCap(picasso,RoundCap); DrawSetStrokeLineJoin(picasso,RoundJoin); (void) DrawSetStrokeDashArray(picasso,0,(const double *)NULL); (void) PixelSetColor(color,"#4000c2"); DrawSetStrokeColor(picasso,color); DrawSetFillRule(picasso,EvenOddRule); (void) PixelSetColor(color,"#ffff00"); DrawSetFillColor(picasso,color); DrawPolygon(picasso,193,points); } (void) PopDrawingWand(picasso); } (void) PopDrawingWand(picasso); } (void) PopDrawingWand(picasso); (void) MagickDrawImage(canvas,picasso); color=DestroyPixelWand(color); picasso=DestroyDrawingWand(picasso); return(MagickTrue); } int main(int argc,char **argv) { char filename[MaxTextExtent]; MagickBooleanType status; MagickWand *canvas; if (argc != 2) { (void) printf ("Usage: %s filename\n",argv[0]); exit(1); } (void) CopyMagickString(filename,argv[1],MaxTextExtent); /* Create canvas image. */ MagickWandGenesis(); canvas=NewMagickWand(); status=MagickSetSize(canvas,596,842); if (status == MagickFalse) ThrowWandException(canvas); status=MagickReadImage(canvas,"xc:white"); if (status == MagickFalse) ThrowWandException(canvas); /* Scribble on image. 
*/ status=ScribbleImage(canvas); if (status == MagickFalse) ThrowWandException(canvas); /* Set pixel depth to 8. */ status=MagickSetImageDepth(canvas,8); if (status == MagickFalse) ThrowWandException(canvas); /* Set RLE compression. */ status=MagickSetImageCompression(canvas,RLECompression); if (status == MagickFalse) ThrowWandException(canvas); /* Save image to file. */ status=MagickWriteImage(canvas,filename); if (status == MagickFalse) ThrowWandException(canvas); canvas=DestroyMagickWand(canvas); MagickWandTerminus(); return(0); }
monich/ImageMagick
tests/drawtest.c
C
apache-2.0
20,314
.splunk-toolkit-punchcard .axis path,.axis line { fill: none; stroke: #b6b6b6; shape-rendering: crispEdges; } .splunk-toolkit-punchcard .tick text { fill: #999; } .splunk-toolkit-punchcard g.dimension.active { cursor: pointer; } .splunk-toolkit-punchcard text.label { font-size: 12px; cursor: pointer; } .splunk-toolkit-punchcard text.value { font-size: 11px; } .splunk-toolkit-punchcard svg { margin: 0px auto; display: block; }
splunk/splunk-webframework-toolkit
splunk_wftoolkit/django/splunk_wftoolkit/static/splunk_wftoolkit/components/punchcard/punchcard.css
CSS
apache-2.0
450
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/external/gtest.h> #include <aws/core/utils/logging/DefaultLogSystem.h> #include <aws/core/utils/logging/LogMacros.h> #include <aws/core/utils/logging/CRTLogging.h> #include <aws/core/utils/logging/CRTLogSystem.h> #include <aws/core/utils/Array.h> #include <aws/core/utils/memory/AWSMemory.h> #include <aws/core/utils/StringUtils.h> #include <cstdarg> #include <thread> using namespace Aws::Utils; using namespace Aws::Utils::Logging; static const char* AllocationTag = "LoggingTests"; class ScopedLogger { public: ScopedLogger(const std::shared_ptr<LogSystemInterface>& logger) { Aws::Utils::Logging::PushLogger(logger); } ~ScopedLogger() { Aws::Utils::Logging::PopLogger(); } }; class MockCRTLogSystem : public DefaultCRTLogSystem { public: MockCRTLogSystem(LogLevel logLevel, std::shared_ptr<Aws::StringStream> crtLogs) : DefaultCRTLogSystem(logLevel), m_localLogs(crtLogs) {} void Log(LogLevel logLevel, const char* subjectName, const char* formatStr, va_list args) override { va_list tmp_args; va_copy(tmp_args, args); #ifdef _WIN32 const int requiredLength = _vscprintf(formatStr, tmp_args) + 1; #else const int requiredLength = vsnprintf(nullptr, 0, formatStr, tmp_args) + 1; #endif va_end(tmp_args); Array<char> outputBuff(requiredLength); #ifdef _WIN32 vsnprintf_s(outputBuff.GetUnderlyingData(), requiredLength, _TRUNCATE, formatStr, args); #else vsnprintf(outputBuff.GetUnderlyingData(), requiredLength, formatStr, args); #endif // WIN32 Aws::OStringStream logStream; logStream << outputBuff.GetUnderlyingData(); *m_localLogs << outputBuff.GetUnderlyingData() << std::endl; Logging::GetLogSystem()->LogStream(logLevel, subjectName, logStream); } private: std::shared_ptr<Aws::StringStream> m_localLogs; }; class ScopedCRTLogger { public: ScopedCRTLogger(const std::shared_ptr<CRTLogSystemInterface>& crtLogger) { Aws::Utils::Logging::ShutdownCRTLogging(); 
Aws::Utils::Logging::InitializeCRTLogging(crtLogger); } ~ScopedCRTLogger() { Aws::Utils::Logging::ShutdownCRTLogging(); Aws::Utils::Logging::InitializeCRTLogging(Aws::MakeShared<Aws::Utils::Logging::DefaultCRTLogSystem>(AllocationTag, Aws::Utils::Logging::LogLevel::Trace)); } }; void LogAllPossibilities(const char* tag) { AWS_LOG_FATAL(tag, "test fatal level"); AWS_LOG_FATAL(tag, "test %s format level", "fatal"); AWS_LOGSTREAM_FATAL(tag, "test " << "fatal " << "stream level" ); AWS_LOG_ERROR(tag, "test error level"); AWS_LOG_ERROR(tag, "test %s format level", "error"); AWS_LOGSTREAM_ERROR(tag, "test " << "error " << "stream level" ); AWS_LOG_WARN(tag, "test warn level"); AWS_LOG_WARN(tag, "test %s format level", "warn"); AWS_LOGSTREAM_WARN(tag, "test " << "warn " << "stream level" ); AWS_LOG_INFO(tag, "test info level"); AWS_LOG_INFO(tag, "test %s format level", "info"); AWS_LOGSTREAM_INFO(tag, "test " << "info " << "stream level" ); AWS_LOG_DEBUG(tag, "test debug level"); AWS_LOG_DEBUG(tag, "test %s format level", "debug"); AWS_LOGSTREAM_DEBUG(tag, "test " << "debug " << "stream level" ); AWS_LOG_TRACE(tag, "test trace level"); AWS_LOG_TRACE(tag, "test %s format level", "trace"); AWS_LOGSTREAM_TRACE(tag, "test " << "trace " << "stream level" ); AWS_LOG_FLUSH(); AWS_LOGSTREAM_FLUSH(); } void VerifyAllLogsAtOrBelow(LogLevel logLevel, const Aws::String& tag, const Aws::Vector<Aws::String>& loggedStatements) { static const uint32_t STATEMENTS_PER_LEVEL = 3; uint32_t expectedLogLevels = static_cast<uint32_t>(logLevel); uint32_t expectedStatementCount = expectedLogLevels * STATEMENTS_PER_LEVEL; ASSERT_EQ(expectedStatementCount, loggedStatements.size()); for(uint32_t i = 0; i < expectedLogLevels; ++i) { LogLevel currentLevel = static_cast<LogLevel>(i + 1); Aws::String levelTag = "[" + GetLogLevelName(currentLevel) + "]"; for(uint32_t j = 0; j < STATEMENTS_PER_LEVEL; ++j) { uint32_t statementIndex = i * STATEMENTS_PER_LEVEL + j; 
ASSERT_TRUE(loggedStatements[statementIndex].find(levelTag) != Aws::String::npos); ASSERT_TRUE(loggedStatements[statementIndex].find(tag) != Aws::String::npos); } Aws::String logText1 = "test " + StringUtils::ToLower(GetLogLevelName(currentLevel).c_str()) + " level"; ASSERT_TRUE(loggedStatements[i * STATEMENTS_PER_LEVEL].find(logText1) != Aws::String::npos); Aws::String logText2 = "test " + StringUtils::ToLower(GetLogLevelName(currentLevel).c_str()) + " format level"; ASSERT_TRUE(loggedStatements[i * STATEMENTS_PER_LEVEL + 1].find(logText2) != Aws::String::npos); Aws::String logText3 = "test " + StringUtils::ToLower(GetLogLevelName(currentLevel).c_str()) + " stream level"; ASSERT_TRUE(loggedStatements[i * STATEMENTS_PER_LEVEL + 2].find(logText3) != Aws::String::npos); } } void DoLogTest(LogLevel logLevel, const char *testTag) { auto ss = Aws::MakeShared<Aws::StringStream>(AllocationTag); { ScopedLogger loggingScope(Aws::MakeShared<DefaultLogSystem>(AllocationTag, logLevel, ss)); LogAllPossibilities(testTag); } Aws::Vector<Aws::String> loggedStatements = StringUtils::SplitOnLine(ss->str()); VerifyAllLogsAtOrBelow(logLevel, testTag, loggedStatements); } TEST(LoggingTest, testFatalLogLevel) { DoLogTest(LogLevel::Fatal, "LoggingTest_testFatalLogLevel"); } TEST(LoggingTest, testErrorLogLevel) { DoLogTest(LogLevel::Error, "LoggingTest_testErrorLogLevel"); } TEST(LoggingTest, testWarnLogLevel) { DoLogTest(LogLevel::Warn, "LoggingTest_testWarnLogLevel"); } TEST(LoggingTest, testInfoLogLevel) { DoLogTest(LogLevel::Info, "LoggingTest_testInfoLogLevel"); } TEST(LoggingTest, testDebugLogLevel) { DoLogTest(LogLevel::Debug, "LoggingTest_testDebugLogLevel"); } TEST(LoggingTest, testTraceLogLevel) { DoLogTest(LogLevel::Trace, "LoggingTest_testTraceLogLevel"); } void CRTLogAllPossibilities() { AWS_LOGF_FATAL(AWS_LS_COMMON_GENERAL, "test fatal level"); AWS_LOGF_FATAL(AWS_LS_COMMON_GENERAL, "test %s format level", "fatal"); AWS_LOGF_ERROR(AWS_LS_COMMON_GENERAL, "test error level"); 
AWS_LOGF_ERROR(AWS_LS_COMMON_GENERAL, "test %s format level", "error"); AWS_LOGF_WARN(AWS_LS_COMMON_GENERAL, "test warn level"); AWS_LOGF_WARN(AWS_LS_COMMON_GENERAL, "test %s format level", "warn"); AWS_LOGF_INFO(AWS_LS_COMMON_GENERAL, "test info level"); AWS_LOGF_INFO(AWS_LS_COMMON_GENERAL, "test %s format level", "info"); AWS_LOGF_DEBUG(AWS_LS_COMMON_GENERAL, "test debug level"); AWS_LOGF_DEBUG(AWS_LS_COMMON_GENERAL, "test %s format level", "debug"); AWS_LOGF_TRACE(AWS_LS_COMMON_GENERAL, "test trace level"); AWS_LOGF_TRACE(AWS_LS_COMMON_GENERAL, "test %s format level", "trace"); AWS_LOG_FLUSH(); AWS_LOGSTREAM_FLUSH(); } void VerifyAllCRTLogsAtOrBelow(LogLevel logLevel, const Aws::Vector<Aws::String>& loggedStatements, const Aws::Vector<Aws::String>& crtLoggedStatements) { static const uint32_t STATEMENTS_PER_LEVEL = 2; uint32_t expectedLogLevels = static_cast<uint32_t>(logLevel); uint32_t expectedStatementCount = expectedLogLevels * STATEMENTS_PER_LEVEL; ASSERT_EQ(expectedStatementCount, loggedStatements.size()); ASSERT_EQ(expectedStatementCount, crtLoggedStatements.size()); for(uint32_t i = 0; i < expectedLogLevels; ++i) { LogLevel currentLevel = static_cast<LogLevel>(i + 1); Aws::String levelTag = "[" + GetLogLevelName(currentLevel) + "]"; for(uint32_t j = 0; j < STATEMENTS_PER_LEVEL; ++j) { uint32_t statementIndex = i * STATEMENTS_PER_LEVEL + j; ASSERT_TRUE(loggedStatements[statementIndex].find(levelTag) != Aws::String::npos); ASSERT_TRUE(loggedStatements[statementIndex].find("aws-c-common") != Aws::String::npos); } Aws::String logText1 = "test " + StringUtils::ToLower(GetLogLevelName(currentLevel).c_str()) + " level"; ASSERT_TRUE(loggedStatements[i * STATEMENTS_PER_LEVEL].find(logText1) != Aws::String::npos); ASSERT_TRUE(crtLoggedStatements[i * STATEMENTS_PER_LEVEL].find(logText1) != Aws::String::npos); Aws::String logText2 = "test " + StringUtils::ToLower(GetLogLevelName(currentLevel).c_str()) + " format level"; ASSERT_TRUE(loggedStatements[i * 
STATEMENTS_PER_LEVEL + 1].find(logText2) != Aws::String::npos); ASSERT_TRUE(crtLoggedStatements[i * STATEMENTS_PER_LEVEL + 1].find(logText2) != Aws::String::npos); } } void DoCRTLogTest(LogLevel logLevel) { auto logs = Aws::MakeShared<Aws::StringStream>(AllocationTag); auto crtLogs = Aws::MakeShared<Aws::StringStream>(AllocationTag); { ScopedLogger loggingScope(Aws::MakeShared<DefaultLogSystem>(AllocationTag, logLevel, logs)); ScopedCRTLogger crtLoggingScope(Aws::MakeShared<MockCRTLogSystem>(AllocationTag, logLevel, crtLogs)); CRTLogAllPossibilities(); } Aws::Vector<Aws::String> loggedStatements = StringUtils::SplitOnLine(logs->str()); Aws::Vector<Aws::String> crtLoggedStatements = StringUtils::SplitOnLine(crtLogs->str()); VerifyAllCRTLogsAtOrBelow(logLevel, loggedStatements, crtLoggedStatements); } TEST(CRTLoggingTest, testFatalLogLevel) { DoCRTLogTest(LogLevel::Fatal); } TEST(CRTLoggingTest, testWarnLogLevel) { DoCRTLogTest(LogLevel::Warn); } TEST(CRTLoggingTest, testInfoLogLevel) { DoCRTLogTest(LogLevel::Info); } TEST(CRTLoggingTest, testDebugLogLevel) { DoCRTLogTest(LogLevel::Debug); } TEST(CRTLoggingTest, testTraceLogLevel) { DoCRTLogTest(LogLevel::Trace); }
aws/aws-sdk-cpp
aws-cpp-sdk-core-tests/utils/logging/LoggingTest.cpp
C++
apache-2.0
10,078
// This file is part of libfringe, a low-level green threading library. // Copyright (c) whitequark <whitequark@whitequark.org>, // Nathan Zadoks <nathan@nathan7.eu> // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. extern crate fringe; use fringe::{SliceStack, OwnedStack, OsStack}; use fringe::generator::{Generator, Yielder}; fn add_one_fn(yielder: &mut Yielder<i32, i32>, mut input: i32) { loop { if input == 0 { break } input = yielder.suspend(input + 1) } } fn new_add_one() -> Generator<i32, i32, OsStack> { let stack = OsStack::new(0).unwrap(); Generator::new(stack, add_one_fn) } #[test] fn generator() { let mut add_one = new_add_one(); assert_eq!(add_one.resume(1), Some(2)); assert_eq!(add_one.resume(2), Some(3)); assert_eq!(add_one.resume(0), None); } #[test] fn move_after_new() { let mut add_one = new_add_one(); assert_eq!(add_one.resume(1), Some(2)); #[inline(never)] fn run_moved(mut add_one: Generator<i32, i32, OsStack>) { assert_eq!(add_one.resume(2), Some(3)); assert_eq!(add_one.resume(3), Some(4)); assert_eq!(add_one.resume(0), None); } run_moved(add_one); } #[test] #[should_panic] fn panic_safety() { struct Wrapper { gen: Generator<(), (), OsStack> } impl Drop for Wrapper { fn drop(&mut self) { self.gen.resume(()); } } let stack = OsStack::new(4 << 20).unwrap(); let gen = Generator::new(stack, move |_yielder, ()| { panic!("foo") }); let mut wrapper = Wrapper { gen: gen }; wrapper.gen.resume(()); } #[test] fn with_slice_stack() { let mut memory = [0; 1024]; let stack = SliceStack(&mut memory); let mut add_one = unsafe { Generator::unsafe_new(stack, add_one_fn) }; assert_eq!(add_one.resume(1), Some(2)); assert_eq!(add_one.resume(2), Some(3)); } #[test] fn with_owned_stack() { let stack = OwnedStack::new(1024); let 
mut add_one = unsafe { Generator::unsafe_new(stack, add_one_fn) }; assert_eq!(add_one.resume(1), Some(2)); assert_eq!(add_one.resume(2), Some(3)); } #[test] fn forget_yielded() { struct Dropper(*mut bool); unsafe impl Send for Dropper {} impl Drop for Dropper { fn drop(&mut self) { unsafe { if *self.0 { panic!("double drop!") } *self.0 = true; } } } let stack = fringe::OsStack::new(1<<16).unwrap(); let mut generator = Generator::new(stack, |yielder, ()| { let mut flag = false; yielder.suspend(Dropper(&mut flag as *mut bool)); }); generator.resume(()); generator.resume(()); }
QuiltOS/libfringe
tests/generator.rs
Rust
apache-2.0
2,795
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #pragma once #include <aws/comprehendmedical/ComprehendMedical_EXPORTS.h> #include <aws/comprehendmedical/ComprehendMedicalRequest.h> #include <aws/core/utils/memory/stl/AWSString.h> #include <utility> namespace Aws { namespace ComprehendMedical { namespace Model { /** */ class AWS_COMPREHENDMEDICAL_API DescribePHIDetectionJobRequest : public ComprehendMedicalRequest { public: DescribePHIDetectionJobRequest(); // Service request name is the Operation name which will send this request out, // each operation should has unique request name, so that we can get operation's name from this request. // Note: this is not true for response, multiple operations may have the same response name, // so we can not get operation's name from response. inline virtual const char* GetServiceRequestName() const override { return "DescribePHIDetectionJob"; } Aws::String SerializePayload() const override; Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override; /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline const Aws::String& GetJobId() const{ return m_jobId; } /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline bool JobIdHasBeenSet() const { return m_jobIdHasBeenSet; } /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline void SetJobId(const Aws::String& value) { m_jobIdHasBeenSet = true; m_jobId = value; } /** * <p>The identifier that Amazon Comprehend Medical generated for the job. 
The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline void SetJobId(Aws::String&& value) { m_jobIdHasBeenSet = true; m_jobId = std::move(value); } /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline void SetJobId(const char* value) { m_jobIdHasBeenSet = true; m_jobId.assign(value); } /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline DescribePHIDetectionJobRequest& WithJobId(const Aws::String& value) { SetJobId(value); return *this;} /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline DescribePHIDetectionJobRequest& WithJobId(Aws::String&& value) { SetJobId(std::move(value)); return *this;} /** * <p>The identifier that Amazon Comprehend Medical generated for the job. The * <code>StartPHIDetectionJob</code> operation returns this identifier in its * response.</p> */ inline DescribePHIDetectionJobRequest& WithJobId(const char* value) { SetJobId(value); return *this;} private: Aws::String m_jobId; bool m_jobIdHasBeenSet; }; } // namespace Model } // namespace ComprehendMedical } // namespace Aws
awslabs/aws-sdk-cpp
aws-cpp-sdk-comprehendmedical/include/aws/comprehendmedical/model/DescribePHIDetectionJobRequest.h
C
apache-2.0
3,686
import { triggerPixel, parseSizesInput, deepAccess, logError, getGptSlotInfoForAdUnitCode } from '../src/utils.js';
import { registerBidder } from '../src/adapters/bidderFactory.js';
import { config } from '../src/config.js';
import { BANNER, VIDEO } from '../src/mediaTypes.js';
import { INSTREAM as VIDEO_INSTREAM } from '../src/video.js';

const BIDDER_CODE = 'visx';
const GVLID = 154;
const BASE_URL = 'https://t.visx.net';
const DEBUG_URL = 'https://t-stage.visx.net';
const ENDPOINT_PATH = '/hb_post';
const TIME_TO_LIVE = 360;
const DEFAULT_CUR = 'EUR';
const ADAPTER_SYNC_PATH = '/push_sync';
const TRACK_TIMEOUT_PATH = '/track/bid_timeout';
const LOG_ERROR_MESS = {
  noAuid: 'Bid from response has no auid parameter - ',
  noAdm: 'Bid from response has no adm parameter - ',
  noBid: 'Array of bid objects is empty',
  noImpId: 'Bid from response has no impid parameter - ',
  noPlacementCode: 'Can\'t find in requested bids the bid with auid - ',
  emptyUids: 'Uids should not be empty',
  emptySeatbid: 'Seatbid array from response has an empty item',
  emptyResponse: 'Response is empty',
  hasEmptySeatbidArray: 'Response has empty seatbid array',
  hasNoArrayOfBids: 'Seatbid from response has no array of bid objects - ',
  notAllowedCurrency: 'Currency is not supported - ',
  currencyMismatch: 'Currency from the request is not match currency from the response - ',
  onlyVideoInstream: `Only video ${VIDEO_INSTREAM} supported`,
  videoMissing: 'Bid request videoType property is missing - '
};
const currencyWhiteList = ['EUR', 'USD', 'GBP', 'PLN'];

export const spec = {
  code: BIDDER_CODE,
  gvlid: GVLID,
  supportedMediaTypes: [BANNER, VIDEO],
  isBidRequestValid: function(bid) {
    if (_isVideoBid(bid)) {
      if (!_isValidVideoBid(bid, true)) {
        // in case if video bid configuration invalid will try to send bid request for banner
        if (!_isBannerBid(bid)) {
          return false;
        }
      }
    }
    return !!bid.params.uid && !isNaN(parseInt(bid.params.uid));
  },
  buildRequests: function(validBidRequests, bidderRequest) {
    const auids = [];
    const bidsMap = {};
    const bids = validBidRequests || [];
    const currency =
      config.getConfig(`currency.bidderCurrencyDefault.${BIDDER_CODE}`) ||
      config.getConfig('currency.adServerCurrency') ||
      DEFAULT_CUR;
    let reqId;
    let payloadSchain;
    let payloadUserId;
    let payloadUserEids;
    let timeout;

    if (currencyWhiteList.indexOf(currency) === -1) {
      logError(LOG_ERROR_MESS.notAllowedCurrency + currency);
      return;
    }

    const imp = [];

    bids.forEach(bid => {
      reqId = bid.bidderRequestId;
      const impObj = buildImpObject(bid);
      if (impObj) {
        imp.push(impObj);
        bidsMap[bid.bidId] = bid;
      }
      const { params: { uid }, schain, userId, userIdAsEids } = bid;
      // Keep the first schain / user ids seen; they are request-level values.
      if (!payloadSchain && schain) {
        payloadSchain = schain;
      }
      if (!payloadUserEids && userIdAsEids) {
        payloadUserEids = userIdAsEids;
      }
      if (!payloadUserId && userId) {
        payloadUserId = userId;
      }
      auids.push(uid);
    });

    const payload = {};

    if (bidderRequest) {
      timeout = bidderRequest.timeout;
      if (bidderRequest.refererInfo && bidderRequest.refererInfo.referer) {
        payload.u = bidderRequest.refererInfo.referer;
      }
      if (bidderRequest.gdprConsent) {
        if (bidderRequest.gdprConsent.consentString) {
          payload.gdpr_consent = bidderRequest.gdprConsent.consentString;
        }
        // Default to "GDPR applies" when the CMP did not give a boolean.
        payload.gdpr_applies =
          (typeof bidderRequest.gdprConsent.gdprApplies === 'boolean')
            ? Number(bidderRequest.gdprConsent.gdprApplies) : 1;
      }
    }

    const bidderTimeout = Number(config.getConfig('bidderTimeout')) || timeout;
    const tmax = timeout ? Math.min(bidderTimeout, timeout) : bidderTimeout;
    const source = {
      ext: {
        wrapperType: 'Prebid_js',
        wrapperVersion: '$prebid.version$',
        ...(payloadSchain && { schain: payloadSchain })
      }
    };
    const user = {
      ext: {
        ...(payloadUserEids && { eids: payloadUserEids }),
        ...(payload.gdpr_consent && { consent: payload.gdpr_consent })
      }
    };
    const regs = ('gdpr_applies' in payload) && {
      ext: {
        gdpr: payload.gdpr_applies
      }
    };
    const request = {
      id: reqId,
      imp,
      tmax,
      cur: [currency],
      source,
      site: { page: payload.u },
      ...(Object.keys(user.ext).length && { user }),
      ...(regs && { regs })
    };

    return {
      method: 'POST',
      url: buildUrl(ENDPOINT_PATH) + '?auids=' + encodeURIComponent(auids.join(',')),
      data: request,
      bidsMap
    };
  },
  interpretResponse: function(serverResponse, bidRequest) {
    serverResponse = serverResponse && serverResponse.body;
    const bidResponses = [];
    const bidsMap = bidRequest.bidsMap;
    const currency = bidRequest.data.cur[0];
    let errorMessage;

    if (!serverResponse) errorMessage = LOG_ERROR_MESS.emptyResponse;
    else if (serverResponse.seatbid && !serverResponse.seatbid.length) {
      errorMessage = LOG_ERROR_MESS.hasEmptySeatbidArray;
    }

    if (!errorMessage && serverResponse.seatbid) {
      serverResponse.seatbid.forEach(respItem => {
        _addBidResponse(_getBidFromResponse(respItem), bidsMap, currency, bidResponses);
      });
    }
    if (errorMessage) logError(errorMessage);
    return bidResponses;
  },
  getUserSyncs: function(syncOptions, serverResponses, gdprConsent) {
    var query = [];
    if (gdprConsent) {
      if (gdprConsent.consentString) {
        query.push('gdpr_consent=' + encodeURIComponent(gdprConsent.consentString));
      }
      query.push('gdpr_applies=' + encodeURIComponent(
        (typeof gdprConsent.gdprApplies === 'boolean')
          ? Number(gdprConsent.gdprApplies) : 1));
    }
    if (syncOptions.iframeEnabled) {
      return [{
        type: 'iframe',
        url: buildUrl(ADAPTER_SYNC_PATH) + '?iframe=1' + (query.length ? '&' + query.join('&') : '')
      }];
    } else if (syncOptions.pixelEnabled) {
      return [{
        type: 'image',
        url: buildUrl(ADAPTER_SYNC_PATH) + (query.length ? '?' + query.join('&') : '')
      }];
    }
  },
  onSetTargeting: function(bid) {
    // Call '/track/pending' with the corresponding bid.requestId
    if (bid.ext && bid.ext.events && bid.ext.events.pending) {
      triggerPixel(bid.ext.events.pending);
    }
  },
  onBidWon: function(bid) {
    // Call '/track/win' with the corresponding bid.requestId
    if (bid.ext && bid.ext.events && bid.ext.events.win) {
      triggerPixel(bid.ext.events.win);
    }
  },
  onTimeout: function(timeoutData) {
    // Call '/track/bid_timeout' with timeout data
    timeoutData.forEach(({ params }) => {
      if (params) {
        params.forEach((item) => {
          if (item && item.uid) {
            item.uid = parseInt(item.uid);
          }
        });
      }
    });
    triggerPixel(buildUrl(TRACK_TIMEOUT_PATH) + '//' + JSON.stringify(timeoutData));
  }
};

// Choose the stage endpoint when Prebid runs in devMode.
function buildUrl(path) {
  return (config.getConfig('devMode') ? DEBUG_URL : BASE_URL) + path;
}

// Convert Prebid banner sizes into an OpenRTB banner.format array,
// or undefined when no usable sizes are present.
function makeBanner(bannerParams) {
  const bannerSizes = bannerParams && bannerParams.sizes;
  if (bannerSizes) {
    const sizes = parseSizesInput(bannerSizes);
    if (sizes.length) {
      const format = sizes.map(size => {
        const [ width, height ] = size.split('x');
        const w = parseInt(width, 10);
        const h = parseInt(height, 10);
        return { w, h };
      });
      return { format };
    }
  }
}

// Build the OpenRTB video object from mediaTypes.video; w/h come from
// playerSize[0]. Returns undefined when the player size is missing.
function makeVideo(videoParams = {}) {
  const video = Object.keys(videoParams).filter((param) => param !== 'context' && param !== 'playerSize')
    .reduce((result, param) => {
      result[param] = videoParams[param];
      return result;
    }, {
      w: deepAccess(videoParams, 'playerSize.0.0'),
      h: deepAccess(videoParams, 'playerSize.0.1')
    });
  if (video.w && video.h) {
    return video;
  }
}

// Build one OpenRTB imp object for a bid request; returns undefined when
// the bid has neither a valid banner nor a valid video configuration.
function buildImpObject(bid) {
  const { params: { uid }, bidId, mediaTypes, sizes, adUnitCode } = bid;
  const video = mediaTypes && _isVideoBid(bid) && _isValidVideoBid(bid) && makeVideo(mediaTypes.video);
  const banner = makeBanner((mediaTypes && mediaTypes.banner) || (!video && { sizes }));
  const impObject = {
    id: bidId,
    ...(banner && { banner }),
    ...(video && { video }),
    ext: {
      bidder: { uid: parseInt(uid) },
    }
  };
  if (impObject.banner) {
    impObject.ext.bidder.adslotExists = _isAdSlotExists(adUnitCode);
  }
  if (impObject.ext.bidder.uid && (impObject.banner || impObject.video)) {
    return impObject;
  }
}

// Extract the first bid from a seatbid item, logging the specific
// malformation when the item is unusable.
function _getBidFromResponse(respItem) {
  if (!respItem) {
    logError(LOG_ERROR_MESS.emptySeatbid);
  } else if (!respItem.bid) {
    logError(LOG_ERROR_MESS.hasNoArrayOfBids + JSON.stringify(respItem));
  } else if (!respItem.bid[0]) {
    logError(LOG_ERROR_MESS.noBid);
  }
  return respItem && respItem.bid && respItem.bid[0];
}

// Validate one server bid and push the corresponding Prebid bid response.
function _addBidResponse(serverBid, bidsMap, currency, bidResponses) {
  if (!serverBid) return;
  let errorMessage;
  if (!serverBid.auid) errorMessage = LOG_ERROR_MESS.noAuid + JSON.stringify(serverBid);
  if (!serverBid.impid) errorMessage = LOG_ERROR_MESS.noImpId + JSON.stringify(serverBid);
  if (!serverBid.adm) errorMessage = LOG_ERROR_MESS.noAdm + JSON.stringify(serverBid);
  else {
    const reqCurrency = currency || DEFAULT_CUR;
    const bid = bidsMap[serverBid.impid];
    if (bid) {
      if (serverBid.cur && serverBid.cur !== reqCurrency) {
        errorMessage = LOG_ERROR_MESS.currencyMismatch + reqCurrency + ' - ' + serverBid.cur;
      } else {
        const bidResponse = {
          requestId: bid.bidId,
          cpm: serverBid.price,
          width: serverBid.w,
          height: serverBid.h,
          creativeId: serverBid.auid,
          currency: reqCurrency,
          netRevenue: true,
          ttl: TIME_TO_LIVE,
          dealId: serverBid.dealid,
          meta: {
            advertiserDomains: serverBid.advertiserDomains ? serverBid.advertiserDomains : [],
            mediaType: serverBid.mediaType
          },
        };
        if (serverBid.ext && serverBid.ext.prebid) {
          bidResponse.ext = serverBid.ext.prebid;
        }
        const visxTargeting = deepAccess(serverBid, 'ext.prebid.targeting');
        if (visxTargeting) {
          bidResponse.adserverTargeting = visxTargeting;
        }
        if (!_isVideoInstreamBid(bid)) {
          bidResponse.ad = serverBid.adm;
        } else {
          bidResponse.vastXml = serverBid.adm;
          bidResponse.mediaType = 'video';
        }
        bidResponses.push(bidResponse);
      }
    } else {
      errorMessage = LOG_ERROR_MESS.noPlacementCode + serverBid.auid;
    }
  }
  if (errorMessage) {
    logError(errorMessage);
  }
}

function _isVideoBid(bid) {
  return bid.mediaType === VIDEO || deepAccess(bid, 'mediaTypes.video');
}

function _isVideoInstreamBid(bid) {
  return _isVideoBid(bid) && deepAccess(bid, 'mediaTypes.video', {}).context === VIDEO_INSTREAM;
}

function _isBannerBid(bid) {
  return bid.mediaType === BANNER || deepAccess(bid, 'mediaTypes.banner');
}

function _isValidVideoBid(bid, logErrors = false) {
  let result = true;
  const videoMediaType = deepAccess(bid, 'mediaTypes.video');
  if (!_isVideoInstreamBid(bid)) {
    if (logErrors) {
      logError(LOG_ERROR_MESS.onlyVideoInstream);
    }
    result = false;
  }
  // FIX: guard against a missing mediaTypes.video object. A bid marked
  // video via bid.mediaType but without mediaTypes.video used to throw a
  // TypeError here (videoMediaType.playerSize on undefined) inside
  // isBidRequestValid; now it is reported as a missing playerSize instead.
  if (!(videoMediaType && videoMediaType.playerSize && parseSizesInput(deepAccess(videoMediaType, 'playerSize', [])))) {
    if (logErrors) {
      logError(LOG_ERROR_MESS.videoMissing + 'playerSize');
    }
    result = false;
  }
  return result;
}

// True when a DOM element for the ad unit (directly, or via its mapped GPT
// slot div) exists on the page.
function _isAdSlotExists(adUnitCode) {
  if (document.getElementById(adUnitCode)) {
    return true;
  }
  const gptAdSlot = getGptSlotInfoForAdUnitCode(adUnitCode);
  if (gptAdSlot && gptAdSlot.divId && document.getElementById(gptAdSlot.divId)) {
    return true;
  }
  return false;
}

registerBidder(spec);
prebid/Prebid.js
modules/visxBidAdapter.js
JavaScript
apache-2.0
12,057
package com.huassit.imenu.android.http;

import java.io.IOException;
import java.util.List;

import org.apache.http.NameValuePair;
import org.json.JSONException;
import org.json.JSONObject;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Handler;
import android.util.Log;

import com.huassit.imenu.android.BaseActivity;
import com.huassit.imenu.android.MyApplication;
import com.huassit.imenu.android.service.MessageService;
import com.huassit.imenu.android.ui.LoginActivity;
import com.huassit.imenu.android.ui.MainActivity;
import com.huassit.imenu.android.util.ActivityStackManager;
import com.huassit.imenu.android.util.PreferencesUtils;
import com.huassit.imenu.android.util.StringUtils;

/**
 * Base class for HTTP request invokers. Each concrete subclass describes one
 * API call (URL, parameters, HTTP method) and implements
 * {@link #handleResponse(String)} for its successful payload. The request runs
 * on this Thread; results/errors are posted back through the supplied Handler.
 */
public abstract class BaseInvoker extends Thread {

	/** Default message `what` value posted on a successful request. */
	public static final int ON_REQUEST_SUCCESS = 1000;

	/** Default message `what` value posted on a failed request. */
	public static final int ON_REQUEST_FAILURE = 1001;

	/** Enumeration of the supported HTTP request methods. */
	public static enum HTTP_METHOD {
		METHOD_POST, METHOD_GET, METHOD_UPLOAD_IMAGE
	}

	/** Context used for preferences, services and (when an Activity) UI dialogs. */
	protected Context ctx;

	/** Handler that receives the asynchronous request result; null after cancel(). */
	private Handler handler;

	/** `what` value used for success messages (overridable via asyncInvoke). */
	protected int ON_SUCCESS = ON_REQUEST_SUCCESS;

	/** `what` value used for failure messages (overridable via asyncInvoke). */
	protected int ON_FAILURE = ON_REQUEST_FAILURE;

	/** @return the name/value parameters to send with the request */
	public abstract List<NameValuePair> getParameters();

	/** @return the API URL to request */
	public abstract String getRequestUrl();

	/** @return which HTTP method this invoker uses */
	public abstract HTTP_METHOD getRequestMethod();

	/**
	 * @return the file to upload for METHOD_UPLOAD_IMAGE requests;
	 *         default implementation returns null (nothing to upload)
	 */
	public NameValuePair getUploadFiles() {
		return null;
	}

	/**
	 * Whether this request participates in the shared server session
	 * (reuses/stores the HttpClient and HttpPost kept on MyApplication).
	 */
	protected boolean isServerHasSession() {
		return false;
	}

	private HttpInvoker httpInvoker;

	public BaseInvoker(Context ctx, Handler handler) {
		this.ctx = ctx;
		this.handler = handler;
		httpInvoker = new HttpInvoker();
	}

	/**
	 * Drops the result handler and closes the underlying connection.
	 * A response arriving after this call is silently discarded.
	 */
	public void cancel() {
		handler = null;
		httpInvoker.close();
	}

	/** Posts a message to the handler, if one is still attached. */
	public void sendMessage(int what, Object obj) {
		if (handler != null)
			handler.sendMessage(handler.obtainMessage(what, obj));
	}

	@Override
	public void run() {
		execute();
	}

	/**
	 * Performs the HTTP call for the configured method, then dispatches the
	 * result: empty body -> failure message; "invalid token" error -> clears
	 * the stored token, stops the message service and prompts re-login;
	 * otherwise -> handleResponse(). JSON parse errors are only logged.
	 */
	private void execute() {
		try {
			String result = "";
			if (getRequestMethod() == HTTP_METHOD.METHOD_GET) {
				result = httpInvoker.parseResponse(httpInvoker.doHttpGet(getRequestUrl()));
			} else if (getRequestMethod() == HTTP_METHOD.METHOD_POST) {
				Log.e("BaseInvoker", getParameters().toString());
				// Session-bound requests reuse the client/post objects stashed
				// on MyApplication so server-side session cookies survive.
				if (isServerHasSession()) {
					if (MyApplication.httpClient != null) {
						httpInvoker.setHttpClient(MyApplication.httpClient);
					}
					httpInvoker.setHttpPost(MyApplication.httpPost);
				}
				result = httpInvoker.parseResponse(httpInvoker.doHttpPost(getRequestUrl(), getParameters()));
				if (isServerHasSession()) {
					MyApplication.httpClient = httpInvoker.getHttpClient();
					MyApplication.httpPost = httpInvoker.getHttpPost();
				} else {
					MyApplication.httpClient = null;
					MyApplication.httpPost = null;
				}
				Log.e("BaseInvoker", result);
			} else if (getRequestMethod() == HTTP_METHOD.METHOD_UPLOAD_IMAGE) {
				result = httpInvoker.parseResponse(httpInvoker.doImageUpload(getRequestUrl(), getParameters(), getUploadFiles()));
			}
			if (result.equals("")) {
				if (handler != null)
					handler.sendMessage(handler.obtainMessage(ON_FAILURE, "response null"));
			} else {
				try {
					JSONObject jsonObject = new JSONObject(result);
					if (!StringUtils.isBlank(jsonObject.optString("error"))){
						// Server's "invalid token" sentinel: force re-login.
						// (Error string is a server-side contract; do not change.)
						if (jsonObject.optString("error").equals("12|错误:非法TOKEN!")) {
							PreferencesUtils.putString(ctx, "token", null);
							ctx.stopService(new Intent(ctx, MessageService.class));
							if(ctx instanceof Activity){
								((Activity)ctx).runOnUiThread(new Runnable() {
									@Override
									public void run() {
										((BaseActivity)ctx).showTokenDialog((Activity)ctx, "登录已过期,请重新登录", new DialogInterface.OnClickListener() {
											@Override
											public void onClick(DialogInterface dialog, int which) {
												// OK pressed: send the user back to the login screen.
												Intent intent = new Intent(ctx, LoginActivity.class);
												ctx.startActivity(intent);
											}
										});
									}
								});
							}
						}else{
							// Non-token error payloads are still handed to the subclass.
							handleResponse(result);
						}
					}else{
						handleResponse(result);
					}
				} catch (JSONException e) {
					e.printStackTrace();
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
			if (handler != null)
				handler.sendMessage(handler.obtainMessage(ON_FAILURE, e.getMessage()));
		}
	}

	/** Called with the raw response body when the request succeeded. */
	protected abstract void handleResponse(String response);

	/**
	 * Starts the request asynchronously, using custom message `what` values
	 * so callers can distinguish multiple in-flight requests on one Handler.
	 *
	 * @param successCode `what` value to post on success
	 * @param failCode    `what` value to post on failure
	 */
	synchronized public void asyncInvoke(int successCode, int failCode) {
		ON_SUCCESS = successCode;
		ON_FAILURE = failCode;
		super.start();
	}

	/** Starts the request asynchronously with the default what values. */
	synchronized public void asyncInvoke() {
		super.start();
	}

	/** Runs the request on the calling thread (blocking). */
	synchronized public void syncInvoke() {
		execute();
	}
}
kevin-duan/Imenu
iMenu/src/com/huassit/imenu/android/http/BaseInvoker.java
Java
apache-2.0
6,272
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.project;

import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.google.gerrit.common.ChangeHooks;
import com.google.gerrit.extensions.api.projects.PutDescriptionInput;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.extensions.restapi.Response;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.extensions.events.GitReferenceUpdated;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.MetaDataUpdate;
import com.google.gerrit.server.git.ProjectConfig;
import com.google.inject.Inject;
import com.google.inject.Singleton;

import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.ObjectId;

import java.io.IOException;
import java.util.Objects;

/**
 * REST endpoint that sets (or clears) a project's description by committing
 * an updated project.config to the project's refs/meta/config branch.
 * Restricted to project owners.
 */
@Singleton
public class PutDescription implements
    RestModifyView<ProjectResource, PutDescriptionInput> {
  private final ProjectCache cache;
  private final MetaDataUpdate.Server updateFactory;
  private final GitRepositoryManager gitMgr;
  private final ChangeHooks hooks;
  private final GitReferenceUpdated gitRefUpdated;

  @Inject
  PutDescription(ProjectCache cache,
      MetaDataUpdate.Server updateFactory,
      ChangeHooks hooks,
      GitReferenceUpdated gitRefUpdated,
      GitRepositoryManager gitMgr) {
    this.cache = cache;
    this.updateFactory = updateFactory;
    this.hooks = hooks;
    this.gitRefUpdated = gitRefUpdated;
    this.gitMgr = gitMgr;
  }

  /**
   * Applies the description change.
   *
   * @param resource project to update
   * @param input new description and optional commit message; a null input
   *        (or empty description) clears the description
   * @return the new description, or an empty (204) response when cleared
   * @throws AuthException caller is not a project owner
   * @throws ResourceConflictException project.config is not parseable
   * @throws ResourceNotFoundException project repository does not exist
   */
  @Override
  public Response<String> apply(ProjectResource resource,
      PutDescriptionInput input) throws AuthException,
      ResourceConflictException, ResourceNotFoundException, IOException {
    if (input == null) {
      input = new PutDescriptionInput(); // Delete would set description to null.
    }

    ProjectControl ctl = resource.getControl();
    IdentifiedUser user = ctl.getUser().asIdentifiedUser();
    if (!ctl.isOwner()) {
      throw new AuthException("not project owner");
    }

    try (MetaDataUpdate md = updateFactory.create(resource.getNameKey())) {
      ProjectConfig config = ProjectConfig.read(md);
      Project project = config.getProject();
      project.setDescription(Strings.emptyToNull(input.description));

      String msg = MoreObjects.firstNonNull(
          Strings.emptyToNull(input.commitMessage),
          "Updated description.\n");
      // Git commit messages conventionally end with a newline.
      if (!msg.endsWith("\n")) {
        msg += "\n";
      }
      md.setAuthor(user);
      md.setMessage(msg);

      ObjectId baseRev = config.getRevision();
      ObjectId commitRev = config.commit(md);
      // Only fire hook if project was actually changed.
      if (!Objects.equals(baseRev, commitRev)) {
        gitRefUpdated.fire(resource.getNameKey(), RefNames.REFS_CONFIG,
            baseRev, commitRev);
        hooks.doRefUpdatedHook(
            new Branch.NameKey(resource.getNameKey(), RefNames.REFS_CONFIG),
            baseRev, commitRev, user.getAccount());
      }
      // Invalidate the cached project state and mirror the description into
      // the repository manager (e.g. for gitweb/clone listings).
      cache.evict(ctl.getProject());
      gitMgr.setProjectDescription(
          resource.getNameKey(),
          project.getDescription());

      return Strings.isNullOrEmpty(project.getDescription())
          ? Response.<String>none()
          : Response.ok(project.getDescription());
    } catch (RepositoryNotFoundException notFound) {
      throw new ResourceNotFoundException(resource.getName());
    } catch (ConfigInvalidException e) {
      throw new ResourceConflictException(String.format(
          "invalid project.config: %s", e.getMessage()));
    }
  }
}
MerritCR/merrit
gerrit-server/src/main/java/com/google/gerrit/server/project/PutDescription.java
Java
apache-2.0
4,591
/** * Cobertura - http://cobertura.sourceforge.net/ * * Copyright (C) 2003 jcoverage ltd. * Copyright (C) 2005 Mark Doliner <thekingant@users.sourceforge.net> * * Cobertura is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation; either version 2 of the License, * or (at your option) any later version. * * Cobertura is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Cobertura; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA */ package simpleMavenProject; import static org.junit.Assert.*; import org.junit.Assert.*; import org.junit.*; import java.util.Collection; import java.util.LinkedList; public class SimpleTest { final Simple simple = new Simple(); @Test public void testSquare() { assertEquals(1, simple.square(1)); assertEquals(1, simple.square(-1)); } @Test public void testF() { assertEquals(1, simple.f(-1)); assertEquals(12, simple.f(6)); } @SuppressWarnings("unchecked") @Test public void testSum() { @SuppressWarnings("rawtypes") Collection c = new LinkedList(); c.add(new Integer(3)); c.add(new Integer(5)); c.add(new Integer(8)); assertEquals(16, simple.sum(c)); } }
SERG-Delft/operias
operias-report/src/test/resources/simpleMavenProject/src/test/java/simpleMavenProject/SimpleTest.java
Java
apache-2.0
1,573
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.hdfs.bolt.rotation; import backtype.storm.tuple.Tuple; import java.io.Serializable; /** * Used by the HdfsBolt to decide when to rotate files. * * The HdfsBolt will call the <code>mark()</code> method for every * tuple received. If the <code>mark()</code> method returns * <code>true</code> the HdfsBolt will perform a file rotation. * * After file rotation, the HdfsBolt will call the <code>reset()</code> * method. */ public interface FileRotationPolicy extends Serializable { /** * Called for every tuple the HdfsBolt executes. * * @param tuple The tuple executed. * @param offset current offset of file being written * @return true if a file rotation should be performed */ boolean mark(Tuple tuple, long offset); /** * Called after the HdfsBolt rotates a file. * */ void reset(); }
ptgoetz/storm-hdfs
src/main/java/org/apache/storm/hdfs/bolt/rotation/FileRotationPolicy.java
Java
apache-2.0
1,691
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from sahara import conductor as c
from sahara import context
from sahara import exceptions as e
from sahara.i18n import _
from sahara.plugins.general import utils as plugin_utils
from sahara.plugins.spark import config_helper as c_helper
from sahara.service.edp import base_engine
from sahara.service.edp import job_utils
from sahara.service.validations.edp import job_execution as j
from sahara.utils import edp
from sahara.utils import files
from sahara.utils import general
from sahara.utils import remote

conductor = c.API


class SparkJobEngine(base_engine.JobEngine):
    """EDP engine that runs Spark jobs via spark-submit on the master node.

    Jobs are launched in the background over SSH; the engine tracks them
    with an opaque job id of the form "<pid>@<instance_id>" and reads the
    exit status from a "result" file in the job's workflow directory.
    """

    def __init__(self, cluster):
        self.cluster = cluster

    def _get_pid_and_inst_id(self, job_id):
        """Split a "<pid>@<instance_id>" job id; ("", "") if malformed."""
        try:
            pid, inst_id = job_id.split("@", 1)
            if pid and inst_id:
                return (pid, inst_id)
        except Exception:
            pass
        return "", ""

    def _get_instance_if_running(self, job_execution):
        """Return (pid, instance) for a live job, or (None, None).

        Gives (None, None) when the stored id is malformed or the job has
        already reached a terminal status.
        """
        pid, inst_id = self._get_pid_and_inst_id(job_execution.oozie_job_id)
        if not pid or not inst_id or (
           job_execution.info['status'] in edp.JOB_STATUSES_TERMINATED):
            return None, None
        # TODO(tmckay): well, if there is a list index out of range
        # error here it probably means that the instance is gone. If we
        # have a job execution that is not terminated, and the instance
        # is gone, we should probably change the status somehow.
        # For now, do nothing.
        try:
            instance = general.get_instances(self.cluster, [inst_id])[0]
        except Exception:
            instance = None
        return pid, instance

    def _get_result_file(self, r, job_execution):
        """cat the job's "result" file; returns (exit_code, stdout)."""
        result = os.path.join(job_execution.extra['spark-path'], "result")
        return r.execute_command("cat %s" % result,
                                 raise_when_error=False)

    def _check_pid(self, r, pid):
        """Return 0 if the pid is alive on the remote host (ps exit code)."""
        ret, stdout = r.execute_command("ps hp %s" % pid,
                                        raise_when_error=False)
        return ret

    def _get_job_status_from_remote(self, r, pid, job_execution):
        """Map remote pid/result-file state to an EDP job status dict."""
        # If the pid is there, it's still running
        if self._check_pid(r, pid) == 0:
            return {"status": edp.JOB_STATUS_RUNNING}

        # The process ended. Look in the result file to get the exit status
        ret, stdout = self._get_result_file(r, job_execution)
        if ret == 0:
            exit_status = stdout.strip()
            if exit_status == "0":
                return {"status": edp.JOB_STATUS_SUCCEEDED}
            # SIGINT will yield either -2 or 130
            elif exit_status in ["-2", "130"]:
                return {"status": edp.JOB_STATUS_KILLED}

        # Well, process is done and result is missing or unexpected
        return {"status": edp.JOB_STATUS_DONEWITHERROR}

    def cancel_job(self, job_execution):
        """Send SIGINT to the driver process; return updated status or None."""
        pid, instance = self._get_instance_if_running(job_execution)
        if instance is not None:
            with remote.get_remote(instance) as r:
                ret, stdout = r.execute_command("kill -SIGINT %s" % pid,
                                                raise_when_error=False)
                if ret == 0:
                    # We had some effect, check the status
                    return self._get_job_status_from_remote(r,
                                                            pid,
                                                            job_execution)

    def get_job_status(self, job_execution):
        """Return the current status dict for a running job, or None."""
        pid, instance = self._get_instance_if_running(job_execution)
        if instance is not None:
            with remote.get_remote(instance) as r:
                return self._get_job_status_from_remote(r,
                                                        pid,
                                                        job_execution)

    def _job_script(self):
        """Return the text of the launch_command wrapper script."""
        path = "service/edp/resources/launch_command.py"
        return files.get_file_text(path)

    def run_job(self, job_execution):
        """Upload job files to the master and launch via spark-submit.

        Returns (job_id, status, extra) where job_id is "<pid>@<instance_id>"
        and extra records the workflow dir under 'spark-path'.
        Raises EDPError when the background launch command fails.
        """
        ctx = context.ctx()
        job = conductor.job_get(ctx, job_execution.job_id)

        proxy_configs = job_execution.job_configs.get('proxy_configs')

        # We'll always run the driver program on the master
        master = plugin_utils.get_instance(self.cluster, "master")

        # TODO(tmckay): wf_dir should probably be configurable.
        # The only requirement is that the dir is writable by the image user
        wf_dir = job_utils.create_workflow_dir(master, '/tmp/spark-edp', job,
                                               job_execution.id)
        paths = job_utils.upload_job_files(master, wf_dir, job,
                                           libs_subdir=False,
                                           proxy_configs=proxy_configs)

        # We can shorten the paths in this case since we'll run out of wf_dir
        paths = [os.path.basename(p) for p in paths]

        # TODO(tmckay): for now, paths[0] is always assumed to be the app
        # jar and we generate paths in order (mains, then libs).
        # When we have a Spark job type, we can require a "main" and set
        # the app jar explicitly to be "main"
        app_jar = paths.pop(0)

        # The rest of the paths will be passed with --jars
        additional_jars = ",".join(paths)
        if additional_jars:
            additional_jars = "--jars " + additional_jars

        # Launch the spark job using spark-submit and deploy_mode = client
        host = master.hostname()
        port = c_helper.get_config_value("Spark", "Master port", self.cluster)
        spark_submit = os.path.join(
            c_helper.get_config_value("Spark", "Spark home", self.cluster),
            "bin/spark-submit")

        job_class = job_execution.job_configs.configs["edp.java.main_class"]

        # TODO(tmckay): we need to clean up wf_dirs on long running clusters
        # TODO(tmckay): probably allow for general options to spark-submit
        args = " ".join(job_execution.job_configs.get('args', []))

        # The redirects of stdout and stderr will preserve output in the wf_dir
        cmd = "%s %s --class %s %s --master spark://%s:%s %s" % (
            spark_submit,
            app_jar,
            job_class,
            additional_jars,
            host,
            port,
            args)

        # If an exception is raised here, the job_manager will mark
        # the job failed and log the exception
        with remote.get_remote(master) as r:
            # Upload the command launch script
            launch = os.path.join(wf_dir, "launch_command")
            r.write_file_to(launch, self._job_script())
            r.execute_command("chmod +x %s" % launch)
            # Background the launcher and echo its pid ($!) back to us.
            ret, stdout = r.execute_command(
                "cd %s; ./launch_command %s > /dev/null 2>&1 & echo $!"
                % (wf_dir, cmd))

        if ret == 0:
            # Success, we'll add the wf_dir in job_execution.extra and store
            # pid@instance_id as the job id
            # We know the job is running so return "RUNNING"
            return (stdout.strip() + "@" + master.id,
                    edp.JOB_STATUS_RUNNING,
                    {'spark-path': wf_dir})

        # Hmm, no exception but something failed.
        # Since we're using backgrounding with redirect, this is unlikely.
        raise e.EDPError(_("Spark job execution failed. Exit status = "
                           "%(status)s, stdout = %(stdout)s") %
                         {'status': ret, 'stdout': stdout})

    def validate_job_execution(self, cluster, job, data):
        """Require edp.java.main_class in the job configs."""
        j.check_main_class_present(data, job)

    @staticmethod
    def get_possible_job_config(job_type):
        """Spark jobs accept only free-form configs and args."""
        return {'job_config': {'configs': [], 'args': []}}

    @staticmethod
    def get_supported_job_types():
        return [edp.JOB_TYPE_SPARK]
keedio/sahara
sahara/service/edp/spark/engine.py
Python
apache-2.0
8,343
<?php /** * @link https://github.com/gridiron-guru/FantasyDataAPI for the canonical source repository * @copyright Copyright (c) 2014 Robert Gunnar Johnson Jr. * @license http://opensource.org/licenses/Apache-2.0 * @package FantasyDataAPI */ namespace FantasyDataAPI\Test\Schedules\Response; use GuzzleHttp\Message\Response; use GuzzleHttp\Message\RequestInterface; use GuzzleHttp\Stream; class Mock extends Response { public function __construct (RequestInterface $pRequest) { /** url parsing "formula" for Schedules */ list(, $subscription, $format, , $season) = explode( '/', $pRequest->getPath() ); $file_partial = __DIR__ . '/' . implode('.', [$subscription, $format, $season]); $headers = include($file_partial . '.header.php'); $response_code = explode(' ', $headers[0])[1];; $mocked_response = file_get_contents($file_partial . '.body.' . $format); $stream = Stream\Stream::factory($mocked_response); parent::__construct($response_code, $headers, $stream); } }
mashurex/FantasyDataAPI
test/phpunit/Schedules/Response/Mock.php
PHP
apache-2.0
1,067
<div class="modal fade" id="addRepositoryModal"> <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button> <h4 class="modal-title">Add Repository</h4> </div> <div class="modal-body"> <form action="{% url 'repositories' %}" method="get" role="form"> <div class="form-group"> <label for="repo">URL:</label> <input type="text" class="form-control" name="repo"> </div> <div class="form-group"> <label for="type">Type:</label><br/> <label class="radio-inline"><input type="radio" name="type" value="django">Django</label> <label class="radio-inline"><input type="radio" name="type" value="ror">Ruby on Rails</label> <label class="radio-inline"><input type="radio" name="type" value="node">Node.js</label> <label class="radio-inline"><input type="radio" name="type" value="drupal">Drupal</label> <label class="radio-inline"><input type="radio" name="type" value="grails">Grails</label> </div> <div class="form-group"> <label for="scripts">Scripts:</label> <textarea class="form-control" rows="10" id="scripts" name="scripts"></textarea> </div> <div class="form-group"> <div style="margin: 10px; text-align: center;"> <input class="btn btn-primary btn-lg" type="submit" value="Add"/> </div> </div> </form> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> </div> </div><!-- /.modal-content --> </div><!-- /.modal-dialog --> </div><!-- /.modal -->
cmu-db/db-webcrawler
library/templates/admin/add_repository.html
HTML
apache-2.0
2,221
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.core.metamodel.facets.actions.defaults.method; import java.lang.reflect.Method; import java.util.Collections; import java.util.List; import org.apache.isis.core.metamodel.adapter.ObjectAdapter; import org.apache.isis.core.metamodel.facetapi.Facet; import org.apache.isis.core.metamodel.facetapi.FacetHolder; import org.apache.isis.core.metamodel.facets.ImperativeFacet; import org.apache.isis.core.metamodel.facets.actions.defaults.ActionDefaultsFacetAbstract; import org.apache.isis.core.metamodel.facets.actions.action.invocation.ActionInvocationFacetForDomainEventAbstract; import org.apache.isis.core.metamodel.facets.actions.action.invocation.ActionInvocationFacet; public class ActionDefaultsFacetViaMethod extends ActionDefaultsFacetAbstract implements ImperativeFacet { private final Method defaultMethod; @SuppressWarnings("unused") private final Method actionMethod; public ActionDefaultsFacetViaMethod(final Method defaultMethod, final FacetHolder holder) { super(holder, Derivation.NOT_DERIVED); this.defaultMethod = defaultMethod; this.actionMethod = determineActionMethod(holder); } private static Method determineActionMethod(final FacetHolder holder) { Method method2; final 
Facet actionInvocationFacet = holder.getFacet(ActionInvocationFacet.class); if (actionInvocationFacet instanceof ActionInvocationFacetForDomainEventAbstract) { final ActionInvocationFacetForDomainEventAbstract facetViaMethod = (ActionInvocationFacetForDomainEventAbstract) actionInvocationFacet; method2 = facetViaMethod.getMethods().get(0); } else { method2 = null; } return method2; } /** * Returns a singleton list of the {@link Method} provided in the * constructor. */ @Override public List<Method> getMethods() { return Collections.singletonList(defaultMethod); } @Override public Intent getIntent(final Method method) { return Intent.DEFAULTS; } @Override public boolean impliesResolve() { return true; } @Override public boolean impliesObjectChanged() { return false; } @Override public Object[] getDefaults(final ObjectAdapter owningAdapter) { return (Object[]) ObjectAdapter.InvokeUtils.invoke(defaultMethod, owningAdapter); } @Override protected String toStringValues() { return "method=" + defaultMethod; } }
kidaa/isis
core/metamodel/src/main/java/org/apache/isis/core/metamodel/facets/actions/defaults/method/ActionDefaultsFacetViaMethod.java
Java
apache-2.0
3,351
package yuku.alkitab.yes2.section; import yuku.alkitab.model.PericopeBlock; import yuku.alkitab.model.PericopeIndex; import yuku.alkitab.yes2.io.RandomInputStream; import yuku.alkitab.yes2.io.RandomOutputStream; import yuku.alkitab.yes2.model.PericopeData; import yuku.alkitab.yes2.model.PericopeData.Entry; import yuku.alkitab.yes2.model.Yes2PericopeBlock; import yuku.alkitab.yes2.section.base.SectionContent; import yuku.bintex.BintexReader; import yuku.bintex.BintexWriter; import java.io.IOException; public class PericopesSection extends SectionContent implements SectionContent.Writer { public static final String SECTION_NAME = "pericopes"; // for writing: PericopeData data_; // for reading: RandomInputStream input_; int data_offset_ = 0; PericopeIndex index_; private PericopesSection() { super(SECTION_NAME); } public PericopesSection(PericopeData data) { super(SECTION_NAME); this.data_ = data; } public Yes2PericopeBlock readBlock(int position) throws IOException { int offset = index_.offsets[position]; input_.seek(data_offset_ + offset); return Yes2PericopeBlock.read(input_); } @Override public void write(RandomOutputStream output) throws IOException { BintexWriter bw = new BintexWriter(output); long savedpos_sectionBegin = output.getFilePointer(); long savedpos_indexSize; long[] savedpos_entryOffsets; int[] savedoffset_entryOffsets; long savedpos_sectionEnd; // uint8 data_format_version: 3 bw.writeUint8(3); // int index_size savedpos_indexSize = output.getFilePointer(); bw.writeInt(-1); // placeholder // int entry_count int entry_count = data_.entries.size(); bw.writeInt(entry_count); // Entry[entry_count] savedpos_entryOffsets = new long[entry_count]; int last_entry_ari = 0; for (int i = 0; i < entry_count; i++) { Entry entry = data_.entries.get(i); // entry.ari is written as follows: // if difference to existing ari is <= 0x7fff, it's written in 2bytes: (0x8000 | difference) // otherwise it's written in 4 bytes: (0x00000000 | (0x00ffffff & ari)) int diff_ari = 
entry.ari - last_entry_ari; if (last_entry_ari == 0 || (diff_ari < 0 || diff_ari > 0x7fff)) { bw.writeInt(0x00000000 | (0x00ffffff & entry.ari)); } else { bw.writeUint16(0x8000 | diff_ari); } last_entry_ari = entry.ari; // entry.offset is always written as uint16 delta to the last one (with the first one considered offset 0), // so max size in bytes for a pericope entry is 65536 bytes. savedpos_entryOffsets[i] = output.getFilePointer(); bw.writeUint16(0xffff); // placeholder for later } long dataBeginOffset = output.getFilePointer(); savedoffset_entryOffsets = new int[entry_count]; for (int i = 0; i < entry_count; i++) { Entry entry = data_.entries.get(i); savedoffset_entryOffsets[i] = (int) (output.getFilePointer() - dataBeginOffset); /* Blok { * uint8 data_format_version = 4 * value title * uint8 parallel_count * value[parallel_count] parallels * } */ bw.writeUint8(4); // data_format_version bw.writeValueString(entry.block.title); // title bw.writeUint8(entry.block.parallels == null? 0: entry.block.parallels.size()); // parallel_count if (entry.block.parallels != null) { // parallels for (String parallel: entry.block.parallels) { bw.writeValueString(parallel); } } } savedpos_sectionEnd = output.getFilePointer(); int section_size = (int) (savedpos_sectionEnd - savedpos_sectionBegin); { // patches output.seek(savedpos_indexSize); bw.writeInt(section_size); int last_offset = 0; for (int i = 0; i < entry_count; i++) { int diff_offset = savedoffset_entryOffsets[i] - last_offset; if (diff_offset > 0xffff) { throw new RuntimeException("a pericope entry can't be larger than 65535 bytes"); } output.seek(savedpos_entryOffsets[i]); bw.writeUint16(diff_offset); last_offset = savedoffset_entryOffsets[i]; } } output.seek(savedpos_sectionEnd); } public static class Reader implements SectionContent.Reader<PericopesSection> { @Override public PericopesSection read(RandomInputStream input) throws Exception { BintexReader br = new BintexReader(input); int version = br.readUint8(); 
if (version != 2 && version != 3) { throw new RuntimeException("PericopeIndex version not supported: " + version); } /* int index_size = */ br.readInt(); int entry_count = br.readInt(); PericopesSection res = new PericopesSection(); res.index_ = new PericopeIndex(); int[] aris = new int[entry_count]; res.index_.aris = aris; int[] offsets = new int[entry_count]; res.index_.offsets = offsets; if (version == 2) { for (int i = 0; i < entry_count; i++) { aris[i] = br.readInt(); offsets[i] = br.readInt(); } } else if (version == 3) { int last_entry_ari = 0; int last_entry_offset = 0; for (int i = 0; i < entry_count; i++) { // entry.ari is written as follows: // if difference to existing ari is <= 0x7fff, it's written in 2bytes: (0x8000 | difference) // otherwise it's written in 4 bytes: (0x00000000 | (0x00ffffff & ari)) int data_ari = br.readUint16(); int ari; if ((data_ari & 0x8000) == 0) { // absolute ari = (data_ari << 16) | br.readUint16(); } else { // relative ari = last_entry_ari + (data_ari & 0x7fff); } aris[i] = last_entry_ari = ari; // entry.offset is always written as uint16 delta to the last one (with the first one considered offset 0), // so max size in bytes for a pericope entry is 65536 bytes. 
int data_offset = br.readUint16(); int offset = last_entry_offset + data_offset; offsets[i] = last_entry_offset = offset; } } res.input_ = input; res.data_offset_ = (int) input.getFilePointer(); return res; } } /** * @param aris (result param) the actual aris of the pericopes * @param blocks (result param) the pericope blocks found * @param max maximum number of results to return, must be less than or equal to min(aris.length, blocks.length) * @return number of pericopes loaded by this method */ public int getPericopesForAris(int ari_from, int ari_to, int[] aris, PericopeBlock[] blocks, int max) throws IOException { int first = index_.findFirst(ari_from, ari_to); if (first == -1) { return 0; } int cur = first; int res = 0; while (true) { int ari = index_.getAri(cur); if (ari >= ari_to) { // no more break; } Yes2PericopeBlock block = readBlock(cur); cur++; if (res < max) { aris[res] = ari; blocks[res] = block; res++; } else { break; } } return res; } }
Jaden-J/androidbible
AlkitabYes2/src/main/java/yuku/alkitab/yes2/section/PericopesSection.java
Java
apache-2.0
6,801
var mainwindow_8h = [ [ "MainWindow", "d9/dc6/class_main_window.html", "d9/dc6/class_main_window" ], [ "BINARY_FILE", "d9/d53/mainwindow_8h.html#a235764c67fdcb87b7484bdc1ef801959", null ], [ "DEFAULT_DATA_DIR", "d9/d53/mainwindow_8h.html#a89d9b90dbc1df3ab30a2d08519e40e49", null ], [ "DEFAULT_DATA_FILE_EXT", "d9/d53/mainwindow_8h.html#a7009a0f1670c12ad66218104884f43a8", null ], [ "DEFAULT_DATA_PLAYER_FILE", "d9/d53/mainwindow_8h.html#a92e4f26a961aad80bec26b9f44030c3d", null ], [ "DEFAULT_DATA_TEAM_FILE", "d9/d53/mainwindow_8h.html#af219f407cc9a763920d8e798c8d6031b", null ] ];
delfernan/LP
release/doc/html/d9/d53/mainwindow_8h.js
JavaScript
apache-2.0
605
/* * Copyright 2017 PayPal * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package akka.http.org.squbs.util import java.util.Optional import akka.NotUsed import akka.http.impl.util.JavaMapping import akka.http.javadsl.{model => jm} import akka.http.scaladsl.Http.HostConnectionPool import akka.http.scaladsl.HttpsConnectionContext import akka.http.scaladsl.model.{HttpRequest, HttpResponse} import akka.http.scaladsl.settings.ConnectionPoolSettings import akka.http.{javadsl => jd} import akka.japi.Pair import akka.stream.scaladsl.{BidiFlow, Flow} import akka.stream.{javadsl => js} import scala.util.Try /** * The JavaConverters are under Akka HTTP package to get access to the non-exposed converters there. 
*/ object JavaConverters { def fromJava(connectionContext: Optional[jd.HttpsConnectionContext], settings: Optional[jd.settings.ConnectionPoolSettings]): (Option[HttpsConnectionContext], Option[ConnectionPoolSettings]) = { import scala.compat.java8.OptionConverters._ val cCtx = connectionContext.asScala.asInstanceOf[Option[HttpsConnectionContext]] val sSettings = settings.asScala.asInstanceOf[Option[ConnectionPoolSettings]] (cCtx, sSettings) } def toJava[In1, Out1, In2, Out2, Context](bidiFlow: BidiFlow[(In1, Context), (Out1, Context), (In2, Context), (Out2, Context), NotUsed]): js.BidiFlow[Pair[In1, Context], Pair[Out1, Context], Pair[In2, Context], Pair[Out2, Context], NotUsed] = { implicit val sIn1Mapping = JavaMapping.identity[In1] implicit val sOut1Mapping = JavaMapping.identity[Out1] implicit val sIn2Mapping = JavaMapping.identity[In2] implicit val sOut2Mapping = JavaMapping.identity[Out2] implicit val contextMapping = JavaMapping.identity[Context] val javaToScalaAdapter = JavaMapping.adapterBidiFlow[Pair[In1, Context], (In1, Context), (Out2, Context), Pair[Out2, Context]] val scalaToJavaAdapter = JavaMapping.adapterBidiFlow[Pair[In2, Context], (In2, Context), (Out1, Context), Pair[Out1, Context]].reversed javaToScalaAdapter.atop(bidiFlow).atop(scalaToJavaAdapter).asJava } private def adaptTupleFlow[T](scalaFlow: Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool]): js.Flow[Pair[jm.HttpRequest, T], Pair[Try[jm.HttpResponse], T], jd.HostConnectionPool] = { implicit val jIdentityMapping = JavaMapping.identity[T] implicit object HostConnectionPoolMapping extends JavaMapping[jd.HostConnectionPool, HostConnectionPool] { def toScala(javaObject: jd.HostConnectionPool): HostConnectionPool = throw new UnsupportedOperationException("jd.HostConnectionPool cannot be converted to Scala") def toJava(scalaObject: HostConnectionPool): jd.HostConnectionPool = scalaObject.toJava } JavaMapping.toJava(scalaFlow)(JavaMapping.flowMapping[Pair[jm.HttpRequest, T], 
(HttpRequest, T), Pair[Try[jm.HttpResponse], T], (Try[HttpResponse], T), jd.HostConnectionPool, HostConnectionPool]) } def toJava[T](flow: Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool]): js.Flow[Pair[jm.HttpRequest, T], Pair[Try[jm.HttpResponse], T], jd.HostConnectionPool] = { adaptTupleFlow[T](flow) } def toScala(uri: akka.http.javadsl.model.Uri) = JavaMapping.toScala(uri) }
akara/squbs
squbs-ext/src/main/scala/akka/http/org/squbs/util/JavaConverters.scala
Scala
apache-2.0
3,782
<head> <!-- **************************************************************************** --> <!-- SITE SETTINGS --> <!-- **************************************************************************** --> <!-- Font to use within the entire site --> <link rel='stylesheet' type='text/css' href='http://fonts.googleapis.com/css?family=Ubuntu:regular,italic,bold,bolditalic' > <!-- Favicon (32x32) to show on the explorer's address bar --> <link rel='shortcut icon' href='img/InvFEST_ico.png'> <!-- Page title --> <title>InvFEST: Human Polymorphic Inversion DataBase</title> <!-- Site metadata --> <meta charset='UTF-8'> <meta name="keywords" content="inversion,inversions,inversiones,chromosomal inversion,inversión cromosómica"> <meta name="description" content="Human Polymorphic Inversions Database"> <meta name="author" content="Miquel Ràmia/Raquel Egea"> <!-- **************************************************************************** --> <!-- STYLES --> <!-- **************************************************************************** --> <link rel='stylesheet' type='text/css' href='css/style.css'/> <link rel='stylesheet' type='text/css' href='css/report.css'/> <link rel='stylesheet' type='text/css' href='css/search.css'/> <!-- **************************************************************************** --> <!-- SCRIPTS --> <!-- **************************************************************************** --> <!-- Include JQuery from a CDN (or locally if the site is offline) --> <script src="https://code.jquery.com/jquery-1.4.2.min.js"></script> <script> $(window).jQuery || $(document).write('<script src="js/jquery-1.4.2.min.js" async><\/script>') </script> <!-- **************************************************************************** --> <!-- Script to allow tooltips: an "note" that appears when a user hovers over an obejct --> <script> $(document).ready(function() { // Tooltip only Text $('.masterTooltip').hover(function() { // Hover over code var 
title=$(this).attr('title'); $(this).data('tipText', title).removeAttr('title'); $('<p class="tooltip"></p>') .text(title) .appendTo('body') .fadeIn('slow'); }, function() { // Hover out code $(this).attr('title', $(this).data('tipText')); $('.tooltip').remove(); }).mousemove(function(e) { var mousex=e.pageX + 20; //Get X coordinates var mousey=e.pageY + 10; //Get Y coordinates $('.tooltip') .css({ top: mousey, left: mousex }) }); }); </script> <!-- **************************************************************************** --> <!-- JS-AJAX script allows information uptating without reloading the whole page --> <script src='js/myAJAXlib.js'></script> <!-- **************************************************************************** --> <!-- Loads Hihgslide JS: image, media and gallery viewer written in JavaScript --> <link href='css/css_highslide.css' rel='stylesheet' type='text/css'/> <script src='js/highslide_complete.js'></script> <script> hs.graphicsDir='img/highslide_graphics/'; hs.outlineType='rounded-white'; hs.outlineWhileAnimating=true; </script> <!-- **************************************************************************** --> <!-- Include the script to sort table content --> <script src='js/jquery.tablesorter.min.js'></script> <script> function pad(number, length) { var str='' + number; while (str.length < length) { str='0' + str; } return str; } // Add parser through the tablesorter addParser method $.tablesorter.addParser({ // Set a unique id id: 'size', is: function(s) { // Return false so this parser is not auto detected return false; }, format: function(s) { // Format your data for normalization return s.replace(/,/g,''); }, // Set type, either numeric or text type: 'numeric' }); $.tablesorter.addParser({ // Set a unique id id: 'status', is: function(s) { // Return false so this parser is not auto detected return false; }, format: function(s) { // Format your data for normalization return 
s.replace('Validated','1').replace('Predicted','2').replace('Unreliable prediction','3').replace('False','4'); }, // Set type, either numeric or text type: 'text' }); $.tablesorter.addParser({ // Set a unique id id: 'effect', is: function(s) { // Return false so this parser is not auto detected return false; }, format: function(s) { // Format your data for normalization return s.replace('Breaks two genes','1').replace('Breaks one gene','2').replace('Breaks different exons and introns of a gene','3').replace('Breaks a region within an exon of a gene','4').replace('Breaks a region within an intron of a gene','5').replace('Intergenic','6').replace('NA','7'); }, // Set type, either numeric or text type: 'text' }); $.tablesorter.addParser({ // Set a unique id id: 'position', is: function(s) { // Return false so this parser is not auto detected return false; }, format: function(s) { // Format your data for normalization var myposition=s.match(/^chr([^:]+):(\d+)-(\d+)$/); var mychrom=myposition[1]; mychrom=mychrom.replace('X',997).replace('Y',998).replace('M',999); var mystart=myposition[2]; var myend=myposition[3]; var mynewchrom=pad(mychrom,20); var mynewstart=pad(mystart,20); var mynewend=pad(myend,20); var mynewposition=mynewchrom + mynewstart + mynewend; return mynewposition; }, // Set type, either numeric or text type: 'text' }); // Command to make the 'sort table' option, applicable within the 'expandable divisions' (e.g. 
table of searching results) $(document).ready(function(){ $('#sort_table').tablesorter({headers: {1:{sorter:'position'},2:{sorter:'size'},3:{sorter:'status'},5:{sorter:'effect'}}}); }); $(document).ready(function(){ $('#sort_table2').tablesorter(); }); </script> <!-- **************************************************************************** --> <!-- Include the script for the floating menu --> <script src="js/header.js"></script> <!-- **************************************************************************** --> <!-- Script to allow 'expandable' divisions --> <script> $(document).ready(function(){ $(".hidden").hide(); //Hide all contents $(".section-title").click(function(){ //Toggle when click title $(this).next(".hidden, .grlsection-content, .section-content").slideToggle(600); var title = $(this).html(); var regExp = /\+/; if (title.match(regExp)) { title = title.replace('+','-'); $(this).html(title); } else { title = title.replace('-','+'); $(this).html(title); } }); }); </script> <!-- </head> --> <!-- The head will be closed when 'printed' in each page's script. It's made like this just in case the page needs extra scripts, styles, etc. -->
caceres-lab/InvFEST-code
invfreeze/html/header.html
HTML
apache-2.0
7,893
package com.iisquare.solr.wltea.util; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; /** * Servlet操作类 */ public class ServletUtil { public static final String cookieEncoding = "UTF-8"; public static final String regexParameterMapKey = "((?<!\\[)[^\\[\\]]+(?!\\])|(?<=\\[)[^\\[\\]]*(?=\\]))"; /** * 解析ParameterMap,将中括号[]中的字符串转换为下标 下标支持非中括号[]的任意字符,包括空格等 * 若存在多个相同的下标(以中括号[]标识的数组除外),默认取最后一个下标对应的值 * * @param parameterMap * 参数Map */ public static Map<String, Object> parseParameterMap( Map<String, String[]> parameterMap) { Map<String, Object> map = new LinkedHashMap<String, Object>(); for (Map.Entry<String, String[]> entry : parameterMap.entrySet()) { List<String> keys = DPUtil.getMatcher(regexParameterMapKey, entry.getKey(), true); generateParameterMap(map, keys, entry.getValue(), 0, keys.size()); } return map; } /** * 按照KV形式,递归生成ParameterMap * * @param map * 当前层级的LinkedHashMap<String, Object> * @param keyList * 下标列表 * @param valueArray * 下标对应值 * @param index * 下标当前位置 * @param length * 处理深度 */ @SuppressWarnings("unchecked") public static Map<String, Object> generateParameterMap( Map<String, Object> map, List<String> keyList, String[] valueArray, int index, int length) { int indexNext = index + 1; String key = keyList.get(index); if (indexNext >= length) { // 当前为最终位置,不存在下级元素 map.put(key, valueArray.length > 0 ? 
valueArray[valueArray.length - 1] : ""); // 默认取最后一个值 return map; } String keyNext = keyList.get(indexNext); // 存在下级元素 if (0 == keyNext.length()) { // 下级元素为[]数组形式,应为最终位置 map.put(key, valueArray); return map; } /* 下级元素为KV形式,继续递归处理 */ Map<String, Object> subMap = (Map<String, Object>) map.get(key); if (null == subMap) subMap = new LinkedHashMap<String, Object>(); // 初始化下级Map map.put(key, generateParameterMap(subMap, keyList, valueArray, indexNext, length)); return map; } public static void addCookie(HttpServletRequest request, HttpServletResponse response, String key, String value, int maxAge) throws UnsupportedEncodingException { if (null != value) value = URLEncoder.encode(value, cookieEncoding); Cookie cookie = new Cookie(key, value); String host = request.getHeader("host"); if (host.indexOf(":") > -1) { host = host.split(":")[0]; } cookie.setDomain(host); cookie.setPath("/"); cookie.setMaxAge(maxAge); response.addCookie(cookie); } public static String getCookie(HttpServletRequest request, String key) throws UnsupportedEncodingException { Cookie cookies[] = request.getCookies(); if (null == cookies) return null; for (Cookie cookie : cookies) { if (key.equals(cookie.getName())) return URLDecoder.decode(cookie.getValue(), cookieEncoding); } return null; } public static void setSession(HttpServletRequest request, Map<String, Object> map) { HttpSession session = request.getSession(); for (Map.Entry<String, Object> item : map.entrySet()) { session.setAttribute(item.getKey(), item.getValue()); } } public static void setSession(HttpServletRequest request, String key, Object value) { HttpSession session = request.getSession(); session.setAttribute(key, value); } public static Map<String, Object> getSession(HttpServletRequest request) { HttpSession session = request.getSession(); Map<String, Object> map = new HashMap<String, Object>(); Enumeration<?> e = session.getAttributeNames(); while (e.hasMoreElements()) { String key = (String) e.nextElement(); map.put(key, 
session.getAttribute(key)); } return map; } public static Object getSession(HttpServletRequest request, String key) { HttpSession session = request.getSession(); return session.getAttribute(key); } public static Map<String, Object> getSessionMap(HttpServletRequest request) { HttpSession session = request.getSession(); Enumeration<String> enumeration = session.getAttributeNames(); Map<String, Object> map = new HashMap<String, Object>(); while (enumeration.hasMoreElements()) { String name = enumeration.nextElement().toString(); map.put(name, session.getAttribute(name)); } return map; } public static void invalidateSession(HttpServletRequest request) { HttpSession session = request.getSession(); session.invalidate(); } /** * 获取客户端IP地址 */ public static String getRemoteAddr(HttpServletRequest request) { String ip = request.getHeader("x-forwarded-for"); if (ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip)) { ip = request.getHeader("Proxy-Client-IP"); } if (ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip)) { ip = request.getHeader("WL-Proxy-Client-IP"); } if (ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip)) { ip = request.getRemoteAddr(); } return ip; } /** * 获取项目物理路径 */ public static String getWebRoot(HttpServletRequest request) { String webRoot = request.getSession().getServletContext() .getRealPath("/"); return webRoot.substring(0, webRoot.length() - 1); } /** * 获取项目访问地址 * * @param bWithDomain * 是否携带域名地址 */ public static String getWebUrl(HttpServletRequest request, boolean bWithDomain) { StringBuilder sb = new StringBuilder(); if (bWithDomain) { sb.append(request.getScheme()).append("://") .append(request.getServerName()); if (80 != request.getServerPort()) sb.append(":").append(request.getServerPort()); } sb.append(request.getContextPath()); return sb.toString(); } /** * 获取完整请求地址和参数 * * @param bWithWebUrl * 是否携带项目地址 * @param bWithQuery * 是否携带请求参数 */ public static String getFullUrl(HttpServletRequest request, boolean 
bWithWebUrl, boolean bWithQuery) { String requestUrl = request.getRequestURL().toString(); if (bWithQuery) { String queryString = request.getQueryString(); if (null != queryString) requestUrl = DPUtil.stringConcat(requestUrl, "?", queryString); } if (!bWithWebUrl) requestUrl = requestUrl .substring(getWebUrl(request, true).length()); return requestUrl; } /** * 获取目录分隔符 */ public static String getDirectorySeparator(HttpServletRequest request) { String webRoot = getWebRoot(request); if (webRoot.startsWith("/")) return "/"; return "\\"; } }
iisquare/analysis-ik-online
solr6.x/src/main/java/com/iisquare/solr/wltea/util/ServletUtil.java
Java
apache-2.0
7,227
// Copyright 2015 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** Name of the state. Can be used in, e.g., $state.go method. */ export const stateName = 'ingress.list';
IanLewis/dashboard
src/app/frontend/ingress/list/state.js
JavaScript
apache-2.0
717
<!DOCTYPE HTML> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc --> <title>Uses of Class org.apache.sysds.runtime.io.hdf5.message.H5DataTypeMessage (Apache SystemDS 2.3.0-SNAPSHOT API)</title> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style"> <link rel="stylesheet" type="text/css" href="../../../../../../../../jquery/jquery-ui.css" title="Style"> <script type="text/javascript" src="../../../../../../../../script.js"></script> <script type="text/javascript" src="../../../../../../../../jquery/jszip/dist/jszip.min.js"></script> <script type="text/javascript" src="../../../../../../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script> <!--[if IE]> <script type="text/javascript" src="../../../../../../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script> <![endif]--> <script type="text/javascript" src="../../../../../../../../jquery/jquery-3.5.1.js"></script> <script type="text/javascript" src="../../../../../../../../jquery/jquery-ui.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.sysds.runtime.io.hdf5.message.H5DataTypeMessage (Apache SystemDS 2.3.0-SNAPSHOT API)"; } } catch(err) { } //--> var pathtoroot = "../../../../../../../../"; var useModuleDirectories = true; loadScripts(document, 'script');</script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <header role="banner"> <nav role="navigation"> <div class="fixedNav"> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a id="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a id="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a 
href="../../../../../../../../index.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../H5DataTypeMessage.html" title="class in org.apache.sysds.runtime.io.hdf5.message">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../../allclasses.html">All&nbsp;Classes</a></li> </ul> <ul class="navListSearch"> <li><label for="search">SEARCH:</label> <input type="text" id="search" value="search" disabled="disabled"> <input type="reset" id="reset" value="reset" disabled="disabled"> </li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> </div> <a id="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> </div> <div class="navPadding">&nbsp;</div> <script type="text/javascript"><!-- $('.navPadding').css('padding-top', $('.fixedNav').css("height")); //--> </script> </nav> </header> <main role="main"> <div class="header"> <h2 title="Uses of Class org.apache.sysds.runtime.io.hdf5.message.H5DataTypeMessage" class="title">Uses of Class<br>org.apache.sysds.runtime.io.hdf5.message.H5DataTypeMessage</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary"> <caption><span>Packages that use <a href="../H5DataTypeMessage.html" title="class in org.apache.sysds.runtime.io.hdf5.message">H5DataTypeMessage</a></span><span 
class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <th class="colFirst" scope="row"><a href="#org.apache.sysds.runtime.io.hdf5">org.apache.sysds.runtime.io.hdf5</a></th> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"> <section role="region"><a id="org.apache.sysds.runtime.io.hdf5"> <!-- --> </a> <h3>Uses of <a href="../H5DataTypeMessage.html" title="class in org.apache.sysds.runtime.io.hdf5.message">H5DataTypeMessage</a> in <a href="../../package-summary.html">org.apache.sysds.runtime.io.hdf5</a></h3> <table class="useSummary"> <caption><span>Methods in <a href="../../package-summary.html">org.apache.sysds.runtime.io.hdf5</a> that return <a href="../H5DataTypeMessage.html" title="class in org.apache.sysds.runtime.io.hdf5.message">H5DataTypeMessage</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colSecond" scope="col">Method</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code><a href="../H5DataTypeMessage.html" title="class in org.apache.sysds.runtime.io.hdf5.message">H5DataTypeMessage</a></code></td> <th class="colSecond" scope="row"><span class="typeNameLabel">H5ContiguousDataset.</span><code><span class="memberNameLink"><a href="../../H5ContiguousDataset.html#getDataType()">getDataType</a></span>()</code></th> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </section> </li> </ul> </li> </ul> </div> </main> <footer role="contentinfo"> <nav role="navigation"> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a id="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a id="navbar.bottom.firstrow"> <!-- 
--> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../../index.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../H5DataTypeMessage.html" title="class in org.apache.sysds.runtime.io.hdf5.message">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../../allclasses.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> </div> <a id="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </nav> <p class="legalCopy"><small>Copyright &#169; 2021 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p> </footer> </body> </html>
apache/incubator-systemml
docs/api/java/org/apache/sysds/runtime/io/hdf5/message/class-use/H5DataTypeMessage.html
HTML
apache-2.0
7,380
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_55) on Fri Jun 20 06:34:21 EDT 2014 --> <meta http-equiv="Content-Type" content="text/html" charset="utf-8"> <title>Uses of Class org.apache.solr.search.grouping.distributed.requestfactory.TopGroupsShardRequestFactory (Solr 4.9.0 API)</title> <meta name="date" content="2014-06-20"> <link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.solr.search.grouping.distributed.requestfactory.TopGroupsShardRequestFactory (Solr 4.9.0 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../../org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.html" title="class in org.apache.solr.search.grouping.distributed.requestfactory">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../../../../index.html?org/apache/solr/search/grouping/distributed/requestfactory/class-use/TopGroupsShardRequestFactory.html" target="_top">Frames</a></li> <li><a href="TopGroupsShardRequestFactory.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.solr.search.grouping.distributed.requestfactory.TopGroupsShardRequestFactory" class="title">Uses of Class<br>org.apache.solr.search.grouping.distributed.requestfactory.TopGroupsShardRequestFactory</h2> </div> <div class="classUseContainer">No usage of org.apache.solr.search.grouping.distributed.requestfactory.TopGroupsShardRequestFactory</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../../org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.html" title="class in org.apache.solr.search.grouping.distributed.requestfactory">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div 
class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../../index.html?org/apache/solr/search/grouping/distributed/requestfactory/class-use/TopGroupsShardRequestFactory.html" target="_top">Frames</a></li> <li><a href="TopGroupsShardRequestFactory.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small> <i>Copyright &copy; 2000-2014 Apache Software Foundation. All Rights Reserved.</i> <script src='../../../../../../../../prettify.js' type='text/javascript'></script> <script type='text/javascript'> (function(){ var oldonload = window.onload; if (typeof oldonload != 'function') { window.onload = prettyPrint; } else { window.onload = function() { oldonload(); prettyPrint(); } } })(); </script> </small></p> </body> </html>
BibAlex/bhl_rails_4_solr
docs/solr-core/org/apache/solr/search/grouping/distributed/requestfactory/class-use/TopGroupsShardRequestFactory.html
HTML
apache-2.0
5,434
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #pragma once #include <aws/devicefarm/DeviceFarm_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSMap.h> #include <aws/core/utils/memory/stl/AWSString.h> #include <aws/devicefarm/model/OfferingStatus.h> #include <utility> namespace Aws { template<typename RESULT_TYPE> class AmazonWebServiceResult; namespace Utils { namespace Json { class JsonValue; } // namespace Json } // namespace Utils namespace DeviceFarm { namespace Model { /** * <p>Returns the status result for a device offering.</p><p><h3>See Also:</h3> * <a * href="http://docs.aws.amazon.com/goto/WebAPI/devicefarm-2015-06-23/GetOfferingStatusResult">AWS * API Reference</a></p> */ class AWS_DEVICEFARM_API GetOfferingStatusResult { public: GetOfferingStatusResult(); GetOfferingStatusResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); GetOfferingStatusResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); /** * <p>When specified, gets the offering status for the current period.</p> */ inline const Aws::Map<Aws::String, OfferingStatus>& GetCurrent() const{ return m_current; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline void SetCurrent(const Aws::Map<Aws::String, OfferingStatus>& value) { m_current = value; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline void SetCurrent(Aws::Map<Aws::String, 
OfferingStatus>&& value) { m_current = std::move(value); } /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& WithCurrent(const Aws::Map<Aws::String, OfferingStatus>& value) { SetCurrent(value); return *this;} /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& WithCurrent(Aws::Map<Aws::String, OfferingStatus>&& value) { SetCurrent(std::move(value)); return *this;} /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& AddCurrent(const Aws::String& key, const OfferingStatus& value) { m_current.emplace(key, value); return *this; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& AddCurrent(Aws::String&& key, const OfferingStatus& value) { m_current.emplace(std::move(key), value); return *this; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& AddCurrent(const Aws::String& key, OfferingStatus&& value) { m_current.emplace(key, std::move(value)); return *this; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& AddCurrent(Aws::String&& key, OfferingStatus&& value) { m_current.emplace(std::move(key), std::move(value)); return *this; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& AddCurrent(const char* key, OfferingStatus&& value) { m_current.emplace(key, std::move(value)); return *this; } /** * <p>When specified, gets the offering status for the current period.</p> */ inline GetOfferingStatusResult& AddCurrent(const char* key, const OfferingStatus& value) { m_current.emplace(key, value); return *this; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline const Aws::Map<Aws::String, OfferingStatus>& 
GetNextPeriod() const{ return m_nextPeriod; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline void SetNextPeriod(const Aws::Map<Aws::String, OfferingStatus>& value) { m_nextPeriod = value; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline void SetNextPeriod(Aws::Map<Aws::String, OfferingStatus>&& value) { m_nextPeriod = std::move(value); } /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& WithNextPeriod(const Aws::Map<Aws::String, OfferingStatus>& value) { SetNextPeriod(value); return *this;} /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& WithNextPeriod(Aws::Map<Aws::String, OfferingStatus>&& value) { SetNextPeriod(std::move(value)); return *this;} /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& AddNextPeriod(const Aws::String& key, const OfferingStatus& value) { m_nextPeriod.emplace(key, value); return *this; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& AddNextPeriod(Aws::String&& key, const OfferingStatus& value) { m_nextPeriod.emplace(std::move(key), value); return *this; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& AddNextPeriod(const Aws::String& key, OfferingStatus&& value) { m_nextPeriod.emplace(key, std::move(value)); return *this; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& AddNextPeriod(Aws::String&& key, OfferingStatus&& value) { m_nextPeriod.emplace(std::move(key), std::move(value)); return *this; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& AddNextPeriod(const char* key, OfferingStatus&& value) { m_nextPeriod.emplace(key, 
std::move(value)); return *this; } /** * <p>When specified, gets the offering status for the next period.</p> */ inline GetOfferingStatusResult& AddNextPeriod(const char* key, const OfferingStatus& value) { m_nextPeriod.emplace(key, value); return *this; } /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline const Aws::String& GetNextToken() const{ return m_nextToken; } /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline void SetNextToken(const Aws::String& value) { m_nextToken = value; } /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline void SetNextToken(Aws::String&& value) { m_nextToken = std::move(value); } /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline void SetNextToken(const char* value) { m_nextToken.assign(value); } /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline GetOfferingStatusResult& WithNextToken(const Aws::String& value) { SetNextToken(value); return *this;} /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline GetOfferingStatusResult& WithNextToken(Aws::String&& value) { SetNextToken(std::move(value)); return *this;} /** * <p>An identifier that was returned from the previous call to this operation, * which can be used to return the next set of items in the list.</p> */ inline GetOfferingStatusResult& WithNextToken(const char* value) { SetNextToken(value); return *this;} private: 
Aws::Map<Aws::String, OfferingStatus> m_current; Aws::Map<Aws::String, OfferingStatus> m_nextPeriod; Aws::String m_nextToken; }; } // namespace Model } // namespace DeviceFarm } // namespace Aws
cedral/aws-sdk-cpp
aws-cpp-sdk-devicefarm/include/aws/devicefarm/model/GetOfferingStatusResult.h
C
apache-2.0
8,801
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ """ The pyflink version will be consistent with the flink version and follow the PEP440. .. seealso:: https://www.python.org/dev/peps/pep-0440 """ __version__ = "1.13.dev0"
aljoscha/flink
flink-python/pyflink/version.py
Python
apache-2.0
1,132
/* * Copyright (c) 2019 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.gui.api; import org.apache.wicket.markup.html.form.FormComponent; import java.io.Serializable; public interface Validatable extends Serializable { FormComponent getValidatableComponent(); }
bshp/midPoint
gui/admin-gui/src/main/java/com/evolveum/midpoint/gui/api/Validatable.java
Java
apache-2.0
413
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.gwac.action; import java.util.*; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.struts2.convention.annotation.Result; import org.apache.struts2.interceptor.SessionAware; import com.gwac.model.UserInfo; import com.gwac.dao.CustomerDAO; import com.gwac.service.UserInfoService; import com.opensymphony.xwork2.ActionSupport; @Result(name = "success", type = "json") public class UserList extends ActionSupport implements SessionAware { private static final long serialVersionUID = 5078264277068533593L; private static final Log log = LogFactory.getLog(UserList.class); // Your result List private List<UserInfo> gridModel; // get how many rows we want to have into the grid - rowNum attribute in the // grid private Integer rows = 0; // Get the requested page. By default grid sets this to 1. private Integer page = 0; // sorting order - asc or desc private String sord; // get index row - i.e. user click to sort. 
private String sidx; // Search Field private String searchField; // The Search String private String searchString; // Limit the result when using local data, value form attribute rowTotal private Integer totalrows; // he Search Operation // ['eq','ne','lt','le','gt','ge','bw','bn','in','ni','ew','en','cn','nc'] private String searchOper; // Your Total Pages private Integer total = 0; // All Records private Integer records = 0; private boolean loadonce = false; private Map<String, Object> session; private List<UserInfo> myCustomers; private UserInfoService userService; @SuppressWarnings("unchecked") public String execute() { log.debug("Page " + getPage() + " Rows " + getRows() + " Sorting Order " + getSord() + " Index Row :" + getSidx()); log.debug("Search :" + searchField + " " + searchOper + " " + searchString); // Count all record (select count(*) from your_custumers) int tn = userService.count(); log.debug("number="+tn); if (totalrows != null) { records = totalrows; } // if(rows==-1){ // rows = records; // } // Calucalate until rows ware selected int to = (rows * page); // Calculate the first row to read int from = to - rows; // Set to = max rows if (to > records) { to = records; } // setGridModel(dpmDao.findAll(page, rows)); gridModel = userService.listUser(from, rows); // gridModel = tspDao.findAll(); log.debug("from="+from); log.debug("to="+to); log.debug("size=" + gridModel.size()); // for(Telescope dpm: gridModel){ // log.debug("name="+dpm.getName()); // } // Calculate total Pages total = (int) Math.ceil((double) records / (double) rows); return SUCCESS; } // public String getJSON() { // return execute(); // } /** * @return how many rows we want to have into the grid */ public Integer getRows() { return rows; } /** * @param rows how many rows we want to have into the grid */ public void setRows(Integer rows) { this.rows = rows; } /** * @return current page of the query */ public Integer getPage() { return page; } /** * @param page current page of the query 
*/ public void setPage(Integer page) { this.page = page; } /** * @return total pages for the query */ public Integer getTotal() { return total; } /** * @param total total pages for the query */ public void setTotal(Integer total) { this.total = total; } /** * @return total number of records for the query. e.g. select count(*) from * table */ public Integer getRecords() { return records; } /** * @param records total number of records for the query. e.g. select count(*) * from table */ public void setRecords(Integer records) { this.records = records; if (this.records > 0 && this.rows > 0) { this.total = (int) Math.ceil((double) this.records / (double) this.rows); } else { this.total = 0; } } /** * @return an collection that contains the actual data */ public List<UserInfo> getGridModel() { return gridModel; } /** * @param gridModel an collection that contains the actual data */ public void setGridModel(List<UserInfo> gridModel) { this.gridModel = gridModel; } /** * @return sorting order */ public String getSord() { return sord; } /** * @param sord sorting order */ public void setSord(String sord) { this.sord = sord; } /** * @return get index row - i.e. user click to sort. */ public String getSidx() { return sidx; } /** * @param sidx get index row - i.e. user click to sort. 
*/ public void setSidx(String sidx) { this.sidx = sidx; } public void setSearchField(String searchField) { this.searchField = searchField; } public void setSearchString(String searchString) { this.searchString = searchString; } public void setSearchOper(String searchOper) { this.searchOper = searchOper; } public void setLoadonce(boolean loadonce) { this.loadonce = loadonce; } public void setSession(Map<String, Object> session) { this.session = session; } public void setTotalrows(Integer totalrows) { this.totalrows = totalrows; } /** * @return the userService */ // public UserInfoService getUserService() { // return userService; // } /** * @param userService the userService to set */ public void setUserService(UserInfoService userService) { this.userService = userService; } }
archord/svom
src/main/java/com/gwac/action/UserList.java
Java
apache-2.0
6,485
<?php /** * @file * Contains \Drupal\aggregator\Plugin\aggregator\fetcher\DefaultFetcher. */ namespace Drupal\aggregator\Plugin\aggregator\fetcher; use Drupal\aggregator\Annotation\AggregatorFetcher; use Drupal\aggregator\Plugin\FetcherInterface; use Drupal\aggregator\Entity\Feed; use Drupal\Core\Annotation\Translation; use Drupal\Core\Plugin\ContainerFactoryPluginInterface; use Guzzle\Http\ClientInterface; use Guzzle\Http\Exception\BadResponseException; use Guzzle\Http\Exception\RequestException; use Symfony\Component\DependencyInjection\ContainerInterface; /** * Defines a default fetcher implementation. * * Uses the http_default_client service to download the feed. * * @AggregatorFetcher( * id = "aggregator", * title = @Translation("Default fetcher"), * description = @Translation("Downloads data from a URL using Drupal's HTTP request handler.") * ) */ class DefaultFetcher implements FetcherInterface, ContainerFactoryPluginInterface { /** * The HTTP client to fetch the feed data with. * * @var \Guzzle\Http\ClientInterface */ protected $httpClient; /** * Constructs a DefaultFetcher object. * * @param \Guzzle\Http\ClientInterface $http_client * A Guzzle client object. */ public function __construct(ClientInterface $http_client) { $this->httpClient = $http_client; } /** * {@inheritdoc} */ public static function create(ContainerInterface $container, array $configuration, $plugin_id, array $plugin_definition) { return new static( $container->get('http_default_client') ); } /** * {@inheritdoc} */ public function fetch(Feed $feed) { $request = $this->httpClient->get($feed->url->value); $feed->source_string = FALSE; // Generate conditional GET headers. if ($feed->etag->value) { $request->addHeader('If-None-Match', $feed->etag->value); } if ($feed->modified->value) { $request->addHeader('If-Modified-Since', gmdate(DATE_RFC1123, $feed->modified->value)); } try { $response = $request->send(); // In case of a 304 Not Modified, there is no new content, so return // FALSE. 
if ($response->getStatusCode() == 304) { return FALSE; } $feed->source_string = $response->getBody(TRUE); $feed->etag = $response->getEtag(); $feed->modified = strtotime($response->getLastModified()); $feed->http_headers = $response->getHeaders(); // Update the feed URL in case of a 301 redirect. if ($response->getEffectiveUrl() != $feed->url->value) { $feed->url->value = $response->getEffectiveUrl(); } return TRUE; } catch (BadResponseException $e) { $response = $e->getResponse(); watchdog('aggregator', 'The feed from %site seems to be broken because of error "%error".', array('%site' => $feed->label(), '%error' => $response->getStatusCode() . ' ' . $response->getReasonPhrase()), WATCHDOG_WARNING); drupal_set_message(t('The feed from %site seems to be broken because of error "%error".', array('%site' => $feed->label(), '%error' => $response->getStatusCode() . ' ' . $response->getReasonPhrase()))); return FALSE; } catch (RequestException $e) { watchdog('aggregator', 'The feed from %site seems to be broken because of error "%error".', array('%site' => $feed->label(), '%error' => $e->getMessage()), WATCHDOG_WARNING); drupal_set_message(t('The feed from %site seems to be broken because of error "%error".', array('%site' => $feed->label(), '%error' => $e->getMessage()))); return FALSE; } } }
nickopris/musicapp
www/core/modules/aggregator/lib/Drupal/aggregator/Plugin/aggregator/fetcher/DefaultFetcher.php
PHP
apache-2.0
3,618
#include <qpdf/CryptoRandomDataProvider.hh> #include <qpdf/QPDFCryptoProvider.hh> CryptoRandomDataProvider::CryptoRandomDataProvider() { } CryptoRandomDataProvider::~CryptoRandomDataProvider() { } void CryptoRandomDataProvider::provideRandomData(unsigned char* data, size_t len) { auto crypto = QPDFCryptoProvider::getImpl(); crypto->provideRandomData(data, len); } RandomDataProvider* CryptoRandomDataProvider::getInstance() { static CryptoRandomDataProvider instance; return &instance; }
qpdf/qpdf
libqpdf/CryptoRandomDataProvider.cc
C++
apache-2.0
511
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // To run the e2e tests against one or more hosts on gce: // $ go run run_e2e.go --logtostderr --v 2 --ssh-env gce --hosts <comma separated hosts> // To run the e2e tests against one or more images on gce and provision them: // $ go run run_e2e.go --logtostderr --v 2 --project <project> --zone <zone> --ssh-env gce --images <comma separated images> package main import ( "flag" "fmt" "io/ioutil" "math/rand" "net/http" "os" "strings" "sync" "time" "k8s.io/kubernetes/test/e2e_node" "github.com/ghodss/yaml" "github.com/golang/glog" "github.com/pborman/uuid" "golang.org/x/oauth2" "golang.org/x/oauth2/google" "google.golang.org/api/compute/v1" ) var testArgs = flag.String("test_args", "", "Space-separated list of arguments to pass to Ginkgo test runner.") var instanceNamePrefix = flag.String("instance-name-prefix", "", "prefix for instance names") var zone = flag.String("zone", "", "gce zone the hosts live in") var project = flag.String("project", "", "gce project the hosts live in") var imageConfigFile = flag.String("image-config-file", "", "yaml file describing images to run") var imageProject = flag.String("image-project", "", "gce project the hosts live in") var images = flag.String("images", "", "images to test") var hosts = flag.String("hosts", "", "hosts to test") var cleanup = flag.Bool("cleanup", true, "If true remove files from remote hosts and delete temporary instances") var deleteInstances = 
flag.Bool("delete-instances", true, "If true, delete any instances created") var buildOnly = flag.Bool("build-only", false, "If true, build e2e_node_test.tar.gz and exit.") var setupNode = flag.Bool("setup-node", false, "When true, current user will be added to docker group on the test machine") var instanceMetadata = flag.String("instance-metadata", "", "key/value metadata for instances separated by '=' or '<', 'k=v' means the key is 'k' and the value is 'v'; 'k<p' means the key is 'k' and the value is extracted from the local path 'p', e.g. k1=v1,k2<p2") var computeService *compute.Service type Archive struct { sync.Once path string err error } var arc Archive type TestResult struct { output string err error host string exitOk bool } // ImageConfig specifies what images should be run and how for these tests. // It can be created via the `--images` and `--image-project` flags, or by // specifying the `--image-config-file` flag, pointing to a json or yaml file // of the form: // // images: // short-name: // image: gce-image-name // project: gce-image-project type ImageConfig struct { Images map[string]GCEImage `json:"images"` } type GCEImage struct { Image string `json:"image"` Project string `json:"project"` } func main() { flag.Parse() rand.Seed(time.Now().UTC().UnixNano()) if *buildOnly { // Build the archive and exit e2e_node.CreateTestArchive() return } if *hosts == "" && *imageConfigFile == "" && *images == "" { glog.Fatalf("Must specify one of --image-config-file, --hosts, --images.") } gceImages := &ImageConfig{ Images: make(map[string]GCEImage), } if *imageConfigFile != "" { // parse images imageConfigData, err := ioutil.ReadFile(*imageConfigFile) if err != nil { glog.Fatalf("Could not read image config file provided: %v", err) } err = yaml.Unmarshal(imageConfigData, gceImages) if err != nil { glog.Fatalf("Could not parse image config file: %v", err) } } // Allow users to specify additional images via cli flags for local testing // convenience; merge in 
with config file if *images != "" { if *imageProject == "" { glog.Fatal("Must specify --image-project if you specify --images") } cliImages := strings.Split(*images, ",") for _, img := range cliImages { gceImages.Images[img] = GCEImage{ Image: img, Project: *imageProject, } } } if len(gceImages.Images) != 0 && *zone == "" { glog.Fatal("Must specify --zone flag") } for shortName, image := range gceImages.Images { if image.Project == "" { glog.Fatalf("Invalid config for %v; must specify a project", shortName) } } if len(gceImages.Images) != 0 { if *project == "" { glog.Fatal("Must specify --project flag to launch images into") } } if *instanceNamePrefix == "" { *instanceNamePrefix = "tmp-node-e2e-" + uuid.NewUUID().String()[:8] } // Setup coloring stat, _ := os.Stdout.Stat() useColor := (stat.Mode() & os.ModeCharDevice) != 0 blue := "" noColour := "" if useColor { blue = "\033[0;34m" noColour = "\033[0m" } go arc.getArchive() defer arc.deleteArchive() var err error computeService, err = getComputeClient() if err != nil { glog.Fatalf("Unable to create gcloud compute service using defaults. Make sure you are authenticated. 
%v", err) } results := make(chan *TestResult) running := 0 for shortName, image := range gceImages.Images { running++ fmt.Printf("Initializing e2e tests using image %s.\n", shortName) go func(image, imageProject string, junitFileNum int) { results <- testImage(image, imageProject, junitFileNum) }(image.Image, image.Project, running) } if *hosts != "" { for _, host := range strings.Split(*hosts, ",") { fmt.Printf("Initializing e2e tests using host %s.\n", host) running++ go func(host string, junitFileNum int) { results <- testHost(host, *cleanup, junitFileNum, *setupNode) }(host, running) } } // Wait for all tests to complete and emit the results errCount := 0 exitOk := true for i := 0; i < running; i++ { tr := <-results host := tr.host fmt.Printf("%s================================================================%s\n", blue, noColour) if tr.err != nil { errCount++ fmt.Printf("Failure Finished Host %s Test Suite\n%s\n%v\n", host, tr.output, tr.err) } else { fmt.Printf("Success Finished Host %s Test Suite\n%s\n", host, tr.output) } exitOk = exitOk && tr.exitOk fmt.Printf("%s================================================================%s\n", blue, noColour) } // Set the exit code if there were failures if !exitOk { fmt.Printf("Failure: %d errors encountered.", errCount) os.Exit(1) } } func (a *Archive) getArchive() (string, error) { a.Do(func() { a.path, a.err = e2e_node.CreateTestArchive() }) return a.path, a.err } func (a *Archive) deleteArchive() { path, err := a.getArchive() if err != nil { return } os.Remove(path) } // Run tests in archive against host func testHost(host string, deleteFiles bool, junitFileNum int, setupNode bool) *TestResult { instance, err := computeService.Instances.Get(*project, *zone, host).Do() if err != nil { return &TestResult{ err: err, host: host, exitOk: false, } } if strings.ToUpper(instance.Status) != "RUNNING" { err = fmt.Errorf("instance %s not in state RUNNING, was %s.", host, instance.Status) return &TestResult{ err: err, host: 
host, exitOk: false, } } externalIp := getExternalIp(instance) if len(externalIp) > 0 { e2e_node.AddHostnameIp(host, externalIp) } path, err := arc.getArchive() if err != nil { // Don't log fatal because we need to do any needed cleanup contained in "defer" statements return &TestResult{ err: fmt.Errorf("unable to create test archive %v.", err), } } output, exitOk, err := e2e_node.RunRemote(path, host, deleteFiles, junitFileNum, setupNode, *testArgs) return &TestResult{ output: output, err: err, host: host, exitOk: exitOk, } } // Provision a gce instance using image and run the tests in archive against the instance. // Delete the instance afterward. func testImage(image, imageProject string, junitFileNum int) *TestResult { host, err := createInstance(image, imageProject) if *deleteInstances { defer deleteInstance(image) } if err != nil { return &TestResult{ err: fmt.Errorf("unable to create gce instance with running docker daemon for image %s. %v", image, err), } } // Only delete the files if we are keeping the instance and want it cleaned up. 
// If we are going to delete the instance, don't bother with cleaning up the files deleteFiles := !*deleteInstances && *cleanup return testHost(host, deleteFiles, junitFileNum, *setupNode) } // Provision a gce instance using image func createInstance(image, imageProject string) (string, error) { name := imageToInstanceName(image) i := &compute.Instance{ Name: name, MachineType: machineType(), NetworkInterfaces: []*compute.NetworkInterface{ { AccessConfigs: []*compute.AccessConfig{ { Type: "ONE_TO_ONE_NAT", Name: "External NAT", }, }}, }, Disks: []*compute.AttachedDisk{ { AutoDelete: true, Boot: true, Type: "PERSISTENT", InitializeParams: &compute.AttachedDiskInitializeParams{ SourceImage: sourceImage(image, imageProject), }, }, }, } if *instanceMetadata != "" { glog.V(2).Infof("parsing instance metadata: %q", *instanceMetadata) raw := parseInstanceMetadata(*instanceMetadata) glog.V(3).Infof("parsed instance metadata: %v", raw) i.Metadata = &compute.Metadata{} metadata := []*compute.MetadataItems{} for k, v := range raw { val := v metadata = append(metadata, &compute.MetadataItems{ Key: k, Value: &val, }) } i.Metadata.Items = metadata } op, err := computeService.Instances.Insert(*project, *zone, i).Do() if err != nil { return "", err } if op.Error != nil { return "", fmt.Errorf("could not create instance %s: %+v", name, op.Error) } instanceRunning := false for i := 0; i < 30 && !instanceRunning; i++ { if i > 0 { time.Sleep(time.Second * 20) } var instance *compute.Instance instance, err = computeService.Instances.Get(*project, *zone, name).Do() if err != nil { continue } if strings.ToUpper(instance.Status) != "RUNNING" { err = fmt.Errorf("instance %s not in state RUNNING, was %s.", name, instance.Status) continue } externalIp := getExternalIp(instance) if len(externalIp) > 0 { e2e_node.AddHostnameIp(name, externalIp) } var output string output, err = e2e_node.RunSshCommand("ssh", e2e_node.GetHostnameOrIp(name), "--", "sudo", "docker", "version") if err != nil { err 
= fmt.Errorf("instance %s not running docker daemon - Command failed: %s", name, output) continue } if !strings.Contains(output, "Server") { err = fmt.Errorf("instance %s not running docker daemon - Server not found: %s", name, output) continue } instanceRunning = true } return name, err } func getExternalIp(instance *compute.Instance) string { for i := range instance.NetworkInterfaces { ni := instance.NetworkInterfaces[i] for j := range ni.AccessConfigs { ac := ni.AccessConfigs[j] if len(ac.NatIP) > 0 { return ac.NatIP } } } return "" } func getComputeClient() (*compute.Service, error) { const retries = 10 const backoff = time.Second * 6 // Setup the gce client for provisioning instances // Getting credentials on gce jenkins is flaky, so try a couple times var err error var cs *compute.Service for i := 0; i < retries; i++ { if i > 0 { time.Sleep(backoff) } var client *http.Client client, err = google.DefaultClient(oauth2.NoContext, compute.ComputeScope) if err != nil { continue } cs, err = compute.New(client) if err != nil { continue } return cs, nil } return nil, err } func deleteInstance(image string) { _, err := computeService.Instances.Delete(*project, *zone, imageToInstanceName(image)).Do() if err != nil { glog.Infof("Error deleting instance %s", imageToInstanceName(image)) } } func parseInstanceMetadata(str string) map[string]string { metadata := make(map[string]string) ss := strings.Split(str, ",") for _, s := range ss { kv := strings.Split(s, "=") if len(kv) == 2 { metadata[kv[0]] = kv[1] continue } kp := strings.Split(s, "<") if len(kp) != 2 { glog.Fatalf("Invalid instance metadata: %q", s) continue } v, err := ioutil.ReadFile(kp[1]) if err != nil { glog.Fatalf("Failed to read metadata file %q: %v", kp[1], err) continue } metadata[kp[0]] = string(v) } return metadata } func imageToInstanceName(image string) string { return *instanceNamePrefix + "-" + image } func sourceImage(image, imageProject string) string { return 
fmt.Sprintf("projects/%s/global/images/%s", imageProject, image) } func machineType() string { return fmt.Sprintf("zones/%s/machineTypes/n1-standard-1", *zone) }
MustWin/kubernetes
test/e2e_node/runner/run_e2e.go
GO
apache-2.0
13,026
package com.ctrip.zeus.service.model.handler.impl; import com.ctrip.zeus.dao.entity.SlbGroupStatusR; import com.ctrip.zeus.dao.entity.SlbGroupStatusRExample; import com.ctrip.zeus.dao.entity.SlbGroupVsR; import com.ctrip.zeus.dao.entity.SlbGroupVsRExample; import com.ctrip.zeus.dao.mapper.SlbGroupStatusRMapper; import com.ctrip.zeus.dao.mapper.SlbGroupVsRMapper; import com.ctrip.zeus.model.model.Group; import com.ctrip.zeus.model.model.GroupVirtualServer; import com.ctrip.zeus.model.model.VirtualServer; import com.ctrip.zeus.service.SmartGroupStatusRMapper; import com.ctrip.zeus.service.model.IdVersion; import com.ctrip.zeus.service.model.handler.model.GroupVirtualServerContent; import com.ctrip.zeus.support.DefaultObjectJsonParser; import com.ctrip.zeus.support.DefaultObjectJsonWriter; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.nio.charset.StandardCharsets; import java.util.*; /** * Created by zhoumy on 2015/12/22. */ @Component("slbGroupVsRelMaintainer") public class SlbGroupVsRelMaintainer extends AbstractMultiRelMaintainer<SlbGroupVsR, GroupVirtualServer, Group> { @Resource private SlbGroupVsRMapper slbGroupVsRMapper; @Resource private SmartGroupStatusRMapper smartGroupStatusRMapper; @Resource private SlbGroupStatusRMapper slbGroupStatusRMapper; public SlbGroupVsRelMaintainer() { super(SlbGroupVsR.class, Group.class); } @Override public void updateByPrimaryKey(SlbGroupVsR[] values) throws Exception { if (values == null || values.length == 0) return; slbGroupVsRMapper.batchUpdate(Arrays.asList(values)); } @Override protected IdVersion getIdxKey(SlbGroupVsR rel) throws Exception { return new IdVersion(rel.getGroupId(), rel.getGroupVersion()); } @Override protected void setDo(Group object, GroupVirtualServer value, SlbGroupVsR target) throws Exception { GroupVirtualServerContent gvsContent = DefaultObjectJsonParser.parse(DefaultObjectJsonWriter.write(value), GroupVirtualServerContent.class); 
gvsContent.setGroupId(object.getId()) .setGroupVersion(object.getVersion()) .setVirtualServer(new VirtualServer().setId(value.getVirtualServer().getId())); target.setGroupId(object.getId()); target.setVsId(value.getVirtualServer().getId()); target.setGroupVersion(object.getVersion()); target.setPriority(0); target.setContent(DefaultObjectJsonWriter.write(gvsContent).getBytes(StandardCharsets.UTF_8)); } @Override protected List<SlbGroupVsR> getRelsByObjectId(Group object) throws Exception { return slbGroupVsRMapper.selectByExampleWithBLOBs(new SlbGroupVsRExample().createCriteria().andGroupIdEqualTo(object.getId()).example()); } @Override protected List<SlbGroupVsR> getRelsByObjectId(Long[] objectIds) throws Exception { if (objectIds == null || objectIds.length == 0) return Collections.EMPTY_LIST; return slbGroupVsRMapper.selectByExampleWithBLOBs(new SlbGroupVsRExample().createCriteria().andGroupIdIn(Arrays.asList(objectIds)).example()); } @Override protected Integer[] getStatusByObjectId(Group object) throws Exception { SlbGroupStatusR d = slbGroupStatusRMapper.selectOneByExample(new SlbGroupStatusRExample().createCriteria().andGroupIdEqualTo(object.getId()).example()); return new Integer[]{d.getOfflineVersion(), d.getOnlineVersion()}; } @Override protected Map<Long, Integer[]> getStatusByObjectId(Long[] objectIds) throws Exception { Map<Long, Integer[]> result = new HashMap<>(); if (objectIds == null || objectIds.length == 0) return result; for (SlbGroupStatusR d : smartGroupStatusRMapper.selectByExample(new SlbGroupStatusRExample().createCriteria().andGroupIdIn(Arrays.asList(objectIds)).example())) { result.put(d.getGroupId(), new Integer[]{d.getOfflineVersion(), d.getOnlineVersion()}); } return result; } @Override public void insert(SlbGroupVsR[] values) throws Exception { if (values == null || values.length == 0) return; slbGroupVsRMapper.batchInsert(Arrays.asList(values)); } @Override public void deleteByPrimaryKey(SlbGroupVsR[] values) throws Exception { if 
(values == null || values.length == 0) return; slbGroupVsRMapper.batchDelete(Arrays.asList(values)); } @Override public void clear(Long objectId) throws Exception { slbGroupVsRMapper.deleteByExample(new SlbGroupVsRExample().createCriteria().andGroupIdEqualTo(objectId).example()); } @Override public List<GroupVirtualServer> get(Group object) throws Exception { return object.getGroupVirtualServers(); } }
sdgdsffdsfff/zeus
slb/src/main/java/com/ctrip/zeus/service/model/handler/impl/SlbGroupVsRelMaintainer.java
Java
apache-2.0
4,958
// ---------------------------------------------------------------------------------- // // Copyright Microsoft Corporation // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------------- using System.Management.Automation; using Microsoft.Azure.Commands.Common.Authentication.Models; using Microsoft.Azure.Commands.ResourceManager.Common; using Microsoft.Azure.Commands.Sql.Services; using Microsoft.Azure.ServiceManagemenet.Common.Models; using Microsoft.WindowsAzure.Commands.Utilities.Common; namespace Microsoft.Azure.Commands.Sql.Common { /// <summary> /// The base class for all Azure Sql cmdlets /// </summary> public abstract class AzureSqlCmdletBase<M, A> : AzureRMCmdlet { /// <summary> /// Stores the per request session Id for all request made in this cmdlet call. /// </summary> protected string clientRequestId { get; set; } /// <summary> /// Default constructor. Generates a request ID /// </summary> internal AzureSqlCmdletBase() { this.clientRequestId = Util.GenerateTracingId(); } /// <summary> /// Gets or sets the name of the resource group to use. 
/// </summary> [Parameter(Mandatory = true, ValueFromPipelineByPropertyName = true, Position = 0, HelpMessage = "The name of the resource group")] [ValidateNotNullOrEmpty] public string ResourceGroupName { get; set; } /// <summary> /// The ModelAdapter object used by this cmdlet /// </summary> public A ModelAdapter { get; internal set; } /// <summary> /// Gets an entity from the service /// </summary> /// <returns>A model object</returns> protected abstract M GetEntity(); /// <summary> /// Updates the given model element with the cmdlet specific operation /// </summary> /// <param name="model">A model object</param> protected virtual M ApplyUserInputToModel(M model) { return model; } /// <summary> /// This method is responsible to call the right API in the communication layer that will eventually send the information in the /// object to the REST endpoint /// </summary> /// <param name="entity">The model object with the data to be sent to the REST endpoints</param> protected virtual M PersistChanges(M entity) { return default(M); } /// <summary> /// Returns true if the model object that was constructed by this cmdlet should be written out /// </summary> /// <returns>True if the model object should be written out, False otherwise</returns> protected virtual bool WriteResult() { return true; } /// <summary> /// Creation and initialization of the ModelAdapter object /// </summary> /// <param name="subscription">The AzureSubscription in which the current execution is performed</param> /// <returns>An initialized and ready to use ModelAdapter object</returns> protected abstract A InitModelAdapter(AzureSubscription subscription); /// <summary> /// Transforms the given model object to be an object that is written out /// </summary> /// <param name="model">The about to be written model object</param> /// <returns>The prepared object to be written out</returns> protected virtual object TransformModelToOutputObject(M model) { return model; } /// <summary> /// Executes the 
cmdlet /// </summary> public override void ExecuteCmdlet() { ModelAdapter = InitModelAdapter(DefaultProfile.Context.Subscription); M model = this.GetEntity(); M updatedModel = this.ApplyUserInputToModel(model); M responseModel = this.PersistChanges(updatedModel); if(responseModel != null) { if (WriteResult()) { this.WriteObject(TransformModelToOutputObject(responseModel), true); } } else { if (WriteResult()) { this.WriteObject(TransformModelToOutputObject(updatedModel)); } } } } }
dulems/azure-powershell
src/ResourceManager/Sql/Commands.Sql/Common/AzureSqlCmdletBase.cs
C#
apache-2.0
4,968
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package com.intellij.toolWindow; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.UISettingsListener; import com.intellij.openapi.util.Key; import com.intellij.openapi.wm.ToolWindowAnchor; import com.intellij.openapi.wm.impl.AbstractDroppableStripe; import com.intellij.openapi.wm.impl.ToolWindowImpl; import com.intellij.ui.ClientProperty; import com.intellij.ui.ColorUtil; import com.intellij.ui.Gray; import com.intellij.ui.JBColor; import com.intellij.ui.paint.LinePainter2D; import com.intellij.util.ui.JBSwingUtilities; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.StartupUiUtil; import com.intellij.util.ui.UIUtil; import org.intellij.lang.annotations.MagicConstant; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import java.awt.*; /** * @author Eugene Belyaev */ final class Stripe extends AbstractDroppableStripe implements UISettingsListener { static final Key<Rectangle> VIRTUAL_BOUNDS = Key.create("Virtual stripe bounds"); @MagicConstant(intValues = {SwingConstants.CENTER, SwingConstants.TOP, SwingConstants.LEFT, SwingConstants.BOTTOM, SwingConstants.RIGHT}) private final int anchor; Stripe(@MagicConstant(intValues = {SwingConstants.CENTER, SwingConstants.TOP, SwingConstants.LEFT, SwingConstants.BOTTOM, SwingConstants.RIGHT}) int anchor) { super(new GridBagLayout()); setOpaque(true); this.anchor = anchor; setBorder(new AdaptiveBorder()); } @Override public boolean isNewStripes() { return false; } @Override public @NotNull Dimension getPreferredSize() { if (computedPreferredSize == null) { computedPreferredSize = getButtons().isEmpty() ? 
JBUI.emptySize() : recomputeBounds(false, null, false).size; } return computedPreferredSize; } @Override protected ToolWindowImpl getToolWindowFor(@NotNull JComponent component) { return ((StripeButton)component).getToolWindow$intellij_platform_ide_impl(); } @Override public void uiSettingsChanged(@NotNull UISettings uiSettings) { updatePresentation(); } private static final class AdaptiveBorder implements Border { @Override public void paintBorder(@NotNull Component c, Graphics g, int x, int y, int width, int height) { Insets insets = ((JComponent)c).getInsets(); g.setColor(UIUtil.CONTRAST_BORDER_COLOR); drawBorder((Graphics2D)g, x, y, width, height, insets); } private static void drawBorder(Graphics2D g, int x, int y, int width, int height, Insets insets) { if (insets.top == 1) { LinePainter2D.paint(g, x, y, x + width, y); } if (insets.right == 1) { LinePainter2D.paint(g, x + width - 1, y, x + width - 1, y + height); } if (insets.left == 1) { LinePainter2D.paint(g, x, y, x, y + height); } if (insets.bottom == 1) { LinePainter2D.paint(g, x, y + height - 1, x + width, y + height - 1); } if (!StartupUiUtil.isUnderDarcula()) { return; } Color c = g.getColor(); if (insets.top == 2) { g.setColor(c); LinePainter2D.paint(g, x, y, x + width, y); g.setColor(Gray._85); LinePainter2D.paint(g, x, y + 1, x + width, y + 1); } if (insets.right == 2) { g.setColor(Gray._85); LinePainter2D.paint(g, x + width - 1, y, x + width - 1, y + height); g.setColor(c); LinePainter2D.paint(g, x + width - 2, y, x + width - 2, y + height); } if (insets.left == 2) { g.setColor(Gray._85); LinePainter2D.paint(g, x + 1, y, x + 1, y + height); g.setColor(c); LinePainter2D.paint(g, x, y, x, y + height); } } @SuppressWarnings("UseDPIAwareInsets") @Override public Insets getBorderInsets(@NotNull Component c) { Stripe stripe = (Stripe)c; ToolWindowAnchor anchor = stripe.getAnchor(); if (anchor == ToolWindowAnchor.LEFT) { return new Insets(1, 0, 0, 1); } else if (anchor == ToolWindowAnchor.RIGHT) { return 
new Insets(1, 1, 0, 0); } else if (anchor == ToolWindowAnchor.TOP) { return new Insets(1, 0, 0, 0); } else { return new Insets(1, 0, 0, 0); } } @Override public boolean isBorderOpaque() { return true; } } @Override public @NotNull ToolWindowAnchor getAnchor() { return ToolWindowAnchor.get(anchor); } public void startDrag() { revalidate(); repaint(); } public void stopDrag() { revalidate(); repaint(); } @Override public @Nullable StripeButtonManager getButtonFor(@NotNull String toolWindowId) { for (StripeButtonManager it : getButtons()) { if (it.getId().equals(toolWindowId)) { return it; } } return null; } public void setOverlaid(boolean overlaid) { Color bg = JBColor.PanelBackground; if (overlaid) { setBackground(ColorUtil.toAlpha(bg, 190)); } else { setBackground(bg); } } @Override public boolean isHorizontal() { return anchor == SwingConstants.TOP || anchor == SwingConstants.BOTTOM; } @Override public boolean containsPoint(@NotNull Point screenPoint) { Point point = screenPoint.getLocation(); SwingUtilities.convertPointFromScreen(point, isVisible() ? 
this : getParent()); int width = getWidth(); int height = getHeight(); if (!isVisible()) { Rectangle bounds = ClientProperty.get(this, VIRTUAL_BOUNDS); if (bounds != null) { point.x -= bounds.x; point.y -= bounds.y; width = bounds.width; height = bounds.height; } } int areaSize = Math.min(Math.min(getParent().getWidth() / 2, getParent().getHeight() / 2), JBUI.scale(DROP_DISTANCE_SENSITIVITY)); Point[] points = {new Point(0, 0), new Point(width, 0), new Point(width, height), new Point(0, height)}; switch (anchor) { //Top area should be is empty due to IDEA-271100 case SwingConstants.TOP: { updateLocation(points, 1, 2, 0, 0, areaSize); updateLocation(points, 0, 3, 0, 0, areaSize); break; } case SwingConstants.LEFT: { updateLocation(points, 0, 1, 1, 0, areaSize); updateLocation(points, 3, 2, 1, -1, areaSize); break; } case SwingConstants.BOTTOM: { updateLocation(points, 3, 0, 1, -1, areaSize); updateLocation(points, 2, 1, -1, -1, areaSize); break; } case SwingConstants.RIGHT: { updateLocation(points, 1, 0, -1, 0, areaSize); updateLocation(points, 2, 3, -1, 1, areaSize); } } return new Polygon(new int[]{points[0].x, points[1].x, points[2].x, points[3].x}, new int[]{points[0].y, points[1].y, points[2].y, points[3].y}, 4).contains(point); } private static void updateLocation(Point[] points, int indexBase, int indexDest, int xSign, int ySign, int areaSize) { points[indexDest].setLocation(points[indexBase].x + xSign * areaSize, points[indexBase].y + ySign * areaSize); } @Override public String toString() { @NonNls String anchor = null; switch (this.anchor) { case SwingConstants.TOP: anchor = "TOP"; break; case SwingConstants.BOTTOM: anchor = "BOTTOM"; break; case SwingConstants.LEFT: anchor = "LEFT"; break; case SwingConstants.RIGHT: anchor = "RIGHT"; break; } return getClass().getName() + " " + anchor; } @Override protected Graphics getComponentGraphics(Graphics g) { return JBSwingUtilities.runGlobalCGTransform(this, super.getComponentGraphics(g)); } @Override protected 
void paintComponent(@NotNull Graphics g) { super.paintComponent(g); if (!StartupUiUtil.isUnderDarcula()) { ToolWindowAnchor anchor = getAnchor(); g.setColor(Gray._255.withAlpha(40)); Rectangle r = getBounds(); if (anchor == ToolWindowAnchor.LEFT || anchor == ToolWindowAnchor.RIGHT) { LinePainter2D.paint((Graphics2D)g, 0, 0, 0, r.height); LinePainter2D.paint((Graphics2D)g, r.width - 2, 0, r.width - 2, r.height); } else { LinePainter2D.paint((Graphics2D)g, 0, 1, r.width, 1); LinePainter2D.paint((Graphics2D)g, 0, r.height - 1, r.width, r.height - 1); } } } }
jwren/intellij-community
platform/platform-impl/src/com/intellij/toolWindow/Stripe.java
Java
apache-2.0
8,601
/** * Provides annotations represents feature. */ package com.develhack.annotation.feature;
develhack/core
src/main/java/com/develhack/annotation/feature/package-info.java
Java
apache-2.0
93
#!/bin/bash -eux echo "==> Recording box generation date" date > /etc/vagrant_box_build_date echo "==> Customizing message of the day" MOTD_FILE=/etc/motd BANNER_WIDTH=64 PLATFORM_RELEASE=$(sed 's/^.\+ release \([.0-9]\+\).*/\1/' /etc/redhat-release) PLATFORM_MSG=$(printf 'Oracle Linux %s' "$PLATFORM_RELEASE") BUILT_MSG=$(printf 'built %s' $(date +%Y-%m-%d)) printf '%0.1s' "-"{1..64} > ${MOTD_FILE} printf '\n' >> ${MOTD_FILE} printf '%2s%-30s%30s\n' " " "${PLATFORM_MSG}" "${BUILT_MSG}" >> ${MOTD_FILE} printf '%0.1s' "-"{1..64} >> ${MOTD_FILE} printf '\n' >> ${MOTD_FILE}
boxcutter/oraclelinux
script/motd.sh
Shell
apache-2.0
579