text
stringlengths
7
1.01M
/*
 * Copyright (c) 2001, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package nsk.jdwp.ClassType.SetValues;

import java.io.*;

import nsk.share.*;
import nsk.share.jpda.*;
import nsk.share.jdwp.*;

/**
 * Test for JDWP command: ClassType.SetValues.
 *
 * The test launches a debuggee, reads reference static field values from a
 * "target values" class in the debuggee, writes those values into the static
 * fields of a tested class via the ClassType.SetValues command, and finally
 * asks the debuggee to confirm the fields were actually changed.
 *
 * See setvalues001.README for description of test execution.
 *
 * Test is executed by invoking method runIt().
 * JDWP command is tested in method testCommand().
 *
 * @see #runIt()
 * @see #testCommand()
 */
public class setvalues001 {

    // exit status constants
    static final int JCK_STATUS_BASE = 95;
    static final int PASSED = 0;
    static final int FAILED = 2;

    // communication signals constants (exchanged with the debuggee over IOPipe)
    static final String READY = "ready";
    static final String RUN = "run";
    static final String DONE = "done";
    static final String ERROR = "error";
    static final String QUIT = "quit";

    // package and classes names constants
    static final String PACKAGE_NAME = "nsk.jdwp.ClassType.SetValues";
    static final String TEST_CLASS_NAME = PACKAGE_NAME + "." + "setvalues001";
    static final String DEBUGEE_CLASS_NAME = TEST_CLASS_NAME + "a";

    // tested JDWP command constants
    static final String JDWP_COMMAND_NAME = "ClassType.SetValues";
    static final int JDWP_COMMAND_ID = JDWP.Command.ClassType.SetValues;

    // tested class name and JNI-style signature constants
    static final String TESTED_CLASS_NAME = DEBUGEE_CLASS_NAME + "$" + "TestedClass";
    static final String TESTED_CLASS_SIGNATURE = "L" + TESTED_CLASS_NAME.replace('.', '/') + ";";

    // target values class name and JNI-style signature constants
    static final String TARGET_VALUES_CLASS_NAME = DEBUGEE_CLASS_NAME + "$" + "TargetValuesClass";
    static final String TARGET_VALUES_CLASS_SIGNATURE = "L" + TARGET_VALUES_CLASS_NAME.replace('.', '/') + ";";

    // usual scaffold objects
    ArgumentHandler argumentHandler = null;
    Log log = null;
    Binder binder = null;
    Debugee debugee = null;
    Transport transport = null;
    IOPipe pipe = null;

    // test passed or not
    boolean success = true;

    // -------------------------------------------------------------------

    /**
     * Start test from command line.
     */
    public static void main(String argv[]) {
        System.exit(run(argv, System.out) + JCK_STATUS_BASE);
    }

    /**
     * Start JCK-compliant test.
     */
    public static int run(String argv[], PrintStream out) {
        return new setvalues001().runIt(argv, out);
    }

    // -------------------------------------------------------------------

    /**
     * Perform test execution: launch the debuggee, collect target values and
     * fieldIDs from it, run the tested command, and verify the result.
     * Returns PASSED or FAILED.
     */
    public int runIt(String argv[], PrintStream out) {
        // make log for debugger messages
        argumentHandler = new ArgumentHandler(argv);
        log = new Log(out, argumentHandler);

        // execute test and display results
        try {
            log.display("\n>>> Preparing debugee for testing \n");

            // launch debugee
            binder = new Binder(argumentHandler, log);
            log.display("Launching debugee");
            debugee = binder.bindToDebugee(DEBUGEE_CLASS_NAME);
            transport = debugee.getTransport();
            pipe = debugee.createIOPipe();

            // make debuggee ready for testing
            prepareDebugee();

            // work with prepared debugee
            try {
                log.display("\n>>> Obtaining requred data from debugee \n");

                // query debugee for classID for the class with target values
                log.display("Getting classID for class with target values by signature:\n"
                        + "  " + TARGET_VALUES_CLASS_SIGNATURE);
                long targetValuesClassID = debugee.getReferenceTypeID(TARGET_VALUES_CLASS_SIGNATURE);
                log.display("  got classID: " + targetValuesClassID);

                // query debugee for fieldIDs of the class static fields
                log.display("Getting fieldIDs for static fields of the class");
                long targetValuesFieldIDs[] = queryClassFieldIDs(targetValuesClassID);
                log.display("  got fields: " + targetValuesFieldIDs.length);
                // field count of the target values class drives all later checks
                int count = targetValuesFieldIDs.length;

                // query debugee for values of the fields
                log.display("Getting values of the static fields");
                JDWP.Value targetValues[] = queryClassFieldValues(targetValuesClassID, targetValuesFieldIDs);
                log.display("  got values: " + targetValues.length);
                if (targetValues.length != count) {
                    throw new Failure("Unexpected number of static fields values received: " + targetValues.length
                            + "(expected: " + count + ")");
                }

                // query debugee for classID of the tested class
                log.display("Getting tested classID by signature:\n" + "  " + TESTED_CLASS_SIGNATURE);
                long testedClassID = debugee.getReferenceTypeID(TESTED_CLASS_SIGNATURE);
                log.display("  got classID: " + testedClassID);

                // query debugee for fieldIDs of tested class static fields
                log.display("Getting fieldIDs for static fields of the tested class");
                long testedFieldIDs[] = queryClassFieldIDs(testedClassID);
                log.display("  got fields: " + testedFieldIDs.length);
                // tested class is expected to mirror the target values class field-for-field
                if (testedFieldIDs.length != count) {
                    throw new Failure("Unexpected number of static fields of tested class received: "
                            + testedFieldIDs.length + "(expected: " + count + ")");
                }

                // perform testing JDWP command
                log.display("\n>>> Testing JDWP command \n");
                testCommand(testedClassID, testedFieldIDs, targetValues);

                // check confirmation from debuggee that values have been set properly
                log.display("\n>>> Checking that the values have been set properly \n");
                checkValuesChanged();

            } finally {
                // quit debugee regardless of test outcome
                log.display("\n>>> Finishing test \n");
                quitDebugee();
            }

        } catch (Failure e) {
            log.complain("TEST FAILED: " + e.getMessage());
            e.printStackTrace(out);
            success = false;
        } catch (Exception e) {
            log.complain("Caught unexpected exception:\n" + e);
            e.printStackTrace(out);
            success = false;
        }

        if (!success) {
            log.complain("TEST FAILED");
            return FAILED;
        }

        out.println("TEST PASSED");
        return PASSED;
    }

    /**
     * Prepare debugee for testing and waiting for ready signal.
     */
    void prepareDebugee() {
        // wait for VM_INIT event from debugee
        log.display("Waiting for VM_INIT event");
        debugee.waitForVMInit();

        // query debugee for VM-dependent ID sizes
        log.display("Querying for IDSizes");
        debugee.queryForIDSizes();

        // resume initially suspended debugee
        log.display("Resuming debugee VM");
        debugee.resume();

        // wait for READY signal from debugee
        log.display("Waiting for signal from debugee: " + READY);
        String signal = pipe.readln();
        log.display("Received signal from debugee: " + signal);
        if (! signal.equals(READY)) {
            throw new TestBug("Unexpected signal received form debugee: " + signal
                    + " (expected: " + READY + ")");
        }
    }

    /**
     * Send debugee the signal to quit and wait until it exits; marks the test
     * failed if the debuggee's exit code is not JCK_STATUS_BASE + PASSED.
     */
    void quitDebugee() {
        // send debugee signal to quit
        log.display("Sending signal to debugee: " + QUIT);
        pipe.println(QUIT);

        // wait for debugee exits
        log.display("Waiting for debugee exits");
        int code = debugee.waitFor();

        // analyze debugee exit status code
        if (code == JCK_STATUS_BASE + PASSED) {
            log.display("Debugee PASSED with exit code: " + code);
        } else {
            log.complain("Debugee FAILED with exit code: " + code);
            success = false;
        }
    }

    /**
     * Query debugee for fieldID's of the class static fields
     * (via the ReferenceType.Fields command).
     */
    long[] queryClassFieldIDs(long classID) {
        // compose ReferenceType.Fields command packet
        CommandPacket command = new CommandPacket(JDWP.Command.ReferenceType.Fields);
        command.addReferenceTypeID(classID);
        command.setLength();

        // send the command and receive reply
        ReplyPacket reply = debugee.receiveReplyFor(command);

        // extract fieldIDs from the reply packet
        try {
            reply.resetPosition();

            int declared = reply.getInt();
            long[] fieldIDs = new long[declared];
            for (int i = 0; i < declared; i++ ) {
                long fieldID = reply.getFieldID();
                // name, signature and modBits are read only to advance the
                // packet cursor past each field record; their values are unused
                String name = reply.getString();
                String signature = reply.getString();
                int modBits = reply.getInt();
                fieldIDs[i] = fieldID;
            }
            return fieldIDs;
        } catch (BoundException e) {
            log.complain("Unable to parse reply packet for ReferenceType.Fields command:\n\t" + e);
            log.complain("Received reply packet:\n" + reply);
            throw new Failure("Error occured while getting static fieldIDs for classID: " + classID);
        }
    }

    /**
     * Query debugee for values of the class static fields
     * (via the ReferenceType.GetValues command).
     */
    JDWP.Value[] queryClassFieldValues(long classID, long fieldIDs[]) {
        // compose ReferenceType.GetValues command packet
        int count = fieldIDs.length;
        CommandPacket command = new CommandPacket(JDWP.Command.ReferenceType.GetValues);
        command.addReferenceTypeID(classID);
        command.addInt(count);
        for (int i = 0; i < count; i++) {
            command.addFieldID(fieldIDs[i]);
        }
        command.setLength();

        // send the command and receive reply
        ReplyPacket reply = debugee.receiveReplyFor(command);

        // extract values from the reply packet
        try {
            reply.resetPosition();

            int valuesCount = reply.getInt();
            JDWP.Value values[] = new JDWP.Value[valuesCount];
            for (int i = 0; i < valuesCount; i++ ) {
                JDWP.Value value = reply.getValue();
                values[i] = value;
            }
            return values;
        } catch (BoundException e) {
            log.complain("Unable to parse reply packet for ReferenceType.GetValues command:\n\t" + e);
            log.complain("Received reply packet:\n" + reply);
            throw new Failure("Error occured while getting static fields values for classID: " + classID);
        }
    }

    /**
     * Perform testing JDWP command for specified classID: compose and send a
     * ClassType.SetValues packet assigning the given values to the given
     * fieldIDs, then check the reply header and that the reply carries no data.
     */
    void testCommand(long classID, long fieldIDs[], JDWP.Value values[]) {
        int count = fieldIDs.length;

        // create command packet
        log.display("Create command packet:");
        log.display("Command: " + JDWP_COMMAND_NAME);
        CommandPacket command = new CommandPacket(JDWP_COMMAND_ID);

        // add out data to the command packet
        log.display("  classID: " + classID);
        command.addReferenceTypeID(classID);
        log.display("  values: " + count);
        command.addInt(count);
        for (int i = 0; i < count; i++) {
            log.display("  field #" + i +":");
            log.display("    fieldID: " + fieldIDs[i]);
            command.addFieldID(fieldIDs[i]);
            JDWP.Value value = values[i];
            // ClassType.SetValues takes untagged values: the tag is used only
            // to encode the payload, it is not written to the wire
            JDWP.UntaggedValue untaggedValue = new JDWP.UntaggedValue(value.getValue());
            log.display("    untagged_value: " + untaggedValue.getValue());
            command.addUntaggedValue(untaggedValue, value.getTag());
        }
        command.setLength();

        // send command packet to debugee
        try {
            log.display("Sending command packet:\n" + command);
            transport.write(command);
        } catch (IOException e) {
            log.complain("Unable to send command packet:\n" + e);
            success = false;
            return;
        }

        ReplyPacket reply = new ReplyPacket();

        // receive reply packet from debugee
        try {
            log.display("Waiting for reply packet");
            transport.read(reply);
            log.display("Reply packet received:\n" + reply);
        } catch (IOException e) {
            log.complain("Unable to read reply packet:\n" + e);
            success = false;
            return;
        }

        // check reply packet header
        try{
            log.display("Checking reply packet header");
            reply.checkHeader(command.getPacketID());
        } catch (BoundException e) {
            log.complain("Bad header of reply packet: " + e.getMessage());
            success = false;
        }

        // start parsing reply packet data
        log.display("Parsing reply packet:");
        reply.resetPosition();

        // no data to extract: a ClassType.SetValues reply has an empty body

        // check for extra data in reply packet
        if (! reply.isParsed()) {
            log.complain("Extra trailing bytes found in reply packet at: "
                    + "0x" + reply.toHexString(reply.currentDataPosition(), 4));
            success = false;
        }
    }

    /**
     * Check confirmation from debuggee that values are changed: signal RUN and
     * expect DONE (values set correctly) or ERROR (mismatch) in response.
     */
    void checkValuesChanged() {
        // send debugee signal RUN
        log.display("Sending signal to debugee: " + RUN);
        pipe.println(RUN);

        // wait for DONE signal from debugee
        log.display("Waiting for signal from debugee: " + DONE);
        String signal = pipe.readln();
        log.display("Received signal from debugee: " + signal);

        // check received signal
        if (signal == null) {
            throw new TestBug("<null> signal received from debugee: " + signal
                    + " (expected: " + DONE + ")");
        } else if (signal.equals(DONE)) {
            log.display("All static fields values have been correctly set into debuggee VM");
        } else if (signal.equals(ERROR)) {
            log.complain("Not all static fields values have been correctly set into debuggee VM");
            success = false;
        } else {
            throw new TestBug("Unexpected signal received from debugee: " + signal
                    + " (expected: " + DONE + ")");
        }
    }
}
package com.bytatech.ayoos.consultation.service.mapper; import com.bytatech.ayoos.consultation.domain.*; import com.bytatech.ayoos.consultation.service.dto.SymptomDTO; import org.mapstruct.*; /** * Mapper for the entity {@link Symptom} and its DTO {@link SymptomDTO}. */ @Mapper(componentModel = "spring", uses = {ConsultationMapper.class}) public interface SymptomMapper extends EntityMapper<SymptomDTO, Symptom> { @Mapping(source = "consultation.id", target = "consultationId") SymptomDTO toDto(Symptom symptom); @Mapping(source = "consultationId", target = "consultation") Symptom toEntity(SymptomDTO symptomDTO); default Symptom fromId(Long id) { if (id == null) { return null; } Symptom symptom = new Symptom(); symptom.setId(id); return symptom; } }
package com.wangxl.yikemall.member.service.impl;

import org.springframework.stereotype.Service;

import java.util.Map;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.wangxl.common.utils.PageUtils;
import com.wangxl.common.utils.Query;
import com.wangxl.yikemall.member.dao.MemberLoginLogDao;
import com.wangxl.yikemall.member.entity.MemberLoginLogEntity;
import com.wangxl.yikemall.member.service.MemberLoginLogService;

/**
 * MyBatis-Plus backed service for member login-log records.
 */
@Service("umsMemberLoginLogService")
public class MemberLoginLogServiceImpl extends ServiceImpl<MemberLoginLogDao, MemberLoginLogEntity> implements MemberLoginLogService {

    /**
     * Pages through login-log entities using the paging parameters in
     * {@code params}; no filtering is applied (empty query wrapper).
     */
    @Override
    public PageUtils queryPage(Map<String, Object> params) {
        IPage<MemberLoginLogEntity> pageRequest = new Query<MemberLoginLogEntity>().getPage(params);
        IPage<MemberLoginLogEntity> result = this.page(pageRequest, new QueryWrapper<MemberLoginLogEntity>());
        return new PageUtils(result);
    }

}
/**
 *
 */
package org.funsoft.remoteagent.gui.component;

import javax.swing.*;
import java.awt.*;
import java.awt.event.InputEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;

/**
 * A {@link JList} that clears the selection instead of changing it when the
 * user clicks on the blank area below the last cell (unless Shift or the
 * platform menu-shortcut key is held).
 *
 * @author htb
 */
public class DefaultJList<E> extends JList<E> {

    public DefaultJList() {
        // Clear the selection on clicks that land outside every cell.
        addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                @SuppressWarnings("unchecked")
                JList<E> source = (JList<E>) e.getSource();
                boolean hitNoCell = source.locationToIndex(e.getPoint()) == -1;
                if (hitNoCell && !e.isShiftDown() && !isMenuShortcutKeyDown(e)) {
                    source.clearSelection();
                }
            }

            /** True when the platform menu-shortcut modifier (e.g. Ctrl/Cmd) is down. */
            private boolean isMenuShortcutKeyDown(InputEvent event) {
                int shortcutMask = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask();
                return (event.getModifiers() & shortcutMask) != 0;
            }
        });
    }

    /**
     * Unlike the superclass, reports -1 when {@code location} is below the last
     * cell rather than snapping to the nearest index.
     */
    @Override
    public int locationToIndex(Point location) {
        int index = super.locationToIndex(location);
        boolean outsideCell = index != -1 && !getCellBounds(index, index).contains(location);
        return outsideCell ? -1 : index;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.example.brooklyn.test.osgi.entities;

import org.apache.brooklyn.core.entity.AbstractApplication;
import org.apache.brooklyn.core.entity.StartableApplication;

/**
 * Trivial concrete implementation of {@code SimpleApplication} used in OSGi
 * entity tests; all behavior is inherited from {@link AbstractApplication}.
 */
public class SimpleApplicationImpl extends AbstractApplication implements StartableApplication, SimpleApplication {
}
package io.micronaut.servlet.http;

import io.micronaut.core.annotation.Internal;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequestWrapper;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.Optional;

/**
 * Internal class that represents an {@link ServletHttpRequest} that includes the body already decoded.
 *
 * @param <N> The native request type
 * @param <B> The body type
 * @author graemerocher
 * @since 2.0.0
 */
@Internal
final class ServletRequestAndBody<N, B> extends HttpRequestWrapper<B> implements ServletHttpRequest<N, B> {

    private final Argument<B> bodyType;

    /**
     * @param delegate The Http Request
     * @param bodyType The body, never null
     */
    ServletRequestAndBody(ServletHttpRequest<N, B> delegate, Argument<B> bodyType) {
        super(delegate);
        this.bodyType = Objects.requireNonNull(bodyType, "Body type cannot be null");
    }

    @Override
    public boolean isAsyncSupported() {
        return servletDelegate().isAsyncSupported();
    }

    @Override
    public Optional<B> getBody() {
        // Resolve the body against the captured body type.
        return getBody(bodyType);
    }

    @Override
    public InputStream getInputStream() throws IOException {
        return servletDelegate().getInputStream();
    }

    @Override
    public BufferedReader getReader() throws IOException {
        return servletDelegate().getReader();
    }

    @Override
    public N getNativeRequest() {
        return servletDelegate().getNativeRequest();
    }

    /**
     * The wrapped request, viewed through the servlet-specific interface.
     * The delegate is always a {@link ServletHttpRequest}, as enforced by the constructor.
     */
    @SuppressWarnings("unchecked")
    private ServletHttpRequest<N, B> servletDelegate() {
        return (ServletHttpRequest<N, B>) getDelegate();
    }
}
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.event;

import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.support.component.PropertyConfigurerSupport;

/**
 * Generated by camel build tools - do NOT edit this file!
 */
@SuppressWarnings("unchecked")
public class EventEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer {

    // Sets a single endpoint option by name; returns false for unknown names.
    // Each option is matched twice: once all-lowercase (for ignoreCase) and
    // once in its canonical camelCase form.
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        EventEndpoint target = (EventEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "basicpropertybinding":
        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
        case "exceptionhandler":
        case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
        case "exchangepattern":
        case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
        default: return false;
        }
    }

}
// Copyright (c) FIRST and other WPILib contributors. // Open Source Software; you can modify and/or share it under the terms of // the WPILib BSD license file in the root directory of this project. package frc.robot.subsystems.LEDs; import edu.wpi.first.wpilibj2.command.SubsystemBase; public interface LedsIO{ public default void setMode(LedMode mode){} public static enum LedMode{ // will put constant checks in here Shoot_Accuracy, } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flexunit.ant.report;

import java.text.MessageFormat;
import java.util.HashMap;

/**
 * Aggregate class representing a collection of Reports stored in a Map<String, Report>
 */
public class Reports extends HashMap<String, Report>
{
   private static final long serialVersionUID = 2078272511659655555L;

   private static final String TEST_INFO = "Tests run: {0}, Failures: {1}, Errors: {2}, Skipped: {3}, Time elapsed: {4} sec";

   public Reports()
   {
      super();
   }

   /**
    * String version of all reports: each report's own summary followed by an
    * aggregate line totalling runs, failures, errors, skips and elapsed time.
    */
   public String getSummary()
   {
      // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
      StringBuilder summary = new StringBuilder();

      int runs = 0;
      int errors = 0;
      int failures = 0;
      int skips = 0;
      long time = 0;

      for (Report report : this.values())
      {
         runs += report.suite.getTests();
         errors += report.suite.getErrors();
         failures += report.suite.getFailures();
         skips += report.suite.getSkips();
         time += report.suite.getTime();
         summary.append(report.getSummary()).append("\n");
      }

      summary.append("\nResults :\n\n");

      try
      {
         // Autoboxing replaces the deprecated new Integer(...) constructors;
         // MessageFormat.format takes varargs directly.
         summary.append(MessageFormat.format(TEST_INFO, runs, failures, errors, skips, formatTime(time)));
      }
      catch (Exception e)
      {
         summary.append("Error occurred while generating summary ...");
      }

      summary.append("\n");

      return summary.toString();
   }

   /** Formats a duration in milliseconds as seconds with three decimals. */
   private String formatTime(long time)
   {
      return String.format("%.3f", time / 1000.0);
   }

   /**
    * Determines if any reports have failures
    */
   public boolean hasFailures()
   {
      for (Report report : this.values())
      {
         if (report.hasFailures())
         {
            return true;
         }
      }
      return false;
   }
}
package com.box.boxjavalibv2.requests;

import org.apache.http.HttpStatus;

import com.box.boxjavalibv2.dao.BoxResourceType;
import com.box.boxjavalibv2.interfaces.IBoxJSONParser;
import com.box.boxjavalibv2.requests.requestobjects.BoxDefaultRequestObject;
import com.box.restclientv2.RestMethod;
import com.box.restclientv2.exceptions.BoxRestException;
import com.box.restclientv2.interfaces.IBoxConfig;
import com.box.restclientv2.requests.DefaultBoxRequest;

/**
 * Request permanently deleting a trashed item: issues DELETE on
 * "/{type-plural}/{id}/trash" and expects HTTP 204 (No Content) on success.
 */
public class DeleteTrashItemRequest extends DefaultBoxRequest {

    private static final String URI = "/%s/%s/trash";

    /**
     * Constructor.
     *
     * @param config
     *            rest client config
     * @param parser
     *            json parser
     * @param id
     *            id of the trashed item to delete
     * @param resourceType
     *            type of the item
     * @param requestObject
     *            additional request data
     * @throws BoxRestException
     *             exception building the underlying request
     */
    public DeleteTrashItemRequest(final IBoxConfig config, final IBoxJSONParser parser, final String id, final BoxResourceType resourceType,
        final BoxDefaultRequestObject requestObject) throws BoxRestException {
        super(config, parser, getUri(id, resourceType), RestMethod.DELETE, requestObject);
        this.setExpectedResponseCode(HttpStatus.SC_NO_CONTENT);
    }

    /**
     * Get uri.
     *
     * @param id
     *            id of the item
     * @param itemType
     *            type of the item
     * @return uri
     */
    public static String getUri(final String id, final BoxResourceType itemType) {
        return String.format(URI, itemType.toPluralString(), id);
    }
}
/////////////////////////////////////////////////////////////
// CollaboratorRestV2Controller.java
// rest-v2-app
// Created by Gooru on 2014
// Copyright (c) 2014 Gooru. All rights reserved.
// http://www.goorulearning.org/
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/////////////////////////////////////////////////////////////
package org.ednovo.gooru.controllers.v2.api;

import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.ednovo.gooru.controllers.BaseController;
import org.ednovo.gooru.core.api.model.User;
import org.ednovo.gooru.core.constant.ConstantProperties;
import org.ednovo.gooru.core.constant.Constants;
import org.ednovo.gooru.core.constant.GooruOperationConstants;
import org.ednovo.gooru.core.constant.ParameterProperties;
import org.ednovo.gooru.core.security.AuthorizeOperations;
import org.ednovo.gooru.domain.service.CollaboratorService;
import org.ednovo.goorucore.application.serializer.JsonDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;

import com.fasterxml.jackson.core.type.TypeReference;

/**
 * REST endpoints (v2) for managing collaborators on a content item:
 * add, list, suggest and delete collaborators by content gooruOid.
 */
@Controller
@RequestMapping(value = "v2/collaborator")
public class CollaboratorRestV2Controller extends BaseController implements ParameterProperties, ConstantProperties {

    @Autowired
    private CollaboratorService collaboratorService;

    /**
     * Adds collaborators to the content identified by {@code gooruOid}; the
     * request body is a JSON array of collaborator identifiers.
     */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_SCOLLECTION_ADD })
    @RequestMapping(value = { "/content/{id}" }, method = RequestMethod.POST)
    public ModelAndView addCollaborator(@PathVariable(ID) final String gooruOid, @RequestBody final String data, final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        // authenticated user is placed on the request by the security layer
        final User user = (User) request.getAttribute(Constants.USER);
        // NOTE(review): the trailing boolean flags are passed as `true` — their
        // meaning is defined by CollaboratorService/toJsonModelAndView; confirm there.
        return toJsonModelAndView(this.getCollaboratorService().addCollaborator(JsonDeserializer.deserialize(data, new TypeReference<List<String>>() {
        }), gooruOid, user, true), true);
    }

    /**
     * Suggests collaborators matching {@code query} for the current user.
     */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_SCOLLECTION_READ })
    @RequestMapping(value = { "/suggest" }, method = RequestMethod.GET)
    public ModelAndView collaboratorSuggest(@RequestParam(value = QUERY) final String query, final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        final User user = (User) request.getAttribute(Constants.USER);
        return toModelAndView(this.getCollaboratorService().collaboratorSuggest(query, user.getGooruUId()), RESPONSE_FORMAT_JSON);
    }

    /**
     * Lists collaborators of the content; when {@code groupByStatus} is true
     * the result is grouped, otherwise returned as a flat collection.
     */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_SCOLLECTION_READ })
    @RequestMapping(value = { "/content/{id}" }, method = RequestMethod.GET)
    public ModelAndView getCollaborators(@PathVariable(ID) final String gooruOid, @RequestParam(value = GROUP_BY_STATUS, defaultValue = "false", required = false) final Boolean groupByStatus,
            @RequestParam(value = FILTER_BY, required = false) final String filterBy, final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        return toJsonModelAndView(groupByStatus ? this.getCollaboratorService().getCollaboratorsByGroup(gooruOid, filterBy) : this.getCollaboratorService().getCollaborators(gooruOid, filterBy), true);
    }

    /**
     * Removes the collaborators listed in the JSON array {@code data} from the
     * content identified by {@code gooruOid}.
     */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_SCOLLECTION_DELETE })
    @RequestMapping(value = { "/content/{id}" }, method = RequestMethod.DELETE)
    public void deleteCollaborator(@PathVariable(ID) final String gooruOid, @RequestParam final String data, final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        this.getCollaboratorService().deleteCollaborator(gooruOid, JsonDeserializer.deserialize(data, new TypeReference<List<String>>() {
        }));
    }

    public CollaboratorService getCollaboratorService() {
        return collaboratorService;
    }
}
/* MIT - License Copyright (c) 2012 - this year, Nils Schmidt Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.nschmidt.ldparteditor.composites; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.RowLayout; import org.eclipse.swt.widgets.Composite; /** * This is a custom toolbar item * * @author nils * */ public class ToolItem extends Composite { public ToolItem(Composite parent, int style, boolean isHorizontal) { super(parent, style); if (isHorizontal) { this.setLayout(new RowLayout(SWT.HORIZONTAL)); @SuppressWarnings("unused") ToolSeparator srt_Seperator = new ToolSeparator(this, SWT.NONE, isHorizontal); } else { this.setLayout(new RowLayout(SWT.VERTICAL)); @SuppressWarnings("unused") ToolSeparator srt_Seperator = new ToolSeparator(this, SWT.NONE, isHorizontal); } } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.common.block; import static com.google.common.base.Preconditions.checkNotNull; import com.flowpowered.math.vector.Vector3i; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import net.minecraft.block.BlockShulkerBox; import net.minecraft.block.ITileEntityProvider; import net.minecraft.block.state.IBlockState; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.BlockPos; import net.minecraft.world.WorldServer; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockState; import org.spongepowered.api.block.BlockType; import org.spongepowered.api.block.tileentity.TileEntityArchetype; import org.spongepowered.api.block.tileentity.TileEntityType; import org.spongepowered.api.data.DataContainer; import org.spongepowered.api.data.DataView; import org.spongepowered.api.data.Property; import org.spongepowered.api.data.Queries; import org.spongepowered.api.data.key.Key; import org.spongepowered.api.data.manipulator.ImmutableDataManipulator; import org.spongepowered.api.data.merge.MergeFunction; import org.spongepowered.api.data.value.BaseValue; import org.spongepowered.api.data.value.immutable.ImmutableValue; import org.spongepowered.api.world.BlockChangeFlag; import org.spongepowered.api.world.BlockChangeFlags; import org.spongepowered.api.world.Location; import org.spongepowered.api.world.World; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.data.persistence.NbtTranslator; import org.spongepowered.common.data.util.DataQueries; import org.spongepowered.common.data.util.DataUtil; import org.spongepowered.common.data.util.NbtDataUtil; import org.spongepowered.common.event.tracking.PhaseContext; import 
org.spongepowered.common.event.tracking.PhaseTracker;
import org.spongepowered.common.event.tracking.phase.block.BlockPhase;
import org.spongepowered.common.interfaces.block.IMixinBlock;
import org.spongepowered.common.interfaces.world.IMixinWorldServer;
import org.spongepowered.common.registry.type.block.TileEntityTypeRegistryModule;
import org.spongepowered.common.util.VecHelper;
import org.spongepowered.common.world.BlockChange;
import org.spongepowered.common.world.SpongeBlockChangeFlag;

import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.function.Function;

import javax.annotation.Nullable;

/**
 * Immutable implementation of {@link BlockSnapshot} capturing a block state,
 * its position/world, optional tile-entity NBT, and any extra (tile) data
 * manipulators. Block-state-derived key/value views are built lazily and
 * cached; tile-entity-derived views are built in the constructor so the two
 * sources never cross-contaminate.
 */
@SuppressWarnings("unchecked")
public class SpongeBlockSnapshot implements BlockSnapshot {

    private final BlockState blockState;
    private final BlockState extendedState;
    private final UUID worldUniqueId;
    private final Vector3i pos;
    // Tile-entity ("extra") data, kept separate from block-state data.
    private final ImmutableList<ImmutableDataManipulator<?, ?>> extraData;
    // Tile-entity key/value views (populated eagerly in the constructor).
    private ImmutableMap<Key<?>, ImmutableValue<?>> keyValueMap;
    private ImmutableSet<ImmutableValue<?>> valueSet;
    // Block-state key/value views (populated lazily on first access).
    private ImmutableList<ImmutableDataManipulator<?, ?>> blockData;
    private ImmutableMap<Key<?>, ImmutableValue<?>> blockKeyValueMap;
    private ImmutableSet<ImmutableValue<?>> blockValueSet;
    @Nullable final NBTTagCompound compound;
    @Nullable final UUID creatorUniqueId;
    @Nullable final UUID notifierUniqueId;
    // Internal use only
    private final BlockPos blockPos;
    private SpongeBlockChangeFlag changeFlag;
    public BlockChange blockChange; // used for post event

    public SpongeBlockSnapshot(SpongeBlockSnapshotBuilder builder) {
        this.blockState = checkNotNull(builder.blockState, "The block state was null!");
        this.extendedState = builder.extendedState;
        this.worldUniqueId = checkNotNull(builder.worldUuid);
        this.creatorUniqueId = builder.creatorUuid;
        this.notifierUniqueId = builder.notifierUuid;
        this.pos = checkNotNull(builder.coords);
        this.blockPos = VecHelper.toBlockPos(this.pos);
        // This avoids cross contamination of block state based values versus tile entity values.
        // TODO - delegate this to NbtProcessors when schematics are merged.
        final ImmutableMap.Builder<Key<?>, ImmutableValue<?>> tileBuilder = ImmutableMap.builder();
        this.extraData = builder.manipulators == null
                ? ImmutableList.<ImmutableDataManipulator<?, ?>>of()
                : ImmutableList.copyOf(builder.manipulators);
        for (ImmutableDataManipulator<?, ?> manipulator : this.extraData) {
            for (ImmutableValue<?> value : manipulator.getValues()) {
                tileBuilder.put(value.getKey(), value);
            }
        }
        this.keyValueMap = tileBuilder.build();
        this.valueSet = this.keyValueMap.isEmpty() ? ImmutableSet.of() : ImmutableSet.copyOf(this.keyValueMap.values());
        // Defensive copy: the builder's compound may be mutated after build.
        this.compound = builder.compound == null ? null : builder.compound.copy();
        this.changeFlag = builder.flag;
    }

    @Override
    public BlockState getState() {
        return this.blockState;
    }

    @Override
    public BlockState getExtendedState() {
        return this.extendedState;
    }

    @Override
    public BlockSnapshot withState(BlockState blockState) {
        return createBuilder().blockState(blockState).build();
    }

    @Override
    public BlockSnapshot withLocation(Location<World> location) {
        return createBuilder()
            .position(location.getBlockPosition())
            .worldId(location.getExtent().getUniqueId())
            .build();
    }

    @Override
    public BlockSnapshot withContainer(DataContainer container) {
        return new SpongeBlockSnapshotBuilder().build(container).get();
    }

    @Override
    public UUID getWorldUniqueId() {
        return this.worldUniqueId;
    }

    @Override
    public Vector3i getPosition() {
        return this.pos;
    }

    /**
     * Re-applies this snapshot to the world it was taken from.
     *
     * @param force if {@code true}, restore even when the current block/meta
     *        no longer match the snapshot
     * @param flag the change flag controlling neighbor updates etc.
     * @return {@code true} if the block (and tile data, if any) was restored
     */
    @Override
    public boolean restore(boolean force, BlockChangeFlag flag) {
        final Optional<World> optionalWorld = SpongeImpl.getGame().getServer().getWorld(this.worldUniqueId);
        if (!optionalWorld.isPresent()) {
            return false;
        }
        WorldServer world = (WorldServer) optionalWorld.get();
        final IMixinWorldServer mixinWorldServer = (IMixinWorldServer) world;
        // We need to deterministically define the context as nullable if we don't need to enter.
        // this way we guarantee an exit.
        try (PhaseContext<?> context = BlockPhase.State.RESTORING_BLOCKS.createPhaseContext()) {
            context.buildAndSwitch();
            BlockPos pos = VecHelper.toBlockPos(this.pos);
            if (!world.isValid(pos)) { // Invalid position. Inline this check
                return false;
            }
            IBlockState current = world.getBlockState(pos);
            IBlockState replaced = (IBlockState) this.blockState;
            if (!force && (current.getBlock() != replaced.getBlock()
                    || current.getBlock().getMetaFromState(current) != replaced.getBlock().getMetaFromState(replaced))) {
                return false;
            }
            // Prevent Shulker Boxes from dropping when restoring BlockSnapshot
            if (current.getBlock().getClass() == BlockShulkerBox.class) {
                world.removeTileEntity(pos);
            }
            PhaseTracker.getInstance().setBlockState(mixinWorldServer, pos, replaced, flag);
            world.getPlayerChunkMap().markBlockForUpdate(pos);
            if (this.compound != null) {
                final TileEntity te = world.getTileEntity(pos);
                if (te != null) {
                    te.readFromNBT(this.compound);
                    te.markDirty();
                }
            }
            return true;
        }
    }

    @Override
    public Optional<UUID> getCreator() {
        return Optional.ofNullable(this.creatorUniqueId);
    }

    @Override
    public Optional<UUID> getNotifier() {
        return Optional.ofNullable(this.notifierUniqueId);
    }

    @Override
    public Optional<Location<World>> getLocation() {
        Optional<World> worldOptional = SpongeImpl.getGame().getServer().getWorld(this.worldUniqueId);
        if (worldOptional.isPresent()) {
            return Optional.of(new Location<>(worldOptional.get(), this.getPosition()));
        }
        return Optional.empty();
    }

    /** Returns block-state manipulators followed by tile-entity manipulators. */
    @Override
    public List<ImmutableDataManipulator<?, ?>> getManipulators() {
        return ImmutableList.<ImmutableDataManipulator<?, ?>>builder().addAll(this.getBlockManipulators()).addAll(this.extraData).build();
    }

    @Override
    public int getContentVersion() {
        return 1;
    }

    @Override
    public DataContainer toContainer() {
        final DataContainer container = DataContainer.createNew()
            .set(Queries.CONTENT_VERSION, getContentVersion())
            .set(Queries.WORLD_ID, this.worldUniqueId.toString())
            .createView(DataQueries.SNAPSHOT_WORLD_POSITION)
                .set(Queries.POSITION_X, this.pos.getX())
                .set(Queries.POSITION_Y, this.pos.getY())
                .set(Queries.POSITION_Z, this.pos.getZ())
            .getContainer()
            .set(DataQueries.BLOCK_STATE, this.blockState);
        // Extended state is only serialized when it actually differs.
        if (this.blockState != this.extendedState) {
            container.set(DataQueries.BLOCK_EXTENDED_STATE, this.extendedState);
        }
        if (this.compound != null) {
            container.set(DataQueries.UNSAFE_NBT, NbtTranslator.getInstance().translateFrom(this.compound));
        }
        final List<DataView> dataList = DataUtil.getSerializedImmutableManipulatorList(this.extraData);
        if (!dataList.isEmpty()) {
            container.set(DataQueries.SNAPSHOT_TILE_DATA, dataList);
        }
        return container;
    }

    /** Looks up a manipulator first on the block state, then in the tile data. */
    @Override
    public <T extends ImmutableDataManipulator<?, ?>> Optional<T> get(Class<T> containerClass) {
        Optional<T> optional = this.blockState.get(containerClass);
        if (optional.isPresent()) {
            return optional;
        }
        for (ImmutableDataManipulator<?, ?> dataManipulator : this.extraData) {
            if (containerClass.isInstance(dataManipulator)) {
                return Optional.of(((T) dataManipulator));
            }
        }
        return Optional.empty();
    }

    @Override
    public <T extends ImmutableDataManipulator<?, ?>> Optional<T> getOrCreate(Class<T> containerClass) {
        return get(containerClass);
    }

    @Override
    public boolean supports(Class<? extends ImmutableDataManipulator<?, ?>> containerClass) {
        return this.blockState.supports(containerClass);
    }

    @Override
    public <E> Optional<BlockSnapshot> transform(Key<? extends BaseValue<E>> key, Function<E, E> function) {
        return Optional.empty();
    }

    @Override
    public <E> Optional<BlockSnapshot> with(Key<? extends BaseValue<E>> key, E value) {
        Optional<BlockState> optional = this.blockState.with(key, value);
        if (optional.isPresent()) {
            return Optional.of(withState(optional.get()));
        }
        return Optional.empty();
    }

    @SuppressWarnings("rawtypes")
    @Override
    public Optional<BlockSnapshot> with(BaseValue<?> value) {
        return with((Key) value.getKey(), value.get());
    }

    @Override
    public Optional<BlockSnapshot> with(ImmutableDataManipulator<?, ?> valueContainer) {
        if (((IMixinBlock) this.blockState.getType()).supports((Class<ImmutableDataManipulator<?, ?>>) valueContainer.getClass())) {
            final BlockState newState;
            boolean changeState = false;
            if (this.blockState.supports((Class<ImmutableDataManipulator<?, ?>>) valueContainer.getClass())) {
                newState = this.blockState.with(valueContainer).get();
                changeState = true;
            } else {
                newState = this.blockState;
            }
            if (changeState) {
                return Optional.of(createBuilder().blockState(newState).build());
            }
            final SpongeBlockSnapshotBuilder builder = createBuilder();
            builder.add(valueContainer);
            return Optional.of(builder.build());
        }
        // Block type doesn't support it: carried along as extra (tile) data.
        return Optional.of(createBuilder().add(valueContainer).build());
    }

    @Override
    public Optional<BlockSnapshot> with(Iterable<ImmutableDataManipulator<?, ?>> valueContainers) {
        BlockSnapshot snapshot = this;
        for (ImmutableDataManipulator<?, ?> manipulator : valueContainers) {
            final Optional<BlockSnapshot> optional = snapshot.with(manipulator);
            if (!optional.isPresent()) {
                return Optional.empty();
            }
            snapshot = optional.get();
        }
        return Optional.of(snapshot);
    }

    @Override
    public Optional<BlockSnapshot> without(Class<? extends ImmutableDataManipulator<?, ?>> containerClass) {
        return Optional.empty();
    }

    @Override
    public BlockSnapshot merge(BlockSnapshot that) {
        return merge(that, MergeFunction.FORCE_NOTHING);
    }

    @Override
    public BlockSnapshot merge(BlockSnapshot that, MergeFunction function) {
        BlockSnapshot merged = this;
        merged = merged.withState(function.merge(this.blockState, that.getState()));
        for (ImmutableDataManipulator<?, ?> manipulator : that.getContainers()) {
            Optional<BlockSnapshot> optional = merged.with(function.merge(this.get(manipulator.getClass()).orElse(null), manipulator));
            if (optional.isPresent()) {
                merged = optional.get();
            }
        }
        return merged;
    }

    @Override
    public List<ImmutableDataManipulator<?, ?>> getContainers() {
        return getManipulators();
    }

    /** Tile data takes precedence over block-state data for key lookups. */
    @Override
    public <E> Optional<E> get(Key<? extends BaseValue<E>> key) {
        if (this.keyValueMap.containsKey(key)) {
            return Optional.of((E) this.keyValueMap.get(key).get());
        } else if (getKeyValueMap().containsKey(key)) {
            return Optional.of((E) this.blockKeyValueMap.get(key).get());
        }
        return Optional.empty();
    }

    // Lazily builds the block-state key -> value map.
    private ImmutableMap<Key<?>, ImmutableValue<?>> getKeyValueMap() {
        if (this.blockKeyValueMap == null) {
            final ImmutableMap.Builder<Key<?>, ImmutableValue<?>> mapBuilder = ImmutableMap.builder();
            for (ImmutableValue<?> value : this.blockState.getValues()) {
                mapBuilder.put(value.getKey(), value);
            }
            this.blockKeyValueMap = mapBuilder.build();
        }
        return this.blockKeyValueMap;
    }

    // Lazily builds the block-state value set.
    private ImmutableSet<ImmutableValue<?>> getValueSet() {
        if (this.blockValueSet == null) {
            this.blockValueSet = ImmutableSet.copyOf(getKeyValueMap().values());
        }
        return this.blockValueSet;
    }

    // Tile value set; normally already built by the constructor.
    private ImmutableSet<ImmutableValue<?>> getTileValueSet() {
        if (this.valueSet == null) {
            this.valueSet = ImmutableSet.copyOf(this.getTileMap().values());
        }
        return this.valueSet;
    }

    // Tile key -> value map; normally already built by the constructor.
    private ImmutableMap<Key<?>, ImmutableValue<?>> getTileMap() {
        if (this.keyValueMap == null) {
            final ImmutableMap.Builder<Key<?>, ImmutableValue<?>> tileBuilder = ImmutableMap.builder();
            for (ImmutableDataManipulator<?, ?> manipulator : this.extraData) {
                for (ImmutableValue<?> value : manipulator.getValues()) {
                    tileBuilder.put(value.getKey(), value);
                }
            }
            this.keyValueMap = tileBuilder.build();
        }
        return this.keyValueMap;
    }

    private ImmutableList<ImmutableDataManipulator<?, ?>> getBlockManipulators() {
        if (this.blockData == null) {
            this.blockData = ImmutableList.copyOf(this.blockState.getContainers());
        }
        return this.blockData;
    }

    @Override
    public <E, V extends BaseValue<E>> Optional<V> getValue(Key<V> key) {
        if (this.keyValueMap.containsKey(key)) {
            return Optional.of((V) this.keyValueMap.get(key).asMutable());
        } else if (getKeyValueMap().containsKey(key)) {
            return Optional.of((V) this.blockKeyValueMap.get(key).asMutable());
        }
        return Optional.empty();
    }

    @Override
    public boolean supports(Key<?> key) {
        checkNotNull(key, "Key");
        return this.keyValueMap.containsKey(key) || getKeyValueMap().containsKey(key);
    }

    @Override
    public BlockSnapshot copy() {
        return this;
    }

    private Set<Key<?>> keys;

    @Override
    public Set<Key<?>> getKeys() {
        if (this.keys == null) {
            // BUGFIX: previously added getKeyValueMap().keySet() twice, omitting the
            // tile-entity keys. Union tile keys with block-state keys to mirror getValues().
            this.keys = ImmutableSet.<Key<?>>builder().addAll(getTileMap().keySet()).addAll(getKeyValueMap().keySet()).build();
        }
        return this.keys;
    }

    private ImmutableSet<ImmutableValue<?>> values;

    @Override
    public Set<ImmutableValue<?>> getValues() {
        if (this.values == null) {
            this.values = ImmutableSet.<ImmutableValue<?>>builder().addAll(getTileValueSet()).addAll(getValueSet()).build();
        }
        return this.values;
    }

    /** Returns a defensive copy of the tile-entity NBT, if present. */
    public Optional<NBTTagCompound> getCompound() {
        return this.compound == null ? Optional.<NBTTagCompound>empty() : Optional.of(this.compound.copy());
    }

    public SpongeBlockSnapshotBuilder createBuilder() {
        final SpongeBlockSnapshotBuilder builder = new SpongeBlockSnapshotBuilder();
        builder.blockState(this.blockState)
            .extendedState(this.extendedState)
            .position(this.pos)
            .worldId(this.worldUniqueId);
        for (ImmutableDataManipulator<?, ?> manipulator : this.extraData) {
            builder.add(manipulator);
        }
        if (this.compound != null) {
            builder.unsafeNbt(this.compound);
        }
        return builder;
    }

    // Used internally for restores
    public SpongeBlockChangeFlag getChangeFlag() {
        return this.changeFlag;
    }

    public BlockPos getBlockPos() {
        return this.blockPos;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
            .add("worldUniqueId", this.worldUniqueId)
            .add("position", this.pos)
            .add("blockState", this.blockState)
            .add("extendedState", this.extendedState)
            .toString();
    }

    @Override
    public <T extends Property<?, ?>> Optional<T> getProperty(Class<T> propertyClass) {
        return this.blockState.getProperty(propertyClass);
    }

    @Override
    public Collection<Property<?, ?>> getApplicableProperties() {
        return this.blockState.getApplicableProperties();
    }

    @Override
    public Optional<TileEntityArchetype> createArchetype() {
        final BlockType type = this.blockState.getType();
        if (!(type instanceof ITileEntityProvider)) {
            return Optional.empty();
        }
        if (this.compound == null) { // We can't retrieve the TileEntityType
            return Optional.empty();
        }
        final String tileId = this.compound.getString(NbtDataUtil.BLOCK_ENTITY_ID);
        final Class<? extends TileEntity> tileClass = TileEntity.REGISTRY.getObject(new ResourceLocation(tileId));
        if (tileClass == null) {
            return Optional.empty();
        }
        final TileEntityType tileType = TileEntityTypeRegistryModule.getInstance().getForClass(tileClass);
        final TileEntityArchetype archetype = TileEntityArchetype.builder()
            .tile(tileType)
            .state(this.blockState)
            .tileData(NbtTranslator.getInstance().translate(this.compound))
            .build();
        return Optional.of(archetype);
    }

    // NOTE: equality deliberately ignores blockState itself (only extendedState
    // participates) — preserved as-is; confirm against callers before changing.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SpongeBlockSnapshot that = (SpongeBlockSnapshot) o;
        return this.changeFlag == that.changeFlag
            && Objects.equal(this.extendedState, that.extendedState)
            && Objects.equal(this.worldUniqueId, that.worldUniqueId)
            && Objects.equal(this.pos, that.pos)
            && Objects.equal(this.extraData, that.extraData)
            && Objects.equal(this.compound, that.compound);
    }

    @Override
    public int hashCode() {
        return Objects
            .hashCode(this.extendedState,
                this.worldUniqueId,
                this.pos,
                this.extraData,
                this.changeFlag,
                this.compound);
    }
}
/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */
/* JavaCCOptions: */
package org.apache.el.parser;

/** Token Manager Error. */
@SuppressWarnings("all") // Ignore warnings in generated code
public class TokenMgrError extends Error {

    /**
     * The version identifier for this Serializable class.
     * Increment only if the <i>serialized</i> form of the
     * class changes.
     */
    private static final long serialVersionUID = 1L;

    /*
     * Ordinals for various reasons why an Error of this type can be thrown.
     */

    /**
     * Lexical error occurred.
     */
    static final int LEXICAL_ERROR = 0;

    /**
     * An attempt was made to create a second instance of a static token
     * manager.
     */
    static final int STATIC_LEXER_ERROR = 1;

    /**
     * Tried to change to an invalid lexical state.
     */
    static final int INVALID_LEXICAL_STATE = 2;

    /**
     * Detected (and bailed out of) an infinite loop in the token manager.
     */
    static final int LOOP_DETECTED = 3;

    /**
     * Indicates the reason why the exception is thrown. It will have
     * one of the above 4 values.
     */
    int errorCode;

    /**
     * Replaces unprintable characters by their escaped (or unicode escaped)
     * equivalents in the given string
     */
    protected static final String addEscapes(String str) {
        StringBuffer retval = new StringBuffer();
        char ch;
        for (int i = 0; i < str.length(); i++) {
            switch (str.charAt(i)) {
            case 0:
                // NUL characters are dropped from the escaped output entirely.
                continue;
            case '\b':
                retval.append("\\b");
                continue;
            case '\t':
                retval.append("\\t");
                continue;
            case '\n':
                retval.append("\\n");
                continue;
            case '\f':
                retval.append("\\f");
                continue;
            case '\r':
                retval.append("\\r");
                continue;
            case '\"':
                retval.append("\\\"");
                continue;
            case '\'':
                retval.append("\\\'");
                continue;
            case '\\':
                retval.append("\\\\");
                continue;
            default:
                // Anything outside printable ASCII becomes a \\uXXXX escape,
                // zero-padded to four hex digits.
                if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
                    String s = "0000" + Integer.toString(ch, 16);
                    retval.append("\\u" + s.substring(s.length() - 4, s.length()));
                } else {
                    retval.append(ch);
                }
                continue;
            }
        }
        return retval.toString();
    }

    /**
     * Returns a detailed message for the Error when it is thrown by the
     * token manager to indicate a lexical error.
     * Parameters :
     *    EOFSeen     : indicates if EOF caused the lexical error
     *    curLexState : lexical state in which this error occurred
     *    errorLine   : line number when the error occurred
     *    errorColumn : column number when the error occurred
     *    errorAfter  : prefix that was seen before this error occurred
     *    curchar     : the offending character
     * Note: You can customize the lexical error message by modifying this
     * method.
     */
    protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine,
            int errorColumn, String errorAfter, char curChar) {
        return ("Lexical error at line " + errorLine + ", column " + errorColumn
                + ". Encountered: "
                + (EOFSeen ? "<EOF> "
                        : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int) curChar + "), ")
                + "after : \"" + addEscapes(errorAfter) + "\"");
    }

    /**
     * You can also modify the body of this method to customize your error
     * messages.
     * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
     * of end-users concern, so you can return something like :
     *
     *     "Internal Error : Please file a bug report .... "
     *
     * from this method for such cases in the release version of your parser.
     */
    public String getMessage() {
        return super.getMessage();
    }

    /*
     * Constructors of various flavors follow.
     */

    /** No arg constructor. */
    public TokenMgrError() {}

    /** Constructor with message and reason. */
    public TokenMgrError(String message, int reason) {
        super(message);
        errorCode = reason;
    }

    /** Full Constructor. */
    public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn,
            String errorAfter, char curChar, int reason) {
        this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
    }
}
/*
 * JavaCC - OriginalChecksum=de3ff0bacfb0fe749cc8eaf56ae82fea (do not edit this
 * line)
 */
package com.objectcomputing.checkins.services.memberprofile;

import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
import io.micronaut.core.annotation.Nullable;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import java.time.LocalDate;
import java.util.Objects;
import java.util.UUID;

/**
 * Request body for creating a member profile. Plain data-transfer object:
 * field-level constraint annotations (@NotBlank/@NotNull) carry the
 * validation contract; @Schema annotations feed the OpenAPI documentation.
 *
 * NOTE(review): constraint annotations are repeated on some getters/setters
 * but not others (e.g. getTitle/getLocation have none) — presumably the
 * field-level annotations are the authoritative ones; confirm before relying
 * on accessor-level validation.
 */
@Introspected
public class MemberProfileCreateDTO {

    @NotBlank
    @Schema(description = "first name of the employee")
    private String firstName;

    @Nullable
    @Schema(description = "middle name of the employee")
    private String middleName;

    @NotBlank
    @Schema(description = "last name of the employee")
    private String lastName;

    @Nullable
    @Schema(description = "suffix of the employee")
    private String suffix;

    @NotBlank
    @Schema(description = "employee's title at the company", required = true)
    private String title;

    @Nullable
    @Schema(description = "employee's professional development lead")
    private UUID pdlId;

    @NotBlank
    @Schema(description = "where the employee is geographically located", required = true)
    private String location;

    @NotBlank
    @Schema(description = "employee's OCI email. Typically last name + first initial @ObjectComputing.com", required = true)
    private String workEmail;

    @Nullable
    @Schema(description = "unique identifier for this employee")
    private String employeeId;

    @NotNull
    @Schema(description = "employee's date of hire", required = true)
    private LocalDate startDate;

    @Nullable
    @Schema(description = "employee's biography")
    private String bioText;

    @Nullable
    @Schema(description = "id of the supervisor this member is associated with", nullable = true)
    private UUID supervisorid;

    @Nullable
    @Schema(description = "employee's date of termination", nullable = true)
    private LocalDate terminationDate;

    @Nullable
    @Schema(description = "Birth date of employee", nullable = true)
    private LocalDate birthDay;

    @Nullable
    @Schema(description = "The employee termination was voluntary", nullable = true)
    private Boolean voluntary;

    @Nullable
    @Schema(description = "The employee is excluded from retention reports", nullable = true)
    private Boolean excluded;

    // ---- accessors (bean-style; used by Micronaut's introspection) ----

    @NotBlank
    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(@NotBlank String firstName) {
        this.firstName = firstName;
    }

    @Nullable
    public String getMiddleName() {
        return middleName;
    }

    public void setMiddleName(@Nullable String middleName) {
        this.middleName = middleName;
    }

    @NotBlank
    public String getLastName() {
        return lastName;
    }

    public void setLastName(@NotBlank String lastName) {
        this.lastName = lastName;
    }

    @Nullable
    public String getSuffix() {
        return suffix;
    }

    public void setSuffix(@Nullable String suffix) {
        this.suffix = suffix;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    @Nullable
    public UUID getPdlId() {
        return pdlId;
    }

    public void setPdlId(@Nullable UUID pdlId) {
        this.pdlId = pdlId;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public String getWorkEmail() {
        return workEmail;
    }

    public void setWorkEmail(String workEmail) {
        this.workEmail = workEmail;
    }

    @Nullable
    public String getEmployeeId() {
        return employeeId;
    }

    public void setEmployeeId(@Nullable String employeeId) {
        this.employeeId = employeeId;
    }

    public LocalDate getStartDate() {
        return startDate;
    }

    public void setStartDate(LocalDate startDate) {
        this.startDate = startDate;
    }

    @Nullable
    public String getBioText() {
        return bioText;
    }

    public void setBioText(@Nullable String bioText) {
        this.bioText = bioText;
    }

    @Nullable
    public UUID getSupervisorid() {
        return supervisorid;
    }

    public void setSupervisorid(@Nullable UUID supervisorid) {
        this.supervisorid = supervisorid;
    }

    @Nullable
    public LocalDate getTerminationDate() {
        return terminationDate;
    }

    public void setTerminationDate(@Nullable LocalDate terminationDate) {
        this.terminationDate = terminationDate;
    }

    @Nullable
    public LocalDate getBirthDay() {
        return birthDay;
    }

    public void setBirthDay(@Nullable LocalDate birthDay) {
        this.birthDay = birthDay;
    }

    @Nullable
    public Boolean getVoluntary() {
        return voluntary;
    }

    public void setVoluntary(@Nullable Boolean voluntary) {
        this.voluntary = voluntary;
    }

    @Nullable
    public Boolean getExcluded() {
        return excluded;
    }

    public void setExcluded(@Nullable Boolean excluded) {
        this.excluded = excluded;
    }

    // equals/hashCode compare every field, in declaration order.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        MemberProfileCreateDTO that = (MemberProfileCreateDTO) o;
        return Objects.equals(firstName, that.firstName) &&
                Objects.equals(middleName, that.middleName) &&
                Objects.equals(lastName, that.lastName) &&
                Objects.equals(suffix, that.suffix) &&
                Objects.equals(title, that.title) &&
                Objects.equals(pdlId, that.pdlId) &&
                Objects.equals(location, that.location) &&
                Objects.equals(workEmail, that.workEmail) &&
                Objects.equals(employeeId, that.employeeId) &&
                Objects.equals(startDate, that.startDate) &&
                Objects.equals(bioText, that.bioText) &&
                Objects.equals(supervisorid, that.supervisorid) &&
                Objects.equals(terminationDate, that.terminationDate) &&
                Objects.equals(birthDay, that.birthDay) &&
                Objects.equals(voluntary, that.voluntary) &&
                Objects.equals(excluded, that.excluded);
    }

    @Override
    public int hashCode() {
        return Objects.hash(firstName, middleName, lastName, suffix, title, pdlId, location, workEmail,
                employeeId, startDate, bioText, supervisorid, terminationDate, birthDay, voluntary, excluded);
    }
}
package dev.antoineraulin.numerama; import android.content.Context; import android.content.Intent; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.koushikdutta.ion.Ion; /** * Created by antoineraulin on 14/02/2017. */ public class MyViewHolder extends RecyclerView.ViewHolder{ private TextView textViewView; private ImageView imageView; private TextView dateTextView; private TextView descriptionTextView; private LinearLayout descriptionLinearLayout; private TextView catTextView; public MyViewHolder(View itemView) { super(itemView); textViewView = (TextView) itemView.findViewById(R.id.text); dateTextView = (TextView) itemView.findViewById(R.id.time); imageView = (ImageView) itemView.findViewById(R.id.image); descriptionTextView = (TextView) itemView.findViewById(R.id.description); descriptionLinearLayout = (LinearLayout) itemView.findViewById(R.id.descLa); catTextView = (TextView) itemView.findViewById(R.id.cat); } public void bind(final MyObject myObject){ textViewView.setText(myObject.getText()); catTextView.setText(myObject.getCat()); descriptionTextView.setText(myObject.getDescription()); dateTextView.setText(", Il y a "+myObject.getDate()+" "+myObject.getUnite()); Log.e("picasso", "url : '"+myObject.getImageUrl()+"'"); Ion.with(imageView) .placeholder(R.mipmap.nblack) .load(myObject.getImageUrl()); textViewView.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { Context context = v.getContext(); Intent intent=new Intent(v.getContext(),ArticleActivity.class); intent.putExtra("link", myObject.getLink()); intent.putExtra("title", myObject.getText()); intent.putExtra("image", myObject.getImageUrl()); context.startActivity(intent); } }); descriptionTextView.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { Context context = v.getContext(); Intent 
intent=new Intent(v.getContext(),ArticleActivity.class); intent.putExtra("link", myObject.getLink()); intent.putExtra("title", myObject.getText()); intent.putExtra("image", myObject.getImageUrl()); context.startActivity(intent); } }); imageView.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { Context context = v.getContext(); Intent intent=new Intent(v.getContext(),ArticleActivity.class); intent.putExtra("link", myObject.getLink()); intent.putExtra("title", myObject.getText()); intent.putExtra("image", myObject.getImageUrl()); context.startActivity(intent); } }); } }
/*
 * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.comprehendmedical.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.comprehendmedical.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * StartRxNormInferenceJobResult JSON Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StartRxNormInferenceJobResultJsonUnmarshaller implements Unmarshaller<StartRxNormInferenceJobResult, JsonUnmarshallerContext> {

    // Walks the JSON token stream at one nesting depth below the current
    // object, extracting the single "JobId" field into the result object.
    public StartRxNormInferenceJobResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        StartRxNormInferenceJobResult startRxNormInferenceJobResult = new StartRxNormInferenceJobResult();

        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null maps to an empty (default-initialized) result.
        if (token == VALUE_NULL) {
            return startRxNormInferenceJobResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("JobId", targetDepth)) {
                    context.nextToken();
                    startRxNormInferenceJobResult.setJobId(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out of the object we started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return startRxNormInferenceJobResult;
    }

    private static StartRxNormInferenceJobResultJsonUnmarshaller instance;

    // NOTE(review): lazy init is not synchronized; a data race can create
    // duplicate instances. Benign here because the unmarshaller is stateless —
    // this is the AWS SDK code-generator's standard pattern.
    public static StartRxNormInferenceJobResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new StartRxNormInferenceJobResultJsonUnmarshaller();
        return instance;
    }
}
package com.coolweather.adnroid.util; import okhttp3.OkHttpClient; import okhttp3.Request; /** * Created by wu on 2017/4/10. */ public class HttpUtil { public static void sendOkHttpRequest(String address, okhttp3.Callback callback){ OkHttpClient client = new OkHttpClient(); Request request = new Request.Builder().url(address).build(); client.newCall(request).enqueue(callback); } }
package br.com.zupacademy.rafael.proposta.cartaoassociarcarteira; import br.com.zupacademy.rafael.proposta.criarcartao.Cartao; import javax.persistence.*; import javax.validation.constraints.Email; import javax.validation.constraints.NotBlank; import javax.validation.constraints.NotNull; @Entity public class Carteira { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @NotBlank private String nome; @NotBlank @Email private String email; @NotNull @ManyToOne private Cartao cartao; @Deprecated public Carteira() {} public Carteira(@NotBlank String nome, @NotBlank @Email String email, @NotNull Cartao cartao) { this.nome = nome; this.email = email; this.cartao = cartao; } public Long getId() { return id; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((nome == null) ? 0 : nome.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Carteira other = (Carteira) obj; if (nome == null) { if (other.nome != null) return false; } else if (!nome.equals(other.nome)) return false; return true; } @Override public String toString() { return "Carteira [id=" + id + ", nome=" + nome + ", email=" + email + ", cartao=" + cartao + "]"; } }
package com.mysql.cj.protocol; public interface Warning { int getLevel(); long getCode(); String getMessage(); } /* Location: C:\Users\BSV\AppData\Local\Temp\Rar$DRa6216.20396\Preview\Preview.jar!\com\mysql\cj\protocol\Warning.class * Java compiler version: 8 (52.0) * JD-Core Version: 1.1.3 */
package com.zsn.gulimall.order.entity; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; import java.math.BigDecimal; import java.io.Serializable; import java.util.Date; import lombok.Data; /** * 订单退货申请; InnoDB free: 7168 kB * * @author zsn * @email 1765509297@qq.com * @date 2022-02-24 15:55:04 */ @Data @TableName("oms_order_return_apply") public class OrderReturnApplyEntity implements Serializable { private static final long serialVersionUID = 1L; /** * id */ @TableId private Long id; /** * order_id */ private Long orderId; /** * 退货商品id */ private Long skuId; /** * 订单编号 */ private String orderSn; /** * 申请时间 */ private Date createTime; /** * 会员用户名 */ private String memberUsername; /** * 退款金额 */ private BigDecimal returnAmount; /** * 退货人姓名 */ private String returnName; /** * 退货人电话 */ private String returnPhone; /** * 申请状态[0->待处理;1->退货中;2->已完成;3->已拒绝] */ private Integer status; /** * 处理时间 */ private Date handleTime; /** * 商品图片 */ private String skuImg; /** * 商品名称 */ private String skuName; /** * 商品品牌 */ private String skuBrand; /** * 商品销售属性(JSON) */ private String skuAttrsVals; /** * 退货数量 */ private Integer skuCount; /** * 商品单价 */ private BigDecimal skuPrice; /** * 商品实际支付单价 */ private BigDecimal skuRealPrice; /** * 原因 */ private String reason; /** * 描述 */ private String description述; /** * 凭证图片,以逗号隔开 */ private String descPics; /** * 处理备注 */ private String handleNote; /** * 处理人员 */ private String handleMan; /** * 收货人 */ private String receiveMan; /** * 收货时间 */ private Date receiveTime; /** * 收货备注 */ private String receiveNote; /** * 收货电话 */ private String receivePhone; /** * 公司收货地址 */ private String companyAddress; }
package com.shahenlibrary.videoprocessor;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.view.Surface;

import com.shahenlibrary.videoprocessor.util.CL;
import com.shahenlibrary.videoprocessor.util.VideoProgressAve;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.shahenlibrary.videoprocessor.VideoProcessor.DEFAULT_FRAME_RATE;
import static com.shahenlibrary.videoprocessor.VideoProcessor.TIMEOUT_USEC;

/**
 * Created by huangwei on 2018/4/8 0008.
 *
 * Encoder drain thread: configures a surface-input {@link MediaCodec} video
 * encoder, exposes its input {@link Surface} to the decode/render side, and
 * writes every encoded buffer to the shared {@link MediaMuxer}.
 *
 * FIXES over the original:
 * - {@code BufferInfo.flags} is a bitmask; comparisons against
 *   {@code BUFFER_FLAG_END_OF_STREAM} / {@code BUFFER_FLAG_CODEC_CONFIG} now use
 *   bitwise tests instead of {@code ==}/{@code !=} (the old code missed the EOS
 *   buffer whenever the codec combined flags).
 * - Static constant {@code MediaFormat.KEY_FRAME_RATE} is no longer accessed
 *   through an instance reference.
 */
public class VideoEncodeThread extends Thread implements IVideoEncodeThread {

    private MediaCodec mEncoder;
    private MediaMuxer mMuxer;
    private AtomicBoolean mDecodeDone;
    private CountDownLatch mMuxerStartLatch;
    private Exception mException;
    private int mBitrate;
    private int mResultWidth;
    private int mResultHeight;
    private int mIFrameInterval;
    private int mFrameRate;
    private MediaExtractor mExtractor;
    private int mVideoIndex;
    // private volatile InputSurface mInputSurface;
    private volatile CountDownLatch mEglContextLatch;
    private volatile Surface mSurface;
    private VideoProgressAve mProgressAve;

    public VideoEncodeThread(MediaExtractor extractor, MediaMuxer muxer,
                             int bitrate, int resultWidth, int resultHeight,
                             int iFrameInterval, int frameRate, int videoIndex,
                             AtomicBoolean decodeDone, CountDownLatch muxerStartLatch) {
        super("VideoProcessEncodeThread");
        mMuxer = muxer;
        mDecodeDone = decodeDone;
        mMuxerStartLatch = muxerStartLatch;
        mExtractor = extractor;
        mBitrate = bitrate;
        mResultHeight = resultHeight;
        mResultWidth = resultWidth;
        mIFrameInterval = iFrameInterval;
        mVideoIndex = videoIndex;
        mFrameRate = frameRate;
        mEglContextLatch = new CountDownLatch(1);
    }

    @Override
    public void run() {
        super.run();
        try {
            doEncode();
        } catch (Exception e) {
            CL.e(e);
            mException = e;
        } finally {
            try {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                }
            } catch (Exception e) {
                // Keep the first (root-cause) exception; only record this one
                // if nothing failed earlier.
                mException = mException == null ? e : mException;
                CL.e(e);
            }
        }
    }

    /**
     * Configures the encoder, publishes the input surface, then drains encoded
     * buffers into the muxer until end-of-stream.
     */
    private void doEncode() throws IOException {
        MediaFormat inputFormat = mExtractor.getTrackFormat(mVideoIndex);
        // Initialize the encoder. Fall back to the source's frame rate, or a
        // default, when no explicit rate was requested.
        int frameRate;
        if (mFrameRate > 0) {
            frameRate = mFrameRate;
        } else {
            // FIX: access the static key via the class, not an instance.
            frameRate = inputFormat.containsKey(MediaFormat.KEY_FRAME_RATE)
                    ? inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE)
                    : DEFAULT_FRAME_RATE;
        }

        String mimeType = VideoProcessor.OUTPUT_MIME_TYPE;
        MediaFormat outputFormat = MediaFormat.createVideoFormat(mimeType, mResultWidth, mResultHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);

        mEncoder = MediaCodec.createEncoderByType(mimeType);
        boolean supportProfileHigh = VideoUtil.trySetProfileAndLevel(mEncoder, mimeType, outputFormat,
                MediaCodecInfo.CodecProfileLevel.AVCProfileHigh,
                MediaCodecInfo.CodecProfileLevel.AVCLevel31
        );
        if (supportProfileHigh) {
            CL.i("supportProfileHigh,enable ProfileHigh");
        }
        int maxBitrate = VideoUtil.getMaxSupportBitrate(mEncoder, mimeType);
        if (maxBitrate > 0 && mBitrate > maxBitrate) {
            CL.e(mBitrate + " bitrate too large,set to:" + maxBitrate);
            // Setting the exact maximum crashed on Xiaomi 2; use 80% of it.
            mBitrate = (int) (maxBitrate * 0.8f);
        }
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitrate);
        mEncoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mSurface = mEncoder.createInputSurface();
        // mInputSurface = new InputSurface(encodeSurface);
        // mInputSurface.makeCurrent();
        mEncoder.start();
        // Unblock the decode side, which waits for the input surface.
        mEglContextLatch.countDown();

        boolean signalEncodeEnd = false;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int encodeTryAgainCount = 0;
        int videoTrackIndex = -5;
        boolean detectTimeError = false;
        final int VIDEO_FRAME_TIME_US = (int) (1000 * 1000f / frameRate);
        long lastVideoFrameTimeUs = -1;
        // Drain loop: pull encoded output until EOS.
        while (true) {
            if (mDecodeDone.get() && !signalEncodeEnd) {
                signalEncodeEnd = true;
                mEncoder.signalEndOfInputStream();
            }
            int outputBufferIndex = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            CL.i("encode outputBufferIndex = " + outputBufferIndex);
            if (signalEncodeEnd && outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                encodeTryAgainCount++;
                if (encodeTryAgainCount > 10) {
                    // Samsung S8 keeps returning TRY_AGAIN after
                    // signalEndOfInputStream; force-terminate the loop.
                    CL.e("INFO_TRY_AGAIN_LATER 10 times,force End!");
                    break;
                }
            } else {
                encodeTryAgainCount = 0;
            }
            if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                continue;
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = mEncoder.getOutputFormat();
                if (videoTrackIndex == -5) {
                    videoTrackIndex = mMuxer.addTrack(newFormat);
                    mMuxer.start();
                    // Audio/other writers wait for the muxer to be started.
                    mMuxerStartLatch.countDown();
                }
                CL.i("encode newFormat = " + newFormat);
            } else if (outputBufferIndex < 0) {
                //ignore
                CL.e("unexpected result from decoder.dequeueOutputBuffer: " + outputBufferIndex);
            } else {
                // Encoded data is available.
                ByteBuffer outputBuffer = mEncoder.getOutputBuffer(outputBufferIndex);
                // FIX: flags is a bitmask — test the EOS bit instead of ==.
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0 && info.presentationTimeUs < 0) {
                    info.presentationTimeUs = 0;
                }
                // Write the video sample. Some devices emit out-of-order or
                // duplicate timestamps; detect and patch a bad frame time.
                if (!detectTimeError && lastVideoFrameTimeUs != -1
                        && info.presentationTimeUs < lastVideoFrameTimeUs + VIDEO_FRAME_TIME_US / 2) {
                    // Frame timestamp went backwards (or barely advanced).
                    CL.e("video timestamp error,lastVideoFrameTimeUs:" + lastVideoFrameTimeUs + " " +
                            "info.presentationTimeUs:" + info.presentationTimeUs + " VIDEO_FRAME_TIME_US:" + VIDEO_FRAME_TIME_US);
                    detectTimeError = true;
                }
                if (detectTimeError) {
                    info.presentationTimeUs = lastVideoFrameTimeUs + VIDEO_FRAME_TIME_US;
                    CL.e("video timestamp error,use corrected timestamp:" + info.presentationTimeUs);
                    detectTimeError = false;
                }
                // FIX: bitmask test — codec-config buffers carry no frame time.
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    lastVideoFrameTimeUs = info.presentationTimeUs;
                }
                CL.i("writeSampleData,size:" + info.size + " time:" + info.presentationTimeUs / 1000);
                mMuxer.writeSampleData(videoTrackIndex, outputBuffer, info);
                notifyProgress(info);
                mEncoder.releaseOutputBuffer(outputBufferIndex, false);
                // FIX: bitmask test for the end-of-stream buffer.
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    CL.i("encoderDone");
                    break;
                }
            }
        }
    }

    /** Forwards the encoded timestamp to the progress aggregator, if any. */
    private void notifyProgress(MediaCodec.BufferInfo info) {
        if (mProgressAve == null) {
            return;
        }
        mProgressAve.setEncodeTimeStamp(
                (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) > 0 ? Long.MAX_VALUE : info.presentationTimeUs);
    }

    @Override
    public Surface getSurface() {
        return mSurface;
    }

    @Override
    public CountDownLatch getEglContextLatch() {
        return mEglContextLatch;
    }

    public Exception getException() {
        return mException;
    }

    public void setProgressAve(VideoProgressAve progressAve) {
        mProgressAve = progressAve;
    }
}
/**
 * Created by imink on 08/03/2017.
 *
 * Placeholder class for linked-list practice questions; no members are
 * defined yet.
 */
public class LinkedListBaseQs {

}
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.bigtable.data.v2.stub;

import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ServerStreamingCallSettings;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.bigtable.v2.CheckAndMutateRowRequest;
import com.google.bigtable.v2.CheckAndMutateRowResponse;
import com.google.bigtable.v2.MutateRowRequest;
import com.google.bigtable.v2.MutateRowResponse;
import com.google.bigtable.v2.MutateRowsRequest;
import com.google.bigtable.v2.MutateRowsResponse;
import com.google.bigtable.v2.ReadModifyWriteRowRequest;
import com.google.bigtable.v2.ReadModifyWriteRowResponse;
import com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.v2.ReadRowsResponse;
import com.google.bigtable.v2.SampleRowKeysRequest;
import com.google.bigtable.v2.SampleRowKeysResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;

// AUTO-GENERATED DOCUMENTATION AND CLASS
// NOTE: this file is produced by the gapic-generator; do not hand-edit the
// settings values here — regenerate instead.
/** For internal use only. */
@Generated("by gapic-generator")
@InternalApi
public class BigtableStubSettings extends StubSettings<BigtableStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/bigtable.data")
          .add("https://www.googleapis.com/auth/bigtable.data.readonly")
          .add("https://www.googleapis.com/auth/cloud-bigtable.data")
          .add("https://www.googleapis.com/auth/cloud-bigtable.data.readonly")
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/cloud-platform.read-only")
          .build();

  // Per-RPC call settings (retry codes, retry params, timeouts).
  private final ServerStreamingCallSettings<ReadRowsRequest, ReadRowsResponse> readRowsSettings;
  private final ServerStreamingCallSettings<SampleRowKeysRequest, SampleRowKeysResponse>
      sampleRowKeysSettings;
  private final UnaryCallSettings<MutateRowRequest, MutateRowResponse> mutateRowSettings;
  private final ServerStreamingCallSettings<MutateRowsRequest, MutateRowsResponse>
      mutateRowsSettings;
  private final UnaryCallSettings<CheckAndMutateRowRequest, CheckAndMutateRowResponse>
      checkAndMutateRowSettings;
  private final UnaryCallSettings<ReadModifyWriteRowRequest, ReadModifyWriteRowResponse>
      readModifyWriteRowSettings;

  /** Returns the object with the settings used for calls to readRows. */
  public ServerStreamingCallSettings<ReadRowsRequest, ReadRowsResponse> readRowsSettings() {
    return readRowsSettings;
  }

  /** Returns the object with the settings used for calls to sampleRowKeys. */
  public ServerStreamingCallSettings<SampleRowKeysRequest, SampleRowKeysResponse>
      sampleRowKeysSettings() {
    return sampleRowKeysSettings;
  }

  /** Returns the object with the settings used for calls to mutateRow. */
  public UnaryCallSettings<MutateRowRequest, MutateRowResponse> mutateRowSettings() {
    return mutateRowSettings;
  }

  /** Returns the object with the settings used for calls to mutateRows. */
  public ServerStreamingCallSettings<MutateRowsRequest, MutateRowsResponse> mutateRowsSettings() {
    return mutateRowsSettings;
  }

  /** Returns the object with the settings used for calls to checkAndMutateRow. */
  public UnaryCallSettings<CheckAndMutateRowRequest, CheckAndMutateRowResponse>
      checkAndMutateRowSettings() {
    return checkAndMutateRowSettings;
  }

  /** Returns the object with the settings used for calls to readModifyWriteRow. */
  public UnaryCallSettings<ReadModifyWriteRowRequest, ReadModifyWriteRowResponse>
      readModifyWriteRowSettings() {
    return readModifyWriteRowSettings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public BigtableStub createStub() throws IOException {
    // Only the gRPC transport is supported by this generated stub.
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcBigtableStub.create(this);
    } else {
      throw new UnsupportedOperationException(
          "Transport not supported: " + getTransportChannelProvider().getTransportName());
    }
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "bigtable.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(BigtableStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected BigtableStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    readRowsSettings = settingsBuilder.readRowsSettings().build();
    sampleRowKeysSettings = settingsBuilder.sampleRowKeysSettings().build();
    mutateRowSettings = settingsBuilder.mutateRowSettings().build();
    mutateRowsSettings = settingsBuilder.mutateRowsSettings().build();
    checkAndMutateRowSettings = settingsBuilder.checkAndMutateRowSettings().build();
    readModifyWriteRowSettings = settingsBuilder.readModifyWriteRowSettings().build();
  }

  /** Builder for BigtableStubSettings. */
  public static class Builder extends StubSettings.Builder<BigtableStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;

    private final ServerStreamingCallSettings.Builder<ReadRowsRequest, ReadRowsResponse>
        readRowsSettings;
    private final ServerStreamingCallSettings.Builder<SampleRowKeysRequest, SampleRowKeysResponse>
        sampleRowKeysSettings;
    private final UnaryCallSettings.Builder<MutateRowRequest, MutateRowResponse> mutateRowSettings;
    private final ServerStreamingCallSettings.Builder<MutateRowsRequest, MutateRowsResponse>
        mutateRowsSettings;
    private final UnaryCallSettings.Builder<CheckAndMutateRowRequest, CheckAndMutateRowResponse>
        checkAndMutateRowSettings;
    private final UnaryCallSettings.Builder<ReadModifyWriteRowRequest, ReadModifyWriteRowResponse>
        readModifyWriteRowSettings;

    // Which status codes each retry profile treats as retryable.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "idempotent",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Backoff/timeout profiles referenced by name from initDefaults().
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(10L))
              .setRetryDelayMultiplier(2.0)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("idempotent_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(10L))
              .setRetryDelayMultiplier(2.0)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(20000L))
              .build();
      definitions.put("non_idempotent_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(10L))
              .setRetryDelayMultiplier(2.0)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(300000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(300000L))
              .setTotalTimeout(Duration.ofMillis(43200000L))
              .build();
      definitions.put("read_rows_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(10L))
              .setRetryDelayMultiplier(2.0)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("mutate_rows_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      readRowsSettings = ServerStreamingCallSettings.newBuilder();

      sampleRowKeysSettings = ServerStreamingCallSettings.newBuilder();

      mutateRowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      mutateRowsSettings = ServerStreamingCallSettings.newBuilder();

      checkAndMutateRowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      readModifyWriteRowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              mutateRowSettings, checkAndMutateRowSettings, readModifyWriteRowSettings);

      initDefaults(this);
    }

    private static Builder createDefault() {
      Builder builder = new Builder((ClientContext) null);
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {
      // Wire each RPC to its retryable-code set and retry-parameter profile.
      builder
          .readRowsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("read_rows_params"));

      builder
          .sampleRowKeysSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("idempotent_params"));

      builder
          .mutateRowSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("idempotent_params"));

      builder
          .mutateRowsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("mutate_rows_params"));

      builder
          .checkAndMutateRowSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("non_idempotent_params"));

      builder
          .readModifyWriteRowSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("non_idempotent_params"));

      return builder;
    }

    protected Builder(BigtableStubSettings settings) {
      super(settings);

      readRowsSettings = settings.readRowsSettings.toBuilder();
      sampleRowKeysSettings = settings.sampleRowKeysSettings.toBuilder();
      mutateRowSettings = settings.mutateRowSettings.toBuilder();
      mutateRowsSettings = settings.mutateRowsSettings.toBuilder();
      checkAndMutateRowSettings = settings.checkAndMutateRowSettings.toBuilder();
      readModifyWriteRowSettings = settings.readModifyWriteRowSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              mutateRowSettings, checkAndMutateRowSettings, readModifyWriteRowSettings);
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to readRows. */
    public ServerStreamingCallSettings.Builder<ReadRowsRequest, ReadRowsResponse>
        readRowsSettings() {
      return readRowsSettings;
    }

    /** Returns the builder for the settings used for calls to sampleRowKeys. */
    public ServerStreamingCallSettings.Builder<SampleRowKeysRequest, SampleRowKeysResponse>
        sampleRowKeysSettings() {
      return sampleRowKeysSettings;
    }

    /** Returns the builder for the settings used for calls to mutateRow. */
    public UnaryCallSettings.Builder<MutateRowRequest, MutateRowResponse> mutateRowSettings() {
      return mutateRowSettings;
    }

    /** Returns the builder for the settings used for calls to mutateRows. */
    public ServerStreamingCallSettings.Builder<MutateRowsRequest, MutateRowsResponse>
        mutateRowsSettings() {
      return mutateRowsSettings;
    }

    /** Returns the builder for the settings used for calls to checkAndMutateRow. */
    public UnaryCallSettings.Builder<CheckAndMutateRowRequest, CheckAndMutateRowResponse>
        checkAndMutateRowSettings() {
      return checkAndMutateRowSettings;
    }

    /** Returns the builder for the settings used for calls to readModifyWriteRow. */
    public UnaryCallSettings.Builder<ReadModifyWriteRowRequest, ReadModifyWriteRowResponse>
        readModifyWriteRowSettings() {
      return readModifyWriteRowSettings;
    }

    @Override
    public BigtableStubSettings build() throws IOException {
      return new BigtableStubSettings(this);
    }
  }
}
package hudson.plugins.emailext; import hudson.Extension; import hudson.model.AbstractDescribableImpl; import hudson.model.Descriptor; import hudson.util.Secret; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.DataBoundConstructor; import static hudson.Util.nullify; public class MailAccount extends AbstractDescribableImpl<MailAccount>{ private String address = null; private String smtpHost = null; private String smtpPort = null; private String smtpUsername = null; private Secret smtpPassword = null; private boolean useSsl = false; private String advProperties = null; @DataBoundConstructor public MailAccount(JSONObject jo){ address = nullify(jo.optString("address", null)); smtpHost = nullify(jo.optString("smtpHost", null)); smtpPort = nullify(jo.optString("smtpPort", null)); if(jo.optBoolean("auth", false)){ smtpUsername = nullify(jo.optString("smtpUsername", null)); String pass = nullify(jo.optString("smtpPassword", null)); if(pass != null) { smtpPassword = Secret.fromString(pass); } } useSsl = jo.optBoolean("useSsl", false); advProperties = nullify(jo.optString("advProperties", null)); } public MailAccount(){ } public boolean isValid() { return StringUtils.isNotBlank(address) && StringUtils.isNotBlank(smtpHost) && (!isAuth() || (StringUtils.isNotBlank(smtpUsername) && smtpPassword != null)); } @Extension public static class MailAccountDescriptor extends Descriptor<MailAccount>{ @Override public String getDisplayName(){ return ""; } } public boolean isAuth(){ return smtpUsername != null; } public String getAddress(){ return address; } public String getSmtpHost(){ return smtpHost; } public String getSmtpPort(){ return smtpPort; } public String getSmtpUsername(){ return smtpUsername; } public Secret getSmtpPassword(){ return smtpPassword; } public boolean isUseSsl(){ return useSsl; } public String getAdvProperties(){ return advProperties; } public void setAddress(String address){ this.address = address; } public 
void setSmtpHost(String smtpHost){ this.smtpHost = smtpHost; } public void setSmtpPort(String smtpPort){ this.smtpPort = smtpPort; } public void setSmtpUsername(String smtpUsername){ this.smtpUsername = smtpUsername; } public void setSmtpPassword(String smtpPassword){ this.smtpPassword = Secret.fromString(smtpPassword); } public void setSmtpPassword(Secret smtpPassword){ this.smtpPassword = smtpPassword; } public void setUseSsl(boolean useSsl){ this.useSsl = useSsl; } public void setAdvProperties(String advProperties){ this.advProperties = advProperties; } }
package com.fasterxml.jackson.jr.ob.impl;

import java.io.IOException;

import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.jr.ob.JSONObjectException;
import com.fasterxml.jackson.jr.ob.api.CollectionBuilder;
import com.fasterxml.jackson.jr.ob.api.ValueReader;

/**
 * Reader for typed Array values.
 */
public class ArrayReader extends ValueReader
{
    protected final Class<?> _elementType;
    protected final ValueReader _valueReader;

    public ArrayReader(Class<?> arrayType, Class<?> elementType, ValueReader vr) {
        super(arrayType);
        _elementType = elementType;
        _valueReader = vr;
    }

    /**
     * Reads an array when the parser is positioned BEFORE the value: advances
     * to the next token, accepts {@code null}, and requires START_ARRAY
     * otherwise.
     */
    @Override
    public Object readNext(JSONReader r, JsonParser p) throws IOException {
        if (p.nextToken() != JsonToken.START_ARRAY) {
            if (p.hasToken(JsonToken.VALUE_NULL)) {
                return null;
            }
            throw JSONObjectException.from(p, "Unexpected token %s; should get START_ARRAY",
                    p.currentToken());
        }
        // Remainder is identical to read(); delegate to the shared helper
        // (previously this logic was duplicated verbatim in both methods).
        return _readEntries(r, p);
    }

    /**
     * Reads an array when the parser is already positioned ON the
     * START_ARRAY token.
     */
    @Override
    public Object read(JSONReader r, JsonParser p) throws IOException {
        return _readEntries(r, p);
    }

    /**
     * Shared tail of both read paths: consumes array entries up to END_ARRAY
     * and builds the typed result, with fast paths for empty and
     * single-element arrays.
     */
    private Object _readEntries(JSONReader r, JsonParser p) throws IOException {
        CollectionBuilder b = r._collectionBuilder(null);
        if (p.nextToken() == JsonToken.END_ARRAY) {
            return b.emptyArray(_elementType);
        }
        Object value = _valueReader.read(r, p);
        if (p.nextToken() == JsonToken.END_ARRAY) {
            return b.singletonArray(_elementType, value);
        }
        b = b.start().add(value);
        do {
            b = b.add(_valueReader.read(r, p));
        } while (p.nextToken() != JsonToken.END_ARRAY);
        return b.buildArray(_elementType);
    }
}
package com.data2semantics.yasgui.mgwtlinker.linker; /* * #%L * YASGUI * %% * Copyright (C) 2013 Laurens Rietveld * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
/* #L% */

import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Writing a manifest file from a given set of resources
 *
 * @author Daniel Kurka
 *
 */
public class ManifestWriter {

	/**
	 * Write a manifest file for the given set of artifacts and return it as a
	 * string
	 *
	 * @param staticResources - the static resources of the app, such as
	 *            index.html file
	 * @param cacheResources the gwt output artifacts like cache.html files
	 * @param fallbacks optional mapping of online resource to offline fallback
	 *            (may be null or empty; then no FALLBACK section is emitted)
	 * @return the manifest as a string
	 * @throws IllegalArgumentException if staticResources or cacheResources is null
	 */
	public String writeManifest(List<String> staticResources, Set<String> cacheResources, Map<String, String> fallbacks) {

		if (staticResources == null)
			throw new IllegalArgumentException("staticResources can not be null");

		if (cacheResources == null)
			throw new IllegalArgumentException("cacheResources can not be null");

		StringBuilder sb = new StringBuilder();
		sb.append("CACHE MANIFEST\n");
		//build something unique so that the manifest file changes on recompile
		sb.append("# Unique id #" + (new Date()).getTime() + "." + Math.random() + "\n");
		sb.append("\n");
		sb.append("CACHE:\n");
		sb.append("# Static app files\n");
		for (String resources : staticResources) {
			sb.append(resources + "\n");
		}

		sb.append("\n# GWT compiled files\n");
		for (String resources : cacheResources) {
			sb.append(resources + "\n");
		}

		if (fallbacks != null && fallbacks.size() > 0) {
			sb.append("\n\n");
			sb.append("FALLBACK:\n");
			for (Entry<String, String> entry : fallbacks.entrySet()) {
				// FIX: each fallback pair must be on its own line; without the
				// trailing newline consecutive entries were fused into one
				// (invalid) manifest line.
				sb.append(entry.getKey() + " " + entry.getValue() + "\n");
			}
		}

		sb.append("\n\n");
		sb.append("# All other resources require the client to be online.\n");
		sb.append("NETWORK:\n");
		sb.append("*\n");
		return sb.toString();
	}
}
package at.outdated.bitcoin.exchange.api.track;

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

/**
 * Created with IntelliJ IDEA.
 * User: ebirn
 * Date: 10.05.13
 * Time: 14:03
 * To change this template use File | Settings | File Templates.
 *
 * A {@code ValueTrack} of numeric samples that additionally feeds every
 * inserted value into a commons-math {@link DescriptiveStatistics} instance
 * (constructed with the same {@code count} capacity as the track itself).
 */
public class NumberTrack extends ValueTrack<Number> {

    // Aggregated statistics over the inserted values; sized with the same
    // count passed to the ValueTrack superclass.
    DescriptiveStatistics stats;

    public NumberTrack(int count) {
        super(count);
        stats = new DescriptiveStatistics(count);
    }

    /**
     * Stores the value in the track and folds its double value into the
     * statistics accumulator.
     */
    @Override
    public void insert(Number value) {
        super.insert(value);
        stats.addValue(value.doubleValue());
    }

    /** Returns the statistics accumulated over all inserted values. */
    public DescriptiveStatistics getStatistics() {
        return stats;
    }
}
package com.hnca.gongshangcheck.bean; import android.content.Context; import android.content.res.TypedArray; import android.support.annotation.Nullable; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.hnca.gongshangcheck.R; /** * 创建人: syy * 创建时间:2019-04-03 * 功能描述: */ public class item_view extends LinearLayout { private ImageView imageView;//item的图标 private TextView textView;//item的文字 private ImageView bottomview; private boolean isbootom=true;//是否显示底部的下划线 public item_view(Context context) { this(context,null); } public item_view(Context context, @Nullable AttributeSet attrs) { this(context, attrs,-1); } public item_view(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); LayoutInflater.from(getContext()).inflate(R.layout.setting_item_view,this); /* LayoutInflater mInflater = LayoutInflater.from(context); View myView = mInflater.inflate(R.layout.item_view, null); addView(myView);*/ TypedArray ta=context.obtainStyledAttributes(attrs,R.styleable.item_view); isbootom=ta.getBoolean(R.styleable.item_view_show_bottomline,true); bottomview=findViewById(R.id.item_bottom); imageView=findViewById(R.id.item_img); textView=findViewById(R.id.item_text); textView.setText(ta.getString(R.styleable.item_view_show_text)); imageView.setBackgroundResource(ta.getResourceId(R.styleable.item_view_show_leftimg,R.drawable.ic_sitting)); ta.recycle(); initview(); } private void initview() { if(isbootom){ bottomview.setVisibility(View.VISIBLE); }else{ bottomview.setVisibility(View.GONE); } } }
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.test.context;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.annotation.AliasFor;

/**
 * {@code @ActiveProfiles} is a class-level annotation used to declare which
 * <em>active bean definition profiles</em> should be used when loading an
 * {@link org.springframework.context.ApplicationContext ApplicationContext}
 * for test classes.
 *
 * <p>As of Spring Framework 4.0, this annotation may also be used as a
 * <em>meta-annotation</em> to create custom <em>composed annotations</em>.
 *
 * @author Sam Brannen
 * @since 3.1
 * @see SmartContextLoader
 * @see MergedContextConfiguration
 * @see ContextConfiguration
 * @see ActiveProfilesResolver
 * @see org.springframework.context.ApplicationContext
 * @see org.springframework.context.annotation.Profile
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
public @interface ActiveProfiles {

	/**
	 * Alias for {@link #profiles}.
	 * <p>This attribute may <strong>not</strong> be used in conjunction with
	 * {@link #profiles}, but it may be used <em>instead</em> of {@link #profiles}.
	 */
	@AliasFor("profiles")
	String[] value() default {};

	/**
	 * The bean definition profiles to activate.
	 * <p>This attribute may <strong>not</strong> be used in conjunction with
	 * {@link #value}, but it may be used <em>instead</em> of {@link #value}.
	 */
	@AliasFor("value")
	String[] profiles() default {};

	/**
	 * The type of {@link ActiveProfilesResolver} to use for programmatically
	 * resolving the active bean definition profiles.
	 * @since 4.0
	 * @see ActiveProfilesResolver
	 */
	Class<? extends ActiveProfilesResolver> resolver() default ActiveProfilesResolver.class;

	/**
	 * Whether bean definition profiles from superclasses should be
	 * <em>inherited</em>.
	 * <p>The default value is {@code true}: a test class <em>inherits</em> the
	 * profiles declared by its superclass, and its own profiles are
	 * <em>appended</em> to that inherited list, so subclasses may
	 * <em>extend</em> the set of active profiles.
	 * <p>If set to {@code false}, the profiles declared on the test class
	 * <em>shadow</em> and effectively replace those declared by a superclass.
	 * <p>In the following example, the {@code ApplicationContext} for
	 * {@code BaseTest} is loaded using only the &quot;base&quot; profile,
	 * whereas the context for {@code ExtendedTest} is loaded using the
	 * &quot;base&quot; <strong>and</strong> &quot;extended&quot; profiles.
	 * <pre class="code">
	 * &#064;ActiveProfiles(&quot;base&quot;)
	 * &#064;ContextConfiguration
	 * public class BaseTest {
	 *     // ...
	 * }
	 *
	 * &#064;ActiveProfiles(&quot;extended&quot;)
	 * &#064;ContextConfiguration
	 * public class ExtendedTest extends BaseTest {
	 *     // ...
	 * }
	 * </pre>
	 * <p>Note: {@code @ActiveProfiles} can be used when loading an
	 * {@code ApplicationContext} from path-based resource locations or
	 * annotated classes.
	 * @see ContextConfiguration#locations
	 * @see ContextConfiguration#classes
	 * @see ContextConfiguration#inheritLocations
	 */
	boolean inheritProfiles() default true;

}
package io.github.hooj0.arrays._07_expand_capacity_array; /** * 数组扩容测试 * @author hoojo * @createDate 2018年8月11日 上午10:33:56 * @file App.java * @package io.github.hooj0.arrays._07_expand_capacity_array * @project data-struct * @blog http://hoojo.cnblogs.com * @email hoojo_@126.com * @version 1.0 */ public class App { public static void main(String[] args) { ExpandCapacityArray<Object> array = new ExpandCapacityArray<>(3); array.addLast(1); array.addLast(2); array.addLast("c"); System.out.println(array); // size: 3, cap: 3 [1, 2, c] // 扩容,可以存储更多元素 array.resize(5); array.addLast("d"); System.out.println(array); // size: 4, cap: 5 [1, 2, c, d, null] array.addLast(111); array.addLast(222); array.addLast(333); // auto resize System.out.println(array); // size: 7, cap: 10 [1, 2, c, d, 111, 222, 333, null, null, null] array.removeFirst(); array.remove(3); array.removeLast(); System.out.println(array); // size: 4, cap: 5 [2, c, d, 222, null] } }
package net.minecraft.item.crafting;

import com.google.common.collect.Lists;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonSyntaxException;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntComparators;
import it.unimi.dsi.fastutil.ints.IntList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import javax.annotation.Nullable;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.network.PacketBuffer;
import net.minecraft.tags.ITag;
import net.minecraft.tags.TagCollectionManager;
import net.minecraft.util.IItemProvider;
import net.minecraft.util.JSONUtils;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.registry.Registry;

/**
 * A recipe ingredient: a set of accepted item lists (single items and/or
 * item tags). Acts as a {@link Predicate} over {@link ItemStack}s — an
 * ingredient matches a stack when the stack's item appears in any of its
 * accepted lists. Supports JSON (de)serialization and network read/write.
 */
public final class Ingredient implements Predicate<ItemStack> {
   // Shared ingredient that accepts nothing (empty item-list array).
   public static final Ingredient EMPTY = new Ingredient(Stream.empty());

   // The raw item lists (single items and/or tags) this ingredient accepts.
   private final Ingredient.IItemList[] acceptedItems;
   // Lazily built flat, de-duplicated array of all accepted stacks;
   // null until determineMatchingStacks() runs.
   private ItemStack[] matchingStacks;
   // Lazily built sorted packed-int form of matchingStacks;
   // null until getValidItemStacksPacked() runs.
   private IntList matchingStacksPacked;

   private Ingredient(Stream<? extends Ingredient.IItemList> itemLists) {
      this.acceptedItems = itemLists.toArray((size) -> {
         return new Ingredient.IItemList[size];
      });
   }

   /** Returns all stacks this ingredient accepts (computed lazily, then cached). */
   public ItemStack[] getMatchingStacks() {
      this.determineMatchingStacks();
      return this.matchingStacks;
   }

   // Populates matchingStacks on first use by flattening and de-duplicating
   // the stacks of every accepted item list.
   private void determineMatchingStacks() {
      if (this.matchingStacks == null) {
         this.matchingStacks = Arrays.stream(this.acceptedItems).flatMap((ingredientList) -> {
            return ingredientList.getStacks().stream();
         }).distinct().toArray((size) -> {
            return new ItemStack[size];
         });
      }
   }

   /**
    * Tests whether the given stack's item matches any accepted stack's item.
    * A null stack never matches; an ingredient with no matching stacks only
    * matches empty stacks.
    */
   public boolean test(@Nullable ItemStack p_test_1_) {
      if (p_test_1_ == null) {
         return false;
      } else {
         this.determineMatchingStacks();
         if (this.matchingStacks.length == 0) {
            return p_test_1_.isEmpty();
         } else {
            for (ItemStack itemstack : this.matchingStacks) {
               if (itemstack.getItem() == p_test_1_.getItem()) {
                  return true;
               }
            }

            return false;
         }
      }
   }

   /**
    * Returns the matching stacks as a sorted list of packed ints
    * (via RecipeItemHelper.pack); computed lazily, then cached.
    */
   public IntList getValidItemStacksPacked() {
      if (this.matchingStacksPacked == null) {
         this.determineMatchingStacks();
         this.matchingStacksPacked = new IntArrayList(this.matchingStacks.length);

         for (ItemStack itemstack : this.matchingStacks) {
            this.matchingStacksPacked.add(RecipeItemHelper.pack(itemstack));
         }

         this.matchingStacksPacked.sort(IntComparators.NATURAL_COMPARATOR);
      }

      return this.matchingStacksPacked;
   }

   /** Writes this ingredient to the buffer as a count followed by each matching stack. */
   public void write(PacketBuffer buffer) {
      this.determineMatchingStacks();
      buffer.writeVarInt(this.matchingStacks.length);

      for (int i = 0; i < this.matchingStacks.length; ++i) {
         buffer.writeItemStack(this.matchingStacks[i]);
      }
   }

   /**
    * Serializes to JSON: a single item list becomes one object, multiple
    * lists become a JSON array of objects.
    */
   public JsonElement serialize() {
      if (this.acceptedItems.length == 1) {
         return this.acceptedItems[0].serialize();
      } else {
         JsonArray jsonarray = new JsonArray();

         for (Ingredient.IItemList ingredient$iitemlist : this.acceptedItems) {
            jsonarray.add(ingredient$iitemlist.serialize());
         }

         return jsonarray;
      }
   }

   /** True only when there are no accepted lists and no (cached) matching stacks. */
   public boolean hasNoMatchingItems() {
      return this.acceptedItems.length == 0 && (this.matchingStacks == null || this.matchingStacks.length == 0) && (this.matchingStacksPacked == null || this.matchingStacksPacked.isEmpty());
   }

   // Builds an Ingredient from the stream, collapsing the no-item case to
   // the shared EMPTY instance.
   private static Ingredient fromItemListStream(Stream<? extends Ingredient.IItemList> stream) {
      Ingredient ingredient = new Ingredient(stream);
      return ingredient.acceptedItems.length == 0 ? EMPTY : ingredient;
   }

   /** Builds an ingredient from item providers (each wrapped in a new ItemStack). */
   public static Ingredient fromItems(IItemProvider... itemsIn) {
      return fromStacks(Arrays.stream(itemsIn).map(ItemStack::new));
   }

   /** Builds an ingredient from explicit stacks. */
   public static Ingredient fromStacks(ItemStack... stacks) {
      return fromStacks(Arrays.stream(stacks));
   }

   /** Builds an ingredient from a stream of stacks; empty stacks are skipped. */
   public static Ingredient fromStacks(Stream<ItemStack> stacks) {
      return fromItemListStream(stacks.filter((stack) -> {
         return !stack.isEmpty();
      }).map((stack) -> {
         return new Ingredient.SingleItemList(stack);
      }));
   }

   /** Builds an ingredient accepting every item in the given tag. */
   public static Ingredient fromTag(ITag<Item> tagIn) {
      return fromItemListStream(Stream.of(new Ingredient.TagList(tagIn)));
   }

   /** Reads an ingredient from the buffer: a varint count, then that many stacks. */
   public static Ingredient read(PacketBuffer buffer) {
      int i = buffer.readVarInt();
      return fromItemListStream(Stream.generate(() -> {
         return new Ingredient.SingleItemList(buffer.readItemStack());
      }).limit((long)i));
   }

   /**
    * Deserializes an ingredient from JSON. Accepts a single object or a
    * non-empty array of objects; throws JsonSyntaxException for null input,
    * an empty array, or any other JSON shape.
    */
   public static Ingredient deserialize(@Nullable JsonElement json) {
      if (json != null && !json.isJsonNull()) {
         if (json.isJsonObject()) {
            return fromItemListStream(Stream.of(deserializeItemList(json.getAsJsonObject())));
         } else if (json.isJsonArray()) {
            JsonArray jsonarray = json.getAsJsonArray();
            if (jsonarray.size() == 0) {
               throw new JsonSyntaxException("Item array cannot be empty, at least one item must be defined");
            } else {
               return fromItemListStream(StreamSupport.stream(jsonarray.spliterator(), false).map((element) -> {
                  return deserializeItemList(JSONUtils.getJsonObject(element, "item"));
               }));
            }
         } else {
            throw new JsonSyntaxException("Expected item to be object or array of objects");
         }
      } else {
         throw new JsonSyntaxException("Item cannot be null");
      }
   }

   // Parses one JSON entry into an item list. Exactly one of "item" or "tag"
   // must be present; unknown item ids or tag ids are rejected.
   private static Ingredient.IItemList deserializeItemList(JsonObject json) {
      if (json.has("item") && json.has("tag")) {
         throw new JsonParseException("An ingredient entry is either a tag or an item, not both");
      } else if (json.has("item")) {
         ResourceLocation resourcelocation1 = new ResourceLocation(JSONUtils.getString(json, "item"));
         Item item = Registry.ITEM.getOptional(resourcelocation1).orElseThrow(() -> {
            return new JsonSyntaxException("Unknown item '" + resourcelocation1 + "'");
         });
         return new Ingredient.SingleItemList(new ItemStack(item));
      } else if (json.has("tag")) {
         ResourceLocation resourcelocation = new ResourceLocation(JSONUtils.getString(json, "tag"));
         ITag<Item> itag = TagCollectionManager.getManager().getItemTags().get(resourcelocation);
         if (itag == null) {
            throw new JsonSyntaxException("Unknown item tag '" + resourcelocation + "'");
         } else {
            return new Ingredient.TagList(itag);
         }
      } else {
         throw new JsonParseException("An ingredient entry needs either a tag or an item");
      }
   }

   // One source of accepted stacks (a single item or a whole tag), able to
   // serialize itself back to JSON.
   interface IItemList {
      Collection<ItemStack> getStacks();

      JsonObject serialize();
   }

   // An item list consisting of exactly one stack.
   static class SingleItemList implements Ingredient.IItemList {
      private final ItemStack stack;

      private SingleItemList(ItemStack stackIn) {
         this.stack = stackIn;
      }

      public Collection<ItemStack> getStacks() {
         return Collections.singleton(this.stack);
      }

      public JsonObject serialize() {
         JsonObject jsonobject = new JsonObject();
         jsonobject.addProperty("item", Registry.ITEM.getKey(this.stack.getItem()).toString());
         return jsonobject;
      }
   }

   // An item list backed by an item tag: yields one stack per tag element.
   static class TagList implements Ingredient.IItemList {
      private final ITag<Item> tag;

      private TagList(ITag<Item> tagIn) {
         this.tag = tagIn;
      }

      public Collection<ItemStack> getStacks() {
         List<ItemStack> list = Lists.newArrayList();

         for (Item item : this.tag.getAllElements()) {
            list.add(new ItemStack(item));
         }

         return list;
      }

      public JsonObject serialize() {
         JsonObject jsonobject = new JsonObject();
         jsonobject.addProperty("tag", TagCollectionManager.getManager().getItemTags().getValidatedIdFromTag(this.tag).toString());
         return jsonobject;
      }
   }
}
package org.jboss.weld.lite.extension.translator;

import jakarta.enterprise.inject.build.compatible.spi.Enhancement;
import org.jboss.weld.lite.extension.translator.logging.LiteExtensionTranslatorLogger;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.function.Consumer;

/**
 * Handles extension methods annotated {@code @Enhancement}. For each such
 * method, validates its parameters, builds a consumer that will invoke the
 * method once (for Class{Info,Config} queries) or once per member (for
 * {Method,Field}{Info,Config} queries) against a ProcessAnnotatedType event,
 * and registers that consumer as an {@link ExtensionPhaseEnhancementAction}
 * together with the @Enhancement filter (types, withSubtypes, withAnnotations).
 */
class ExtensionPhaseEnhancement extends ExtensionPhaseBase {
    // Collected actions; populated by runExtensionMethod and consumed later
    // by the caller that owns this list.
    private final List<ExtensionPhaseEnhancementAction> actions;

    ExtensionPhaseEnhancement(jakarta.enterprise.inject.spi.BeanManager beanManager, ExtensionInvoker util,
            SharedErrors errors, List<ExtensionPhaseEnhancementAction> actions) {
        super(ExtensionPhase.ENHANCEMENT, beanManager, util, errors);
        this.actions = actions;
    }

    @Override
    void runExtensionMethod(java.lang.reflect.Method method) {
        // Classify each declared parameter and count "query" parameters
        // (the parameter that selects what the method is invoked against).
        int numParameters = method.getParameterCount();
        int numQueryParameters = 0;
        List<ExtensionMethodParameterType> parameters = new ArrayList<>(numParameters);
        for (int i = 0; i < numParameters; i++) {
            Class<?> parameterType = method.getParameterTypes()[i];
            ExtensionMethodParameterType parameter = ExtensionMethodParameterType.of(parameterType);
            parameters.add(parameter);

            if (parameter.isQuery()) {
                numQueryParameters++;
            }

            // Reject parameter types not available in the ENHANCEMENT phase.
            parameter.verifyAvailable(ExtensionPhase.ENHANCEMENT, method);
        }

        // Exactly one query parameter is required.
        if (numQueryParameters == 0 || numQueryParameters > 1) {
            throw LiteExtensionTranslatorLogger.LOG.incorrectParameterCount("ClassInfo, MethodInfo, FieldInfo, ClassConfig, MethodConfig, or FieldConfig",
                    method, method.getDeclaringClass());
        }

        ExtensionMethodParameterType query = parameters.stream()
                .filter(ExtensionMethodParameterType::isQuery)
                .findAny()
                .get(); // guaranteed to be there

        Consumer<jakarta.enterprise.inject.spi.ProcessAnnotatedType<?>> patAcceptor = pat -> {
            // for Class{Info,Config}, there's just 1 argument list (one call);
            // for {Field,Method}{Info,Config}, there's multiple argument lists
            // (one call for each field/method)
            List<List<Object>> argumentsForAllInvocations = new ArrayList<>();
            if (query == ExtensionMethodParameterType.CLASS_INFO) {
                // Single invocation: the annotated type itself, read-only view.
                List<Object> arguments = new ArrayList<>(numParameters);
                for (ExtensionMethodParameterType parameter : parameters) {
                    Object argument;
                    if (parameter == ExtensionMethodParameterType.CLASS_INFO) {
                        argument = new ClassInfoImpl(pat.getAnnotatedType(), beanManager);
                    } else {
                        argument = argumentForExtensionMethod(parameter, method);
                    }
                    arguments.add(argument);
                }
                argumentsForAllInvocations.add(arguments);
            } else if (query == ExtensionMethodParameterType.CLASS_CONFIG) {
                // Single invocation: the annotated type, configurable view.
                List<Object> arguments = new ArrayList<>(numParameters);
                for (ExtensionMethodParameterType parameter : parameters) {
                    Object argument;
                    if (parameter == ExtensionMethodParameterType.CLASS_CONFIG) {
                        argument = new ClassConfigImpl(pat.configureAnnotatedType(), beanManager);
                    } else {
                        argument = argumentForExtensionMethod(parameter, method);
                    }
                    arguments.add(argument);
                }
                argumentsForAllInvocations.add(arguments);
            } else if (query == ExtensionMethodParameterType.METHOD_INFO) {
                // One invocation per method of the annotated type...
                for (jakarta.enterprise.inject.spi.AnnotatedMethod<?> targetMethod : pat.getAnnotatedType().getMethods()) {
                    List<Object> arguments = new ArrayList<>(numParameters);
                    for (ExtensionMethodParameterType parameter : parameters) {
                        Object argument;
                        if (parameter == ExtensionMethodParameterType.METHOD_INFO) {
                            argument = new MethodInfoImpl(targetMethod, beanManager);
                        } else {
                            argument = argumentForExtensionMethod(parameter, method);
                        }
                        arguments.add(argument);
                    }
                    argumentsForAllInvocations.add(arguments);
                }
                // ...and per constructor (constructors are also exposed as MethodInfo).
                for (jakarta.enterprise.inject.spi.AnnotatedConstructor<?> targetConstructor : pat.getAnnotatedType().getConstructors()) {
                    List<Object> arguments = new ArrayList<>(numParameters);
                    for (ExtensionMethodParameterType parameter : parameters) {
                        Object argument;
                        if (parameter == ExtensionMethodParameterType.METHOD_INFO) {
                            argument = new MethodInfoImpl(targetConstructor, beanManager);
                        } else {
                            argument = argumentForExtensionMethod(parameter, method);
                        }
                        arguments.add(argument);
                    }
                    argumentsForAllInvocations.add(arguments);
                }
            } else if (query == ExtensionMethodParameterType.METHOD_CONFIG) {
                // One invocation per method configurator...
                for (jakarta.enterprise.inject.spi.configurator.AnnotatedMethodConfigurator<?> targetMethodConfigurator : pat.configureAnnotatedType().methods()) {
                    List<Object> arguments = new ArrayList<>(numParameters);
                    for (ExtensionMethodParameterType parameter : parameters) {
                        Object argument;
                        if (parameter == ExtensionMethodParameterType.METHOD_CONFIG) {
                            argument = new MethodConfigImpl(targetMethodConfigurator, beanManager);
                        } else {
                            argument = argumentForExtensionMethod(parameter, method);
                        }
                        arguments.add(argument);
                    }
                    argumentsForAllInvocations.add(arguments);
                }
                // ...and per constructor configurator (note: wrapped in
                // MethodConstructorConfigImpl, not MethodConfigImpl).
                for (jakarta.enterprise.inject.spi.configurator.AnnotatedConstructorConfigurator<?> targetConstructorConfigurator : pat.configureAnnotatedType().constructors()) {
                    List<Object> arguments = new ArrayList<>(numParameters);
                    for (ExtensionMethodParameterType parameter : parameters) {
                        Object argument;
                        if (parameter == ExtensionMethodParameterType.METHOD_CONFIG) {
                            argument = new MethodConstructorConfigImpl(targetConstructorConfigurator, beanManager);
                        } else {
                            argument = argumentForExtensionMethod(parameter, method);
                        }
                        arguments.add(argument);
                    }
                    argumentsForAllInvocations.add(arguments);
                }
            } else if (query == ExtensionMethodParameterType.FIELD_INFO) {
                // One invocation per field, read-only view.
                for (jakarta.enterprise.inject.spi.AnnotatedField<?> targetField : pat.getAnnotatedType().getFields()) {
                    List<Object> arguments = new ArrayList<>(numParameters);
                    for (ExtensionMethodParameterType parameter : parameters) {
                        Object argument;
                        if (parameter == ExtensionMethodParameterType.FIELD_INFO) {
                            argument = new FieldInfoImpl(targetField, beanManager);
                        } else {
                            argument = argumentForExtensionMethod(parameter, method);
                        }
                        arguments.add(argument);
                    }
                    argumentsForAllInvocations.add(arguments);
                }
            } else if (query == ExtensionMethodParameterType.FIELD_CONFIG) {
                // One invocation per field configurator.
                for (jakarta.enterprise.inject.spi.configurator.AnnotatedFieldConfigurator<?> targetFieldConfigurator : pat.configureAnnotatedType().fields()) {
                    List<Object> arguments = new ArrayList<>(numParameters);
                    for (ExtensionMethodParameterType parameter : parameters) {
                        Object argument;
                        if (parameter == ExtensionMethodParameterType.FIELD_CONFIG) {
                            argument = new FieldConfigImpl(targetFieldConfigurator, beanManager);
                        } else {
                            argument = argumentForExtensionMethod(parameter, method);
                        }
                        arguments.add(argument);
                    }
                    argumentsForAllInvocations.add(arguments);
                }
            } else {
                throw LiteExtensionTranslatorLogger.LOG.unknownQueryParameter(query);
            }

            // Invoke the extension method once per assembled argument list.
            for (List<Object> arguments : argumentsForAllInvocations) {
                try {
                    util.callExtensionMethod(method, arguments);
                } catch (ReflectiveOperationException e) {
                    throw LiteExtensionTranslatorLogger.LOG.unableToInvokeExtensionMethod(method, arguments, e.toString());
                }
            }
        };

        // Register the acceptor together with the @Enhancement filter data.
        Enhancement enhancement = method.getAnnotation(Enhancement.class);
        actions.add(new ExtensionPhaseEnhancementAction(new HashSet<>(Arrays.asList(enhancement.types())),
                enhancement.withSubtypes(), new HashSet<>(Arrays.asList(enhancement.withAnnotations())), patAcceptor));
    }

    @Override
    Object argumentForExtensionMethod(ExtensionMethodParameterType type, java.lang.reflect.Method method) {
        // ENHANCEMENT additionally supports a Types parameter; everything
        // else is delegated to the base phase.
        if (type == ExtensionMethodParameterType.TYPES) {
            return new TypesImpl(beanManager);
        }

        return super.argumentForExtensionMethod(type, method);
    }
}
/* * Copyright 2002-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.jdbc.datasource.init; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.core.io.Resource; import org.springframework.core.io.support.EncodedResource; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; import java.io.IOException; import java.io.LineNumberReader; import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLWarning; import java.sql.Statement; import java.util.ArrayList; import java.util.List; /** * Generic utility methods for working with SQL scripts. * * <p>Mainly for internal use within the framework. * * @author Thomas Risberg * @author Sam Brannen * @author Juergen Hoeller * @author Keith Donald * @author Dave Syer * @author Chris Beams * @author Oliver Gierke * @author Chris Baldwin * @author Nicolas Debeissat * @author Phillip Webb * @since 4.0.3 */ public abstract class ScriptUtils { /** * Default statement separator within SQL scripts: {@code ";"}. */ public static final String DEFAULT_STATEMENT_SEPARATOR = ";"; /** * Fallback statement separator within SQL scripts: {@code "\n"}. * <p>Used if neither a custom separator nor the * {@link #DEFAULT_STATEMENT_SEPARATOR} is present in a given script. 
*/ public static final String FALLBACK_STATEMENT_SEPARATOR = "\n"; /** * End of file (EOF) SQL statement separator: {@code "^^^ END OF SCRIPT ^^^"}. * <p>This value may be supplied as the {@code separator} to {@link * #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String)} * to denote that an SQL script contains a single statement (potentially * spanning multiple lines) with no explicit statement separator. Note that * such a script should not actually contain this value; it is merely a * <em>virtual</em> statement separator. */ public static final String EOF_STATEMENT_SEPARATOR = "^^^ END OF SCRIPT ^^^"; /** * Default prefix for single-line comments within SQL scripts: {@code "--"}. */ public static final String DEFAULT_COMMENT_PREFIX = "--"; /** * Default prefixes for single-line comments within SQL scripts: {@code ["--"]}. * * @since 5.2 */ public static final String[] DEFAULT_COMMENT_PREFIXES = {DEFAULT_COMMENT_PREFIX}; /** * Default start delimiter for block comments within SQL scripts: {@code "/*"}. */ public static final String DEFAULT_BLOCK_COMMENT_START_DELIMITER = "/*"; /** * Default end delimiter for block comments within SQL scripts: <code>"*&#47;"</code>. */ public static final String DEFAULT_BLOCK_COMMENT_END_DELIMITER = "*/"; private static final Log logger = LogFactory.getLog(ScriptUtils.class); /** * Split an SQL script into separate statements delimited by the provided * separator character. Each individual statement will be added to the * provided {@code List}. * <p>Within the script, {@value #DEFAULT_COMMENT_PREFIX} will be used as the * comment prefix; any text beginning with the comment prefix and extending to * the end of the line will be omitted from the output. 
Similarly, * {@value #DEFAULT_BLOCK_COMMENT_START_DELIMITER} and * {@value #DEFAULT_BLOCK_COMMENT_END_DELIMITER} will be used as the * <em>start</em> and <em>end</em> block comment delimiters: any text enclosed * in a block comment will be omitted from the output. In addition, multiple * adjacent whitespace characters will be collapsed into a single space. * * @param script the SQL script * @param separator character separating each statement (typically a ';') * @param statements the list that will contain the individual statements * @throws ScriptException if an error occurred while splitting the SQL script * @see #splitSqlScript(String, String, List) * @see #splitSqlScript(EncodedResource, String, String, String, String, String, List) */ public static void splitSqlScript(String script, char separator, List<String> statements) throws ScriptException { splitSqlScript(script, String.valueOf(separator), statements); } /** * Split an SQL script into separate statements delimited by the provided * separator string. Each individual statement will be added to the * provided {@code List}. * <p>Within the script, {@value #DEFAULT_COMMENT_PREFIX} will be used as the * comment prefix; any text beginning with the comment prefix and extending to * the end of the line will be omitted from the output. Similarly, * {@value #DEFAULT_BLOCK_COMMENT_START_DELIMITER} and * {@value #DEFAULT_BLOCK_COMMENT_END_DELIMITER} will be used as the * <em>start</em> and <em>end</em> block comment delimiters: any text enclosed * in a block comment will be omitted from the output. In addition, multiple * adjacent whitespace characters will be collapsed into a single space. 
* * @param script the SQL script * @param separator text separating each statement * (typically a ';' or newline character) * @param statements the list that will contain the individual statements * @throws ScriptException if an error occurred while splitting the SQL script * @see #splitSqlScript(String, char, List) * @see #splitSqlScript(EncodedResource, String, String, String, String, String, List) */ public static void splitSqlScript(String script, String separator, List<String> statements) throws ScriptException { splitSqlScript(null, script, separator, DEFAULT_COMMENT_PREFIX, DEFAULT_BLOCK_COMMENT_START_DELIMITER, DEFAULT_BLOCK_COMMENT_END_DELIMITER, statements); } /** * Split an SQL script into separate statements delimited by the provided * separator string. Each individual statement will be added to the provided * {@code List}. * <p>Within the script, the provided {@code commentPrefix} will be honored: * any text beginning with the comment prefix and extending to the end of the * line will be omitted from the output. Similarly, the provided * {@code blockCommentStartDelimiter} and {@code blockCommentEndDelimiter} * delimiters will be honored: any text enclosed in a block comment will be * omitted from the output. In addition, multiple adjacent whitespace characters * will be collapsed into a single space. 
* * @param resource the resource from which the script was read * @param script the SQL script * @param separator text separating each statement * (typically a ';' or newline character) * @param commentPrefix the prefix that identifies SQL line comments * (typically "--") * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter; * never {@code null} or empty * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter; * never {@code null} or empty * @param statements the list that will contain the individual statements * @throws ScriptException if an error occurred while splitting the SQL script */ public static void splitSqlScript(@Nullable EncodedResource resource, String script, String separator, String commentPrefix, String blockCommentStartDelimiter, String blockCommentEndDelimiter, List<String> statements) throws ScriptException { Assert.hasText(commentPrefix, "'commentPrefix' must not be null or empty"); splitSqlScript(resource, script, separator, new String[]{commentPrefix}, blockCommentStartDelimiter, blockCommentEndDelimiter, statements); } /** * Split an SQL script into separate statements delimited by the provided * separator string. Each individual statement will be added to the provided * {@code List}. * <p>Within the script, the provided {@code commentPrefixes} will be honored: * any text beginning with one of the comment prefixes and extending to the * end of the line will be omitted from the output. Similarly, the provided * {@code blockCommentStartDelimiter} and {@code blockCommentEndDelimiter} * delimiters will be honored: any text enclosed in a block comment will be * omitted from the output. In addition, multiple adjacent whitespace characters * will be collapsed into a single space. 
* * @param resource the resource from which the script was read * @param script the SQL script * @param separator text separating each statement * (typically a ';' or newline character) * @param commentPrefixes the prefixes that identify SQL line comments * (typically "--") * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter; * never {@code null} or empty * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter; * never {@code null} or empty * @param statements the list that will contain the individual statements * @throws ScriptException if an error occurred while splitting the SQL script * @since 5.2 */ public static void splitSqlScript(@Nullable EncodedResource resource, String script, String separator, String[] commentPrefixes, String blockCommentStartDelimiter, String blockCommentEndDelimiter, List<String> statements) throws ScriptException { Assert.hasText(script, "'script' must not be null or empty"); Assert.notNull(separator, "'separator' must not be null"); Assert.notEmpty(commentPrefixes, "'commentPrefixes' must not be null or empty"); for (String commentPrefix : commentPrefixes) { Assert.hasText(commentPrefix, "'commentPrefixes' must not contain null or empty elements"); } Assert.hasText(blockCommentStartDelimiter, "'blockCommentStartDelimiter' must not be null or empty"); Assert.hasText(blockCommentEndDelimiter, "'blockCommentEndDelimiter' must not be null or empty"); StringBuilder sb = new StringBuilder(); boolean inSingleQuote = false; boolean inDoubleQuote = false; boolean inEscape = false; for (int i = 0; i < script.length(); i++) { char c = script.charAt(i); if (inEscape) { inEscape = false; sb.append(c); continue; } // MySQL style escapes if (c == '\\') { inEscape = true; sb.append(c); continue; } if (!inDoubleQuote && (c == '\'')) { inSingleQuote = !inSingleQuote; } else if (!inSingleQuote && (c == '"')) { inDoubleQuote = !inDoubleQuote; } if (!inSingleQuote && !inDoubleQuote) { if 
(script.startsWith(separator, i)) { // We've reached the end of the current statement if (sb.length() > 0) { statements.add(sb.toString()); sb = new StringBuilder(); } i += separator.length() - 1; continue; } else if (startsWithAny(script, commentPrefixes, i)) { // Skip over any content from the start of the comment to the EOL int indexOfNextNewline = script.indexOf('\n', i); if (indexOfNextNewline > i) { i = indexOfNextNewline; continue; } else { // If there's no EOL, we must be at the end of the script, so stop here. break; } } else if (script.startsWith(blockCommentStartDelimiter, i)) { // Skip over any block comments int indexOfCommentEnd = script.indexOf(blockCommentEndDelimiter, i); if (indexOfCommentEnd > i) { i = indexOfCommentEnd + blockCommentEndDelimiter.length() - 1; continue; } else { throw new ScriptParseException( "Missing block comment end delimiter: " + blockCommentEndDelimiter, resource); } } else if (c == ' ' || c == '\r' || c == '\n' || c == '\t') { // Avoid multiple adjacent whitespace characters if (sb.length() > 0 && sb.charAt(sb.length() - 1) != ' ') { c = ' '; } else { continue; } } } sb.append(c); } if (StringUtils.hasText(sb)) { statements.add(sb.toString()); } } /** * Read a script from the given resource, using "{@code --}" as the comment prefix * and "{@code ;}" as the statement separator, and build a String containing the lines. * * @param resource the {@code EncodedResource} to be read * @return {@code String} containing the script lines * @throws IOException in case of I/O errors */ static String readScript(EncodedResource resource) throws IOException { return readScript(resource, DEFAULT_COMMENT_PREFIXES, DEFAULT_STATEMENT_SEPARATOR, DEFAULT_BLOCK_COMMENT_END_DELIMITER); } /** * Read a script from the provided resource, using the supplied comment prefixes * and statement separator, and build a {@code String} containing the lines. 
 * <p>Lines <em>beginning</em> with one of the comment prefixes are excluded
 * from the results; however, line comments anywhere else &mdash; for example,
 * within a statement &mdash; will be included in the results.
 * @param resource the {@code EncodedResource} containing the script
 * to be processed
 * @param commentPrefixes the prefixes that identify comments in the SQL script
 * (typically "--")
 * @param separator the statement separator in the SQL script (typically ";")
 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
 * @return a {@code String} containing the script lines
 * @throws IOException in case of I/O errors
 */
private static String readScript(EncodedResource resource, @Nullable String[] commentPrefixes,
		@Nullable String separator, @Nullable String blockCommentEndDelimiter) throws IOException {

	// try-with-resources guarantees the underlying reader is closed even on failure.
	try (LineNumberReader lnr = new LineNumberReader(resource.getReader())) {
		return readScript(lnr, commentPrefixes, separator, blockCommentEndDelimiter);
	}
}

/**
 * Read a script from the provided {@code LineNumberReader}, using the supplied
 * comment prefix and statement separator, and build a {@code String} containing
 * the lines.
 * <p>Lines <em>beginning</em> with the comment prefix are excluded from the
 * results; however, line comments anywhere else &mdash; for example, within
 * a statement &mdash; will be included in the results.
 * @param lineNumberReader the {@code LineNumberReader} containing the script
 * to be processed
 * @param lineCommentPrefix the prefix that identifies comments in the SQL script
 * (typically "--")
 * @param separator the statement separator in the SQL script (typically ";")
 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
 * @return a {@code String} containing the script lines
 * @throws IOException in case of I/O errors
 */
public static String readScript(LineNumberReader lineNumberReader, @Nullable String lineCommentPrefix,
		@Nullable String separator, @Nullable String blockCommentEndDelimiter) throws IOException {

	// Adapt the single-prefix signature onto the array-based implementation;
	// a null prefix is forwarded as a null array (meaning "no line comments").
	String[] lineCommentPrefixes = (lineCommentPrefix != null) ? new String[] {lineCommentPrefix} : null;
	return readScript(lineNumberReader, lineCommentPrefixes, separator, blockCommentEndDelimiter);
}

/**
 * Read a script from the provided {@code LineNumberReader}, using the supplied
 * comment prefixes and statement separator, and build a {@code String} containing
 * the lines.
 * <p>Lines <em>beginning</em> with one of the comment prefixes are excluded
 * from the results; however, line comments anywhere else &mdash; for example,
 * within a statement &mdash; will be included in the results.
* * @param lineNumberReader the {@code LineNumberReader} containing the script * to be processed * @param lineCommentPrefixes the prefixes that identify comments in the SQL script * (typically "--") * @param separator the statement separator in the SQL script (typically ";") * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter * @return a {@code String} containing the script lines * @throws IOException in case of I/O errors * @since 5.2 */ public static String readScript(LineNumberReader lineNumberReader, @Nullable String[] lineCommentPrefixes, @Nullable String separator, @Nullable String blockCommentEndDelimiter) throws IOException { String currentStatement = lineNumberReader.readLine(); StringBuilder scriptBuilder = new StringBuilder(); while (currentStatement != null) { if ((blockCommentEndDelimiter != null && currentStatement.contains(blockCommentEndDelimiter)) || (lineCommentPrefixes != null && !startsWithAny(currentStatement, lineCommentPrefixes, 0))) { if (scriptBuilder.length() > 0) { scriptBuilder.append('\n'); } scriptBuilder.append(currentStatement); } currentStatement = lineNumberReader.readLine(); } appendSeparatorToScriptIfNecessary(scriptBuilder, separator); return scriptBuilder.toString(); } private static void appendSeparatorToScriptIfNecessary(StringBuilder scriptBuilder, @Nullable String separator) { if (separator == null) { return; } String trimmed = separator.trim(); if (trimmed.length() == separator.length()) { return; } // separator ends in whitespace, so we might want to see if the script is trying // to end the same way if (scriptBuilder.lastIndexOf(trimmed) == scriptBuilder.length() - trimmed.length()) { scriptBuilder.append(separator.substring(trimmed.length())); } } private static boolean startsWithAny(String script, String[] prefixes, int offset) { for (String prefix : prefixes) { if (script.startsWith(prefix, offset)) { return true; } } return false; } /** * Does the provided SQL script contain the specified 
delimiter? * * @param script the SQL script * @param delim the string delimiting each statement - typically a ';' character */ public static boolean containsSqlScriptDelimiters(String script, String delim) { boolean inLiteral = false; boolean inEscape = false; for (int i = 0; i < script.length(); i++) { char c = script.charAt(i); if (inEscape) { inEscape = false; continue; } // MySQL style escapes if (c == '\\') { inEscape = true; continue; } if (c == '\'') { inLiteral = !inLiteral; } if (!inLiteral && script.startsWith(delim, i)) { return true; } } return false; } /** * Execute the given SQL script using default settings for statement * separators, comment delimiters, and exception handling flags. * <p>Statement separators and comments will be removed before executing * individual statements within the supplied script. * <p><strong>Warning</strong>: this method does <em>not</em> release the * provided {@link Connection}. * * @param connection the JDBC connection to use to execute the script; already * configured and ready to use * @param resource the resource to load the SQL script from; encoded with the * current platform's default encoding * @throws ScriptException if an error occurred while executing the SQL script * @see #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String) * @see #DEFAULT_STATEMENT_SEPARATOR * @see #DEFAULT_COMMENT_PREFIX * @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER * @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection */ public static void executeSqlScript(Connection connection, Resource resource) throws ScriptException { executeSqlScript(connection, new EncodedResource(resource)); } /** * Execute the given SQL script using default settings for statement * separators, comment delimiters, and exception handling flags. 
* <p>Statement separators and comments will be removed before executing * individual statements within the supplied script. * <p><strong>Warning</strong>: this method does <em>not</em> release the * provided {@link Connection}. * * @param connection the JDBC connection to use to execute the script; already * configured and ready to use * @param resource the resource (potentially associated with a specific encoding) * to load the SQL script from * @throws ScriptException if an error occurred while executing the SQL script * @see #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String) * @see #DEFAULT_STATEMENT_SEPARATOR * @see #DEFAULT_COMMENT_PREFIX * @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER * @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection */ public static void executeSqlScript(Connection connection, EncodedResource resource) throws ScriptException { executeSqlScript(connection, resource, false, false, DEFAULT_COMMENT_PREFIX, DEFAULT_STATEMENT_SEPARATOR, DEFAULT_BLOCK_COMMENT_START_DELIMITER, DEFAULT_BLOCK_COMMENT_END_DELIMITER); } /** * Execute the given SQL script. * <p>Statement separators and comments will be removed before executing * individual statements within the supplied script. * <p><strong>Warning</strong>: this method does <em>not</em> release the * provided {@link Connection}. 
* * @param connection the JDBC connection to use to execute the script; already * configured and ready to use * @param resource the resource (potentially associated with a specific encoding) * to load the SQL script from * @param continueOnError whether or not to continue without throwing an exception * in the event of an error * @param ignoreFailedDrops whether or not to continue in the event of specifically * an error on a {@code DROP} statement * @param commentPrefix the prefix that identifies single-line comments in the * SQL script (typically "--") * @param separator the script statement separator; defaults to * {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to * {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to * {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a * single statement without a separator * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter * @throws ScriptException if an error occurred while executing the SQL script * @see #DEFAULT_STATEMENT_SEPARATOR * @see #FALLBACK_STATEMENT_SEPARATOR * @see #EOF_STATEMENT_SEPARATOR * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection */ public static void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError, boolean ignoreFailedDrops, String commentPrefix, @Nullable String separator, String blockCommentStartDelimiter, String blockCommentEndDelimiter) throws ScriptException { executeSqlScript(connection, resource, continueOnError, ignoreFailedDrops, new String[]{commentPrefix}, separator, blockCommentStartDelimiter, blockCommentEndDelimiter); } /** * Execute the given SQL script. * <p>Statement separators and comments will be removed before executing * individual statements within the supplied script. 
 * <p><strong>Warning</strong>: this method does <em>not</em> release the
 * provided {@link Connection}.
 * @param connection the JDBC connection to use to execute the script; already
 * configured and ready to use
 * @param resource the resource (potentially associated with a specific encoding)
 * to load the SQL script from
 * @param continueOnError whether or not to continue without throwing an exception
 * in the event of an error
 * @param ignoreFailedDrops whether or not to continue in the event of specifically
 * an error on a {@code DROP} statement
 * @param commentPrefixes the prefixes that identify single-line comments in the
 * SQL script (typically "--")
 * @param separator the script statement separator; defaults to
 * {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to
 * {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to
 * {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a
 * single statement without a separator
 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter
 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
 * @throws ScriptException if an error occurred while executing the SQL script
 * @see #DEFAULT_STATEMENT_SEPARATOR
 * @see #FALLBACK_STATEMENT_SEPARATOR
 * @see #EOF_STATEMENT_SEPARATOR
 * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection
 * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection
 * @since 5.2
 */
public static void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError,
		boolean ignoreFailedDrops, String[] commentPrefixes, @Nullable String separator,
		String blockCommentStartDelimiter, String blockCommentEndDelimiter) throws ScriptException {

	try {
		if (logger.isDebugEnabled()) {
			logger.debug("Executing SQL script from " + resource);
		}
		long startTime = System.currentTimeMillis();

		String script;
		try {
			script = readScript(resource, commentPrefixes, separator, blockCommentEndDelimiter);
		}
		catch (IOException ex) {
			throw new CannotReadScriptException(resource, ex);
		}

		if (separator == null) {
			separator = DEFAULT_STATEMENT_SEPARATOR;
		}
		// If the chosen separator never occurs in the script, fall back to
		// newline separation — unless the caller explicitly requested
		// whole-script execution via the EOF separator.
		if (!EOF_STATEMENT_SEPARATOR.equals(separator) && !containsSqlScriptDelimiters(script, separator)) {
			separator = FALLBACK_STATEMENT_SEPARATOR;
		}

		List<String> statements = new ArrayList<>();
		splitSqlScript(resource, script, separator, commentPrefixes, blockCommentStartDelimiter,
				blockCommentEndDelimiter, statements);

		int stmtNumber = 0;
		Statement stmt = connection.createStatement();
		// Deliberately NOT try-with-resources: a failure in close() is logged at
		// trace level and swallowed, rather than thrown or attached as a
		// suppressed exception that could mask an earlier statement failure.
		try {
			for (String statement : statements) {
				stmtNumber++;
				try {
					stmt.execute(statement);
					int rowsAffected = stmt.getUpdateCount();
					if (logger.isDebugEnabled()) {
						logger.debug(rowsAffected + " returned as update count for SQL: " + statement);
						// Drain and log the full SQLWarning chain for this statement.
						SQLWarning warningToLog = stmt.getWarnings();
						while (warningToLog != null) {
							logger.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() +
									"', error code '" + warningToLog.getErrorCode() +
									"', message [" + warningToLog.getMessage() + "]");
							warningToLog = warningToLog.getNextWarning();
						}
					}
				}
				catch (SQLException ex) {
					// Failed DROP statements may be tolerated independently of continueOnError.
					boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
					if (continueOnError || (dropStatement && ignoreFailedDrops)) {
						if (logger.isDebugEnabled()) {
							logger.debug(ScriptStatementFailedException.buildErrorMessage(statement, stmtNumber, resource), ex);
						}
					}
					else {
						throw new ScriptStatementFailedException(statement, stmtNumber, resource, ex);
					}
				}
			}
		}
		finally {
			try {
				stmt.close();
			}
			catch (Throwable ex) {
				logger.trace("Could not close JDBC Statement", ex);
			}
		}

		long elapsedTime = System.currentTimeMillis() - startTime;
		if (logger.isDebugEnabled()) {
			logger.debug("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
		}
	}
	catch (Exception ex) {
		// Rethrow ScriptExceptions as-is; wrap anything else so callers see a
		// consistent exception hierarchy.
		if (ex instanceof ScriptException) {
			throw (ScriptException) ex;
		}
		throw new UncategorizedScriptException(
				"Failed to execute database script from resource [" + resource + "]", ex);
	}
}
package databute.databuter.cluster.handshake.request; import com.google.common.base.MoreObjects; import databute.network.message.Message; import databute.network.message.MessageCode; import static com.google.common.base.Preconditions.checkNotNull; public class HandshakeRequestMessage implements Message { private final String id; public HandshakeRequestMessage(String id) { this.id = checkNotNull(id, "id"); } @Override public MessageCode messageCode() { return MessageCode.HANDSHAKE_REQUEST; } public String id() { return id; } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("id", id) .toString(); } }
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.BatchControllerClient.ListBatchesPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import 
com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1.Batch; import com.google.cloud.dataproc.v1.BatchOperationMetadata; import com.google.cloud.dataproc.v1.CreateBatchRequest; import com.google.cloud.dataproc.v1.DeleteBatchRequest; import com.google.cloud.dataproc.v1.GetBatchRequest; import com.google.cloud.dataproc.v1.ListBatchesRequest; import com.google.cloud.dataproc.v1.ListBatchesResponse; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BatchControllerStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (dataproc.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
 * <p>For example, to set the total timeout of getBatch to 30 seconds:
 *
 * <pre>{@code
 * BatchControllerStubSettings.Builder batchControllerSettingsBuilder =
 *     BatchControllerStubSettings.newBuilder();
 * batchControllerSettingsBuilder
 *     .getBatchSettings()
 *     .setRetrySettings(
 *         batchControllerSettingsBuilder
 *             .getBatchSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * BatchControllerStubSettings batchControllerSettings = batchControllerSettingsBuilder.build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class BatchControllerStubSettings extends StubSettings<BatchControllerStubSettings> {
  // NOTE(review): generated code — manual logic changes will be lost on regeneration.

  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Immutable per-RPC call settings, snapshotted from the Builder at construction time.
  private final UnaryCallSettings<CreateBatchRequest, Operation> createBatchSettings;
  private final OperationCallSettings<CreateBatchRequest, Batch, BatchOperationMetadata>
      createBatchOperationSettings;
  private final UnaryCallSettings<GetBatchRequest, Batch> getBatchSettings;
  private final PagedCallSettings<ListBatchesRequest, ListBatchesResponse, ListBatchesPagedResponse>
      listBatchesSettings;
  private final UnaryCallSettings<DeleteBatchRequest, Empty> deleteBatchSettings;

  // Describes how page tokens/sizes are injected into requests and how tokens and
  // resources are extracted from responses for listBatches pagination.
  private static final PagedListDescriptor<ListBatchesRequest, ListBatchesResponse, Batch>
      LIST_BATCHES_PAGE_STR_DESC =
          new PagedListDescriptor<ListBatchesRequest, ListBatchesResponse, Batch>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListBatchesRequest injectToken(ListBatchesRequest payload, String token) {
              return ListBatchesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListBatchesRequest injectPageSize(ListBatchesRequest payload, int pageSize) {
              return ListBatchesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListBatchesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListBatchesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Batch> extractResources(ListBatchesResponse payload) {
              // Defensive: treat a null batches list as empty.
              return payload.getBatchesList() == null
                  ? ImmutableList.<Batch>of()
                  : payload.getBatchesList();
            }
          };

  // Builds the paged response future for listBatches from the page descriptor above.
  private static final PagedListResponseFactory<
          ListBatchesRequest, ListBatchesResponse, ListBatchesPagedResponse>
      LIST_BATCHES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListBatchesRequest, ListBatchesResponse, ListBatchesPagedResponse>() {
            @Override
            public ApiFuture<ListBatchesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListBatchesRequest, ListBatchesResponse> callable,
                ListBatchesRequest request,
                ApiCallContext context,
                ApiFuture<ListBatchesResponse> futureResponse) {
              PageContext<ListBatchesRequest, ListBatchesResponse, Batch> pageContext =
                  PageContext.create(callable, LIST_BATCHES_PAGE_STR_DESC, request, context);
              return ListBatchesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to createBatch. */
  public UnaryCallSettings<CreateBatchRequest, Operation> createBatchSettings() {
    return createBatchSettings;
  }

  /** Returns the object with the settings used for calls to createBatch. */
  public OperationCallSettings<CreateBatchRequest, Batch, BatchOperationMetadata>
      createBatchOperationSettings() {
    return createBatchOperationSettings;
  }

  /** Returns the object with the settings used for calls to getBatch. */
  public UnaryCallSettings<GetBatchRequest, Batch> getBatchSettings() {
    return getBatchSettings;
  }

  /** Returns the object with the settings used for calls to listBatches. */
  public PagedCallSettings<ListBatchesRequest, ListBatchesResponse, ListBatchesPagedResponse>
      listBatchesSettings() {
    return listBatchesSettings;
  }

  /** Returns the object with the settings used for calls to deleteBatch. */
  public UnaryCallSettings<DeleteBatchRequest, Empty> deleteBatchSettings() {
    return deleteBatchSettings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public BatchControllerStub createStub() throws IOException {
    // Only the gRPC transport is supported by this generated stub.
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcBatchControllerStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "dataproc.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dataproc.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(BatchControllerStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected BatchControllerStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    // Snapshot each per-method builder into its immutable settings object.
    createBatchSettings = settingsBuilder.createBatchSettings().build();
    createBatchOperationSettings = settingsBuilder.createBatchOperationSettings().build();
    getBatchSettings = settingsBuilder.getBatchSettings().build();
    listBatchesSettings = settingsBuilder.listBatchesSettings().build();
    deleteBatchSettings = settingsBuilder.deleteBatchSettings().build();
  }

  /** Builder for BatchControllerStubSettings. */
  public static class Builder extends StubSettings.Builder<BatchControllerStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreateBatchRequest, Operation> createBatchSettings;
    private final OperationCallSettings.Builder<CreateBatchRequest, Batch, BatchOperationMetadata>
        createBatchOperationSettings;
    private final UnaryCallSettings.Builder<GetBatchRequest, Batch> getBatchSettings;
    private final PagedCallSettings.Builder<
            ListBatchesRequest, ListBatchesResponse, ListBatchesPagedResponse>
        listBatchesSettings;
    private final UnaryCallSettings.Builder<DeleteBatchRequest, Empty> deleteBatchSettings;

    // Named sets of retryable status codes referenced by initDefaults below.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry parameter bundles referenced by initDefaults below.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      createBatchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createBatchOperationSettings = OperationCallSettings.newBuilder();
      getBatchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listBatchesSettings = PagedCallSettings.newBuilder(LIST_BATCHES_PAGE_STR_FACT);
      deleteBatchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      // The operation settings builder is intentionally excluded: only unary
      // method builders participate in applyToAllUnaryMethods.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createBatchSettings, getBatchSettings, listBatchesSettings, deleteBatchSettings);
      initDefaults(this);
    }

    protected Builder(BatchControllerStubSettings settings) {
      super(settings);

      createBatchSettings = settings.createBatchSettings.toBuilder();
      createBatchOperationSettings = settings.createBatchOperationSettings.toBuilder();
      getBatchSettings = settings.getBatchSettings.toBuilder();
      listBatchesSettings = settings.listBatchesSettings.toBuilder();
      deleteBatchSettings = settings.deleteBatchSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createBatchSettings, getBatchSettings, listBatchesSettings, deleteBatchSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {
      // All RPCs default to "no retry"; long-running-operation polling uses the
      // exponential backoff configured at the bottom of this method.
      builder
          .createBatchSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .getBatchSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .listBatchesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .deleteBatchSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .createBatchOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings.<CreateBatchRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Batch.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(BatchOperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to createBatch. */
    public UnaryCallSettings.Builder<CreateBatchRequest, Operation> createBatchSettings() {
      return createBatchSettings;
    }

    /** Returns the builder for the settings used for calls to createBatch. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<CreateBatchRequest, Batch, BatchOperationMetadata>
        createBatchOperationSettings() {
      return createBatchOperationSettings;
    }

    /** Returns the builder for the settings used for calls to getBatch. */
    public UnaryCallSettings.Builder<GetBatchRequest, Batch> getBatchSettings() {
      return getBatchSettings;
    }

    /** Returns the builder for the settings used for calls to listBatches. */
    public PagedCallSettings.Builder<
            ListBatchesRequest, ListBatchesResponse, ListBatchesPagedResponse>
        listBatchesSettings() {
      return listBatchesSettings;
    }

    /** Returns the builder for the settings used for calls to deleteBatch. */
    public UnaryCallSettings.Builder<DeleteBatchRequest, Empty> deleteBatchSettings() {
      return deleteBatchSettings;
    }

    @Override
    public BatchControllerStubSettings build() throws IOException {
      return new BatchControllerStubSettings(this);
    }
  }
}
/*******************************************************************************
 * Copyright (c) 2013-2018 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License,
 * Version 2.0 which accompanies this distribution and is available at
 * http://www.apache.org/licenses/LICENSE-2.0.txt
 ******************************************************************************/
package org.locationtech.geowave.adapter.raster.adapter.merge.nodata;

import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMetadata.SampleIndex;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;

/**
 * Creates and merges {@link NoDataMetadata} for rasters. "No data" samples are
 * represented either as an explicit set of sample indices
 * ({@link NoDataBySampleIndex}) or, when that set would grow too large, as a
 * geometric footprint filter plus the per-band no-data values
 * ({@link NoDataByFilter}).
 */
public class NoDataMetadataFactory
{
	/**
	 * Internal result of scanning a raster: which samples are "no data" and
	 * which candidate no-data values were actually encountered.
	 */
	private static class NoDataSummary
	{
		private final Set<SampleIndex> indices;
		private final double[][] usedNoDataValues;

		public NoDataSummary(
				final Set<SampleIndex> indices,
				final double[][] usedNoDataValues ) {
			this.indices = indices;
			this.usedNoDataValues = usedNoDataValues;
		}
	}

	// above this many explicit indices, switch from the index-set
	// representation to the geometric-filter representation
	private static final int MAX_LIST_NO_DATA = 20;

	/**
	 * Builds metadata describing the "no data" samples of the given raster.
	 *
	 * @param allNoDataValues
	 *            per-band candidate no-data values (may be null)
	 * @param shape
	 *            footprint geometry; samples not intersecting it are "no data"
	 * @param data
	 *            the raster to scan
	 * @return the metadata, or null when the raster has no "no data" samples
	 */
	public static NoDataMetadata createMetadata(
			final double[][] allNoDataValues,
			final Geometry shape,
			final Raster data ) {
		final NoDataSummary noDataSummary = getNoDataSummary(
				allNoDataValues,
				shape,
				data);
		return createMetadata(
				noDataSummary,
				new Geometry[] {
					shape
				},
				data.getWidth(),
				data.getHeight());
	}

	/**
	 * Merges the "no data" metadata of two rasters; a sample is "no data" in
	 * the merge only if it is "no data" in both inputs (set intersection).
	 *
	 * @return the merged metadata, or null when either input has no "no data"
	 *         samples (so the merge has none either)
	 */
	public static NoDataMetadata mergeMetadata(
			final NoDataMetadata noDataMetadata1,
			final WritableRaster raster1,
			final NoDataMetadata noDataMetadata2,
			final WritableRaster raster2 ) {
		if ((noDataMetadata1 == null) || (noDataMetadata2 == null)) {
			// this implies that there is no nodata values in one of the rasters
			// so there is no nodata values in the merge
			return null;
		}
		final Set<SampleIndex> noDataIndices1 = noDataMetadata1.getNoDataIndices();
		final Set<SampleIndex> noDataIndices2 = noDataMetadata2.getNoDataIndices();
		if ((noDataIndices1 != null) && (noDataIndices2 != null)) {
			// simple case, just take the intersection of the sets
			noDataIndices2.retainAll(noDataIndices1);
			return new NoDataBySampleIndex(
					noDataIndices2);
		}
		else if (noDataIndices1 != null) {
			// just determine which of the no data indices are covered by the
			// second set of metadata and remove them
			return mergeMetadataBySummary(
					noDataIndices1,
					noDataMetadata2,
					raster2);
		}
		else if (noDataIndices2 != null) {
			// just determine which of the no data indices are covered by the
			// first set of metadata and remove them
			return mergeMetadataBySummary(
					noDataIndices2,
					noDataMetadata1,
					raster1);
		}
		else if ((noDataMetadata1 instanceof NoDataByFilter) && (noDataMetadata2 instanceof NoDataByFilter)) {
			final NoDataByFilter noDataByFilter1 = ((NoDataByFilter) noDataMetadata1);
			final NoDataByFilter noDataByFilter2 = ((NoDataByFilter) noDataMetadata2);
			final double[][] noDataPerBand1 = noDataByFilter1.getNoDataPerBand();
			final double[][] noDataPerBand2 = noDataByFilter2.getNoDataPerBand();
			// union the no data values from each filter
			final int numBands = Math.min(
					noDataPerBand1.length,
					noDataPerBand2.length);
			final double[][] allNoDataValues = new double[numBands][];
			for (int b = 0; b < numBands; b++) {
				final Set<Double> noDataValuesInBand = new HashSet<Double>();
				if (noDataPerBand1[b] != null) {
					for (final double noDataValue : noDataPerBand1[b]) {
						noDataValuesInBand.add(noDataValue);
					}
				}
				if (noDataPerBand2[b] != null) {
					for (final double noDataValue : noDataPerBand2[b]) {
						noDataValuesInBand.add(noDataValue);
					}
				}
				allNoDataValues[b] = new double[noDataValuesInBand.size()];
				int i = 0;
				final Iterator<Double> it = noDataValuesInBand.iterator();
				while (it.hasNext()) {
					allNoDataValues[b][i++] = it.next();
				}
			}
			return mergeMetadataBySummary(
					allNoDataValues,
					noDataByFilter1,
					raster1,
					noDataByFilter2,
					raster2);
		}
		else {
			// this should never happen because the only implementations of
			// metadata are by index or by filter but just in case iteratively
			// go through every sample, determine if its covered by the first or
			// the second set of metadata and use the indices
			return exhaustiveMergeMetadata(
					noDataMetadata1,
					raster1,
					noDataMetadata2,
					raster2);
		}
	}

	/**
	 * Converts a scan summary into the most compact metadata representation:
	 * geometric filter for large index sets, explicit indices for small ones,
	 * or null when there are no "no data" samples at all.
	 */
	private static NoDataMetadata createMetadata(
			final NoDataSummary noDataSummary,
			final Geometry[] shapes,
			final int width,
			final int height ) {
		if (noDataSummary.indices.size() > MAX_LIST_NO_DATA) {
			Geometry finalShape;
			if ((shapes == null) || (shapes.length == 0)) {
				finalShape = null;
			}
			else {
				finalShape = shapes[0];
				// union all footprints; a single null shape poisons the union
				// (null means "no footprint constraint")
				if ((shapes.length > 1) && (finalShape != null)) {
					for (int i = 1; i < shapes.length; i++) {
						if (shapes[i] == null) {
							finalShape = null;
							break;
						}
						else {
							finalShape = finalShape.union(shapes[i]);
						}
					}
				}
			}
			if ((finalShape != null) && finalShape.covers(new GeometryFactory().toGeometry(new Envelope(
					0,
					width,
					0,
					height)))) {
				// if the coverage of this geometric union ever gets to the
				// point that it fully covers the raster, stop storing it and
				// just set the geometry to null
				finalShape = null;
			}
			return new NoDataByFilter(
					finalShape,
					noDataSummary.usedNoDataValues);
		}
		else if (!noDataSummary.indices.isEmpty()) {
			// just go through every raster sample and determine whether it
			// qualifies as null data
			return new NoDataBySampleIndex(
					noDataSummary.indices);
		}
		else {
			// the "no data" samples in the dataset must be 0, so just return
			// null for the metadata
			return null;
		}
	}

	/**
	 * Intersects an explicit index set with arbitrary metadata: keeps only the
	 * indices that the other metadata also classifies as "no data".
	 */
	private static NoDataMetadata mergeMetadataBySummary(
			final Set<SampleIndex> noDataIndices,
			final NoDataMetadata noDataMetadata,
			final WritableRaster raster ) {
		final Iterator<SampleIndex> indices = noDataIndices.iterator();
		while (indices.hasNext()) {
			final SampleIndex index = indices.next();
			if (!noDataMetadata.isNoData(
					index,
					raster.getSampleDouble(
							index.getX(),
							index.getY(),
							index.getBand()))) {
				indices.remove();
			}
		}
		return new NoDataBySampleIndex(
				noDataIndices);
	}

	/**
	 * Fallback merge: visits every sample in the overlapping extent and keeps
	 * the indices classified as "no data" by both metadata instances.
	 */
	private static NoDataMetadata exhaustiveMergeMetadata(
			final NoDataMetadata noDataMetadata1,
			final WritableRaster raster1,
			final NoDataMetadata noDataMetadata2,
			final WritableRaster raster2 ) {
		final int width = Math.min(
				raster1.getWidth(),
				raster2.getWidth());
		final int height = Math.min(
				raster1.getHeight(),
				raster2.getHeight());
		final int numBands = Math.min(
				raster1.getNumBands(),
				raster2.getNumBands());
		final Set<SampleIndex> indices = new HashSet<SampleIndex>();
		for (int b = 0; b < numBands; b++) {
			for (int x = 0; x < width; x++) {
				for (int y = 0; y < height; y++) {
					final SampleIndex index = new SampleIndex(
							x,
							y,
							b);
					if (noDataMetadata1.isNoData(
							index,
							raster1.getSampleDouble(
									x,
									y,
									b)) && noDataMetadata2.isNoData(
							index,
							raster2.getSampleDouble(
									x,
									y,
									b))) {
						indices.add(index);
					}
				}
			}
		}
		return new NoDataBySampleIndex(
				indices);
	}

	/**
	 * Merges two filter-based metadata instances by re-scanning both rasters
	 * against the unioned no-data values and footprints.
	 */
	private static NoDataMetadata mergeMetadataBySummary(
			final double[][] allNoDataValues,
			final NoDataByFilter noDataMetadata1,
			final WritableRaster raster1,
			final NoDataByFilter noDataMetadata2,
			final WritableRaster raster2 ) {
		final NoDataSummary noDataSummary = getNoDataSummary(
				allNoDataValues,
				noDataMetadata1,
				raster1,
				noDataMetadata2,
				raster2);
		return createMetadata(
				noDataSummary,
				new Geometry[] {
					noDataMetadata1.getShape(),
					noDataMetadata2.getShape()
				},
				raster2.getWidth(), // both rasters better be the same
									// dimensions
				raster2.getHeight());
	}

	/** Summarizes two filter-based metadata instances over both rasters at once. */
	private static NoDataSummary getNoDataSummary(
			final double[][] allNoDataValues,
			final NoDataByFilter noDataMetadata1,
			final WritableRaster raster1,
			final NoDataByFilter noDataMetadata2,
			final WritableRaster raster2 ) {
		final int width = Math.min(
				raster1.getWidth(),
				raster2.getWidth());
		final int height = Math.min(
				raster1.getHeight(),
				raster2.getHeight());
		final int numBands = Math.min(
				raster1.getNumBands(),
				raster2.getNumBands());
		return getNoDataSummary(
				allNoDataValues,
				new MultiShape(
						new Geometry[] {
							noDataMetadata1.getShape(),
							noDataMetadata2.getShape()
						}),
				new MultiRaster(
						new Raster[] {
							raster1,
							raster2
						}),
				width,
				height,
				numBands);
	}

	/** Summarizes a single raster against a single footprint geometry. */
	private static NoDataSummary getNoDataSummary(
			final double[][] allNoDataValues,
			final Geometry shape,
			final Raster data ) {
		return getNoDataSummary(
				allNoDataValues,
				new SingleShape(
						shape),
				new SingleRaster(
						data),
				data.getWidth(),
				data.getHeight(),
				data.getNumBands());
	}

	/**
	 * Core scan: classifies every sample as "no data" either because it falls
	 * outside the footprint, or because all of its sample values (across the
	 * wrapped rasters) equal one of the candidate no-data values for its band.
	 */
	private static NoDataSummary getNoDataSummary(
			final double[][] allNoDataValues,
			final NoDataByCoordinate shape,
			final NoDataBySample data,
			final int width,
			final int height,
			final int numBands ) {
		final Set<Double>[] noDataValuesPerBand;
		boolean skipNoData;
		final Set<SampleIndex> indices = new HashSet<SampleIndex>();
		if (allNoDataValues == null) {
			skipNoData = true;
			noDataValuesPerBand = null;
			if (shape == null) {
				// no footprint and no no-data values: nothing can be "no data"
				return new NoDataSummary(
						indices,
						new double[][] {});
			}
		}
		else {
			noDataValuesPerBand = new Set[numBands];
			for (int b = 0; b < numBands; b++) {
				noDataValuesPerBand[b] = new HashSet<Double>();
			}
			skipNoData = false;
		}
		for (int x = 0; x < width; x++) {
			for (int y = 0; y < height; y++) {
				if (shape.isNoData(
						x,
						y)) {
					for (int b = 0; b < numBands; b++) {
						indices.add(new SampleIndex(
								x,
								y,
								b));
					}
					// this will ignore the no data values for this x,y
					// which should be fine because the shape will
					// always classify this x,y as "no data"
				}
				else if (!skipNoData) {
					for (int b = 0; b < numBands; b++) {
						if (allNoDataValues[b] == null) {
							continue;
						}
						else {
							final double[] samples = data.getSampleValues(
									x,
									y,
									b);
							for (int i = 0; i < allNoDataValues[b].length; i++) {
								// if a single sample is not a "no data" value
								// then it is valid
								boolean noData = true;
								for (final double sample : samples) {
									// compare raw bits (the same semantics as
									// Double.equals) so that NaN matches NaN
									// and +0.0/-0.0 are distinguished, without
									// boxing a Double per sample
									if (Double.doubleToLongBits(sample) != Double.doubleToLongBits(allNoDataValues[b][i])) {
										noData = false;
										break;
									}
								}
								if (noData) {
									indices.add(new SampleIndex(
											x,
											y,
											b));
									if (noDataValuesPerBand != null && noDataValuesPerBand[b] != null) {
										noDataValuesPerBand[b].add(allNoDataValues[b][i]);
									}
								}
							}
						}
					}
				}
			}
		}
		final double[][] usedNoDataValues;
		if (!skipNoData && noDataValuesPerBand != null) {
			// only keep the no-data values that actually matched a sample
			usedNoDataValues = new double[noDataValuesPerBand.length][];
			for (int b = 0; b < noDataValuesPerBand.length; b++) {
				usedNoDataValues[b] = new double[noDataValuesPerBand[b].size()];
				int i = 0;
				final Iterator<Double> noDataValues = noDataValuesPerBand[b].iterator();
				while (noDataValues.hasNext()) {
					usedNoDataValues[b][i++] = noDataValues.next();
				}
			}
		}
		else {
			usedNoDataValues = new double[][] {};
		}
		return new NoDataSummary(
				indices,
				usedNoDataValues);
	}

	/** Abstraction over "is this x,y outside the footprint" for 1..n geometries. */
	private static interface NoDataByCoordinate
	{
		public boolean isNoData(
				int x,
				int y );
	}

	/** Abstraction over reading one band's sample(s) at x,y from 1..n rasters. */
	private static interface NoDataBySample
	{
		public double[] getSampleValues(
				int x,
				int y,
				int b );
	}

	private static class SingleShape implements
			NoDataByCoordinate
	{
		private final Geometry shape;

		public SingleShape(
				final Geometry shape ) {
			this.shape = shape;
		}

		@Override
		public boolean isNoData(
				final int x,
				final int y ) {
			// a null shape means "no footprint constraint": nothing is no-data
			return ((shape != null) && !shape.intersects(new GeometryFactory().createPoint(new Coordinate(
					x,
					y))));
		}
	}

	private static class MultiShape implements
			NoDataByCoordinate
	{
		private final Geometry[] shapes;
		// true when any footprint is absent, in which case nothing is
		// classified "no data" by shape
		private boolean acceptNone = false;

		public MultiShape(
				final Geometry[] shapes ) {
			this.shapes = shapes;
			if ((shapes == null) || (shapes.length == 0)) {
				acceptNone = true;
			}
			else {
				for (final Geometry shape : shapes) {
					if (shape == null) {
						acceptNone = true;
					}
				}
			}
		}

		@Override
		public boolean isNoData(
				final int x,
				final int y ) {
			if (!acceptNone) {
				for (final Geometry shape : shapes) {
					// if any one intersects the point than it is not "no data"
					// based on shape
					if (shape.intersects(new GeometryFactory().createPoint(new Coordinate(
							x,
							y)))) {
						return false;
					}
				}
				return true;
			}
			return false;
		}
	}

	private static class SingleRaster implements
			NoDataBySample
	{
		private final Raster raster;

		public SingleRaster(
				final Raster raster ) {
			this.raster = raster;
		}

		@Override
		public double[] getSampleValues(
				final int x,
				final int y,
				final int b ) {
			return new double[] {
				raster.getSampleDouble(
						x,
						y,
						b)
			};
		}
	}

	private static class MultiRaster implements
			NoDataBySample
	{
		private final Raster[] rasters;

		public MultiRaster(
				final Raster[] rasters ) {
			this.rasters = rasters;
		}

		@Override
		public double[] getSampleValues(
				final int x,
				final int y,
				final int b ) {
			final double[] samples = new double[rasters.length];
			for (int i = 0; i < rasters.length; i++) {
				samples[i] = rasters[i].getSampleDouble(
						x,
						y,
						b);
			}
			return samples;
		}
	}
}
/*
 * Copyright [2005] [University Corporation for Advanced Internet Development, Inc.]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 *
 */

package org.opensaml.saml2.core.impl;

import org.opensaml.Configuration;
import org.opensaml.common.impl.AbstractSAMLObjectMarshaller;
import org.opensaml.saml2.core.RequestAbstractType;
import org.opensaml.xml.XMLObject;
import org.opensaml.xml.io.MarshallingException;
import org.w3c.dom.Element;

/**
 * A thread safe Marshaller for {@link org.opensaml.saml2.core.RequestAbstractType} objects.
 */
public abstract class RequestAbstractTypeMarshaller extends AbstractSAMLObjectMarshaller {

    /** Constructor. */
    protected RequestAbstractTypeMarshaller() {
        super();
    }

    /**
     * Constructor.
     *
     * @param namespaceURI the namespace URI of either the schema type QName or element QName of the elements this
     *            marshaller operates on
     * @param elementLocalName the local name of either the schema type QName or element QName of the elements this
     *            marshaller operates on
     */
    protected RequestAbstractTypeMarshaller(String namespaceURI, String elementLocalName) {
        super(namespaceURI, elementLocalName);
    }

    /**
     * Marshalls the attributes common to all SAML 2.0 requests (Version, ID, IssueInstant, Destination, Consent)
     * onto the DOM element; each attribute is only written when the corresponding property is non-null.
     *
     * {@inheritDoc}
     */
    protected void marshallAttributes(XMLObject samlObject, Element domElement) throws MarshallingException {
        RequestAbstractType req = (RequestAbstractType) samlObject;

        if (req.getVersion() != null) {
            domElement.setAttributeNS(null, RequestAbstractType.VERSION_ATTRIB_NAME, req.getVersion().toString());
        }

        if (req.getID() != null) {
            domElement.setAttributeNS(null, RequestAbstractType.ID_ATTRIB_NAME, req.getID());
            // register the ID attribute so it can be resolved via Document.getElementById
            // (required for XML signature reference resolution)
            domElement.setIdAttributeNS(null, RequestAbstractType.ID_ATTRIB_NAME, true);
        }

        // NOTE: the Version attribute was previously marshalled a second time here;
        // the duplicate (idempotent but redundant) setAttributeNS call was removed.

        if (req.getIssueInstant() != null) {
            String iiStr = Configuration.getSAMLDateFormatter().print(req.getIssueInstant());
            domElement.setAttributeNS(null, RequestAbstractType.ISSUE_INSTANT_ATTRIB_NAME, iiStr);
        }

        if (req.getDestination() != null) {
            domElement.setAttributeNS(null, RequestAbstractType.DESTINATION_ATTRIB_NAME, req.getDestination());
        }

        if (req.getConsent() != null) {
            domElement.setAttributeNS(null, RequestAbstractType.CONSENT_ATTRIB_NAME, req.getConsent());
        }
    }
}
/*
 * Copyright MapStruct Authors.
 *
 * Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
 */
package org.mapstruct.ap.test.selection.generics;

/**
 * Test fixture extending {@link TypeA}; judging by the package
 * ({@code selection.generics}) it appears to exercise mapping-method selection
 * over type hierarchies — confirm against the corresponding test class.
 */
public class TypeB extends TypeA {

    // explicit no-arg constructor (equivalent to the implicit default one)
    public TypeB() {
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.schema;

import java.util.Arrays;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.util.SizedUtil;

/**
 *
 * Class to track whether or not a value is null.
 * The value is a zero-based position in the schema provided.
 *
 * Bit n corresponds to nullable field n of the schema (i.e. field
 * {@code minNullable + n}). Serialized form is either a single short
 * (when at most {@code BITS_PER_SHORT} nullable fields exist) or a
 * sequence of longs followed by a trailing short holding the long count.
 *
 * @since 0.1
 *
 */
public class ValueBitSet {
    // Shared immutable instance used when the schema has no nullable fields.
    public final static ValueBitSet EMPTY_VALUE_BITSET = new ValueBitSet();
    private static final int BITS_PER_LONG = 64;
    private static final int BITS_PER_SHORT = 16;
    // Backing words for the bit set; bit n lives at bits[n/64], position n%64.
    private final long[] bits;
    private final ValueSchema schema;

    // Highest bit index ever set; -1 when empty. Drives serialized length.
    private int maxSetBit = -1;

    /**
     * Returns a bit set sized for the schema's nullable fields, or the shared
     * {@link #EMPTY_VALUE_BITSET} when the schema has no nullable fields.
     */
    public static ValueBitSet newInstance(ValueSchema schema) {
        if (schema.getFieldCount() == schema.getMinNullable()) {
            return EMPTY_VALUE_BITSET;
        }
        return new ValueBitSet(schema);
    }

    // Constructor for the empty sentinel: null schema, zero-length word array.
    private ValueBitSet() {
        schema = null;
        bits = new long[0];
    }

    private ValueBitSet(ValueSchema schema) {
        this.schema = schema;
        // One long per 64 nullable fields, rounded up, minimum one word.
        bits = new long[Math.max(1,(schema.getFieldCount() - schema.getMinNullable() + BITS_PER_LONG -1) / BITS_PER_LONG)];
    }

    /** Returns the highest bit index that has been set, or -1 if none. */
    public int getMaxSetBit() {
        return maxSetBit;
    }

    // True when more nullable fields exist than fit in a short, forcing the
    // variable-length (longs + trailing short) serialized form.
    private boolean isVarLength() {
        return schema == null ? false : schema.getFieldCount() - schema.getMinNullable() > BITS_PER_SHORT;
    }

    /**
     * Counts the unset (null) bits in the window of {@code nFields} bits
     * starting at bit {@code nBit}.
     */
    public int getNullCount(int nBit, int nFields) {
        if (schema == null) {
            return 0;
        }
        int count = 0;
        int index = nBit/BITS_PER_LONG;
        // Shift right based on the bit index, because we aren't interested in the bits before this.
        int shiftRight = nBit % BITS_PER_LONG;
        int bitsToLeft = BITS_PER_LONG - shiftRight;
        // Shift left based on the number of fields we're interested in counting.
        int shiftLeft = Math.max(0, (BITS_PER_LONG - nFields));
        // Mask off the bits of interest by shifting the bitset.
        count += Math.min(nFields, bitsToLeft) - (Long.bitCount((bits[index] >>> shiftRight) << shiftLeft));
        // Subtract from the number of fields the total number of possible fields we looked at
        nFields -= bitsToLeft;
        if (nFields > 0) {
            // If more fields to count, then walk through the successive long bits
            while (nFields > BITS_PER_LONG) {
                count += BITS_PER_LONG - Long.bitCount(bits[++index]);
                nFields -= BITS_PER_LONG;
            }
            // Count the final remaining fields
            if (nFields > 0) {
                count += nFields - Long.bitCount(bits[++index] << (BITS_PER_LONG - nFields));
            }
        }
        return count;
    }

    /**
     * Serialize the value bit set into a byte array. The byte array
     * is expected to have enough room (use {@link #getEstimatedLength()}
     * to ensure enough room exists.
     * @param b the byte array into which to put the serialized bit set
     * @param offset the offset into the byte array
     * @return the incremented offset
     */
    public int toBytes(byte[] b, int offset) {
        if (schema == null) {
            return offset;
        }
        // If the total number of possible values is bigger than 16 bits (the
        // size of a short), then serialize the long array followed by the
        // array length.
        if (isVarLength()) {
            short nLongs = (short)((maxSetBit + BITS_PER_LONG) / BITS_PER_LONG);
            for (int i = 0; i < nLongs; i++) {
                offset = Bytes.putLong(b, offset, bits[i]);
            }
            // Trailing short records how many longs precede it so the reader
            // can locate the start of the long array from the end.
            offset = Bytes.putShort(b, offset, nLongs);
        } else {
            // Else if the number of values is less than or equal to 16,
            // serialize the bits directly into a short.
            offset = Bytes.putShort(b, offset, (short)bits[0]);
        }
        return offset;
    }

    /** Resets all bits to zero and clears {@link #maxSetBit}. */
    public void clear() {
        Arrays.fill(bits, 0);
        maxSetBit = -1;
    }

    /** Returns true if bit {@code nBit} is set. */
    public boolean get(int nBit) {
        int lIndex = nBit / BITS_PER_LONG;
        int bIndex = nBit % BITS_PER_LONG;
        return (bits[lIndex] & (1L << bIndex)) != 0;
    }

    /** Sets bit {@code nBit} and updates {@link #maxSetBit}. */
    public void set(int nBit) {
        int lIndex = nBit / BITS_PER_LONG;
        int bIndex = nBit % BITS_PER_LONG;
        bits[lIndex] |= (1L << bIndex);
        maxSetBit = Math.max(maxSetBit, nBit);
    }

    /**
     * ORs a serialized bit set (located at the end of {@code ptr}) into this
     * one, using the minimum serialized length for this schema.
     */
    public void or(ImmutableBytesWritable ptr) {
        or(ptr, isVarLength() ? Bytes.SIZEOF_SHORT + 1 : Bytes.SIZEOF_SHORT);
    }

    /**
     * ORs a serialized bit set into this one. {@code length} selects the
     * format: larger than a short means the variable-length (longs + trailing
     * count) form, otherwise the single-short form.
     */
    public void or(ImmutableBytesWritable ptr, int length) {
        if (schema == null || length == 0) {
            return;
        }
        if (length > Bytes.SIZEOF_SHORT) {
            // Trailing short at the end of ptr holds the number of longs;
            // the longs themselves immediately precede it.
            int offset = ptr.getOffset() + ptr.getLength() - Bytes.SIZEOF_SHORT;
            short nLongs = Bytes.toShort(ptr.get(), offset);
            offset -= nLongs * Bytes.SIZEOF_LONG;
            for (int i = 0; i < nLongs; i++) {
                bits[i] |= Bytes.toLong(ptr.get(), offset);
                offset += Bytes.SIZEOF_LONG;
            }
            // NOTE(review): maxSetBit becomes an upper bound here (the exact
            // top bit of the incoming set is not recomputed) — appears
            // intentional since it is only used to size serialization.
            maxSetBit = Math.max(maxSetBit, nLongs * BITS_PER_LONG - 1);
        } else {
            long l = Bytes.toShort(ptr.get(), ptr.getOffset() + ptr.getLength() - Bytes.SIZEOF_SHORT);
            bits[0] |= l;
            maxSetBit = Math.max(maxSetBit, (bits[0] == 0 ? 0 : BITS_PER_SHORT) - 1);
        }
    }

    /**
     * @return Max serialization size
     */
    public int getEstimatedLength() {
        if (schema == null) {
            return 0;
        }
        return Bytes.SIZEOF_SHORT + (isVarLength() ? (maxSetBit + BITS_PER_LONG) / BITS_PER_LONG * Bytes.SIZEOF_LONG : 0);
    }

    /** Estimated in-memory footprint of a ValueBitSet tracking {@code nBits} bits. */
    public static int getSize(int nBits) {
        return SizedUtil.OBJECT_SIZE + SizedUtil.POINTER_SIZE + SizedUtil.ARRAY_SIZE + SizedUtil.INT_SIZE + (nBits + BITS_PER_LONG - 1) / BITS_PER_LONG * Bytes.SIZEOF_LONG;
    }

    /**
     * @return Size of object in memory
     */
    public int getSize() {
        if (schema == null) {
            return 0;
        }
        return SizedUtil.OBJECT_SIZE + SizedUtil.POINTER_SIZE + SizedUtil.ARRAY_SIZE + SizedUtil.LONG_SIZE * bits.length + SizedUtil.INT_SIZE;
    }

    /**
     * ORs another ValueBitSet into this one.
     * NOTE(review): assumes {@code isSet.bits} is at least as long as
     * {@code this.bits} (i.e. both built from compatible schemas) — confirm
     * callers guarantee this.
     */
    public void or(ValueBitSet isSet) {
        for (int i = 0; i < bits.length; i++) {
            bits[i] |= isSet.bits[i];
        }
        maxSetBit = Math.max(maxSetBit, isSet.maxSetBit);
    }
}
/*
 * Copyright OpenSearch Contributors.
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.cluster.routing.allocation;

import org.opensearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

/**
 * Allocation constraints specify conditions which, if breached, reduce the
 * priority of a node for receiving shard allocations.
 */
public class AllocationConstraints {
    // Weight added per breached constraint; large enough to dominate the
    // regular weight-function output so breached nodes sort last.
    public final long CONSTRAINT_WEIGHT = 1000000L;
    private final List<Predicate<ConstraintParams>> constraintPredicates;

    public AllocationConstraints() {
        this.constraintPredicates = new ArrayList<>(1);
        this.constraintPredicates.add(isIndexShardsPerNodeBreached());
    }

    /** Immutable parameter holder passed to each constraint predicate. */
    class ConstraintParams {
        private final BalancedShardsAllocator.Balancer balancer;
        private final BalancedShardsAllocator.ModelNode node;
        private final String index;

        ConstraintParams(BalancedShardsAllocator.Balancer balancer, BalancedShardsAllocator.ModelNode node, String index) {
            this.balancer = balancer;
            this.node = node;
            this.index = index;
        }
    }

    /**
     * Evaluates configured allocation constraint predicates for given node - index
     * combination; and returns a weight value based on the number of breached
     * constraints.
     *
     * Constraint weight should be added to the weight calculated via weight
     * function, to reduce priority of allocating on nodes with breached
     * constraints.
     *
     * This weight function is used only in case of unassigned shards to avoid overloading a newly added node.
     * Weight calculation in other scenarios like shard movement and re-balancing remain unaffected by this function.
     */
    public long weight(BalancedShardsAllocator.Balancer balancer, BalancedShardsAllocator.ModelNode node, String index) {
        int constraintsBreached = 0;
        ConstraintParams params = new ConstraintParams(balancer, node, index);
        for (Predicate<ConstraintParams> predicate : constraintPredicates) {
            if (predicate.test(params)) {
                constraintsBreached++;
            }
        }
        // int * long promotes to long, so no overflow for any realistic
        // number of constraints
        return constraintsBreached * CONSTRAINT_WEIGHT;
    }

    /**
     * Constraint to control number of shards of an index allocated on a single
     * node.
     *
     * In current weight function implementation, when a node has significantly
     * fewer shards than other nodes (e.g. during single new node addition or node
     * replacement), its weight is much less than other nodes. All shard allocations
     * at this time tend to land on the new node with skewed weight. This breaks
     * index level balance in the cluster, by creating all shards of the same index
     * on one node, often resulting in a hotspot on that node.
     *
     * This constraint is breached when balancer attempts to allocate more than
     * average shards per index per node.
     */
    private Predicate<ConstraintParams> isIndexShardsPerNodeBreached() {
        return (params) -> {
            int currIndexShardsOnNode = params.node.numShards(params.index);
            // ceil so that e.g. an average of 2.1 still permits 3 shards
            int allowedIndexShardsPerNode = (int) Math.ceil(params.balancer.avgShardsPerNode(params.index));
            return (currIndexShardsOnNode >= allowedIndexShardsPerNode);
        };
    }
}
/**
 * Copyright (C) 2016-2019 Expedia Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hotels.road.rest.model;

import java.time.Duration;

import io.swagger.annotations.ApiModelProperty;

import lombok.Data;

/**
 * REST model for a Hive destination's configuration. Lombok's {@code @Data}
 * generates the getters/setters/equals/hashCode/toString.
 */
@Data
public class HiveDestinationModel {
  // String forms of the landing-interval bounds; kept as compile-time
  // constants so they can be embedded in the annotation description below.
  public static final String MINIMUM_DURATION_STRING = "PT5M";
  public static final String MAXIMUM_DURATION_STRING = "P1D";
  // Using Duration::parse instead of Duration::ofMinutes because LANDING_INTERVAL_DESCRIPTION needs to be a compile
  // time string so that it can be used in a annotation attribute.
  public static final Duration MINIMUM_DURATION = Duration.parse(MINIMUM_DURATION_STRING);
  public static final Duration MAXIMUM_DURATION = Duration.parse(MAXIMUM_DURATION_STRING);

  private static final String LANDING_INTERVAL_DESCRIPTION = "Specifies how often data is landed to Hive, defaults to \"PT1H\". The format is an ISO 8601 Duration, see https://en.wikipedia.org/wiki/ISO_8601#Durations. The value must fall between "
      + MINIMUM_DURATION_STRING + " and " + MAXIMUM_DURATION_STRING;

  // Whether landing to this destination is active.
  @ApiModelProperty(name = "enabled", value = "Specifies if the destination is enabled.")
  private boolean enabled;

  // ISO 8601 duration string; presumably validated elsewhere against
  // MINIMUM_DURATION/MAXIMUM_DURATION — confirm against the validator/caller.
  @ApiModelProperty(name = "landingInterval", value = LANDING_INTERVAL_DESCRIPTION, example = "\"PT1H\"")
  private String landingInterval;
}
package com.vimukti.accounter.web.client.ui.reports;

import java.util.HashMap;

import com.vimukti.accounter.web.client.core.ClientFinanceDate;
import com.vimukti.accounter.web.client.core.reports.SalesByCustomerDetail;
import com.vimukti.accounter.web.client.ui.Accounter;
import com.vimukti.accounter.web.client.ui.UIUtils;
import com.vimukti.accounter.web.client.ui.serverreports.PurchaseByVendorSummaryServerReport;

/**
 * Report view showing purchases summarized by vendor over a selectable date
 * range. Reuses {@link SalesByCustomerDetail} as its row record type.
 */
public class PurchaseByVendorSummaryReport extends
		AbstractReportView<SalesByCustomerDetail> {

	public PurchaseByVendorSummaryReport() {
		this.serverReport = new PurchaseByVendorSummaryServerReport(this);
	}

	/**
	 * Drills into the per-vendor detail report for the clicked row, carrying
	 * over the current toolbar date range.
	 */
	@Override
	public void OnRecordClick(SalesByCustomerDetail record) {
		record.setStartDate(toolbar.getStartDate());
		record.setEndDate(toolbar.getEndDate());
		record.setDateRange(toolbar.getSelectedDateRange());
		UIUtils.runAction(record, PurchaseReportsAction.vendorDetail());
	}

	@Override
	public int getToolbarType() {
		return TOOLBAR_TYPE_DATE_RANGE;
	}

	/** Asynchronously fetches the summary data for the given date window. */
	@Override
	public void makeReportRequest(ClientFinanceDate start, ClientFinanceDate end) {
		Accounter.createReportService().getPurchasesByVendorSummary(start,
				end, this);
	}

	@Override
	public void onEdit() {

	}

	@Override
	public void export(int generationType) {
		// NOTE(review): 130 looks like this report's server-side report id —
		// confirm and consider naming the constant.
		UIUtils.generateReport(generationType, startDate.getDate(),
				endDate.getDate(), 130);
	}

	@Override
	public void printPreview() {

	}

	/**
	 * Column comparator: column 0 sorts by vendor name (case-insensitive),
	 * column 1 by amount; other columns are left unsorted.
	 */
	public int sort(SalesByCustomerDetail obj1, SalesByCustomerDetail obj2,
			int col) {
		switch (col) {
		case 0:
			return obj1.getName().toLowerCase()
					.compareTo(obj2.getName().toLowerCase());
			// case 1:
			// return obj1.getGroupName().toLowerCase().compareTo(
			// obj2.getGroupName().toLowerCase());
		case 1:
			return UIUtils.compareDouble(obj1.getAmount(), obj2.getAmount());
		}
		return 0;
	}

	/** Restores the toolbar date range previously captured by {@link #saveView()}. */
	@Override
	public void restoreView(HashMap<String, Object> map) {
		if (map == null || map.isEmpty()) {
			isDatesArranged = false;
			return;
		}
		ClientFinanceDate startDate = (ClientFinanceDate) map.get("startDate");
		ClientFinanceDate endDate = (ClientFinanceDate) map.get("endDate");
		this.serverReport.setStartAndEndDates(startDate, endDate);
		toolbar.setEndDate(endDate);
		toolbar.setStartDate(startDate);
		toolbar.setDefaultDateRange((String) map.get("selectedDateRange"));
		isDatesArranged = true;
	}

	/** Captures the current toolbar date range so the view can be restored later. */
	@Override
	public HashMap<String, Object> saveView() {
		HashMap<String, Object> map = new HashMap<String, Object>();
		String selectedDateRange = toolbar.getSelectedDateRange();
		ClientFinanceDate startDate = toolbar.getStartDate();
		ClientFinanceDate endDate = toolbar.getEndDate();
		map.put("selectedDateRange", selectedDateRange);
		map.put("startDate", startDate);
		map.put("endDate", endDate);
		return map;
	}
}
/*******************************************************************************
 * Copyright (c) 2004 Actuate Corporation.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *  Actuate Corporation  - initial API and implementation
 *******************************************************************************/

package org.eclipse.birt.report.model.core;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.birt.report.model.elements.strategy.CopyPolicy;

/**
 * A slot that contains an ordered list of elements.
 *
 */

public class MultiElementSlot extends ContainerSlot
{

	/**
	 * The ordered list of contents.
	 */
	// NOTE(review): public mutable field — callers can bypass the slot API and
	// mutate contents directly; confirm before narrowing access.
	public ArrayList<DesignElement> contents = new ArrayList<DesignElement>( );

	/**
	 * Makes a clone for this slot. The cloned slot contains all of the cloned
	 * contents in the original slot. The relationship between content and
	 * container is not kept.
	 * <p>
	 * If the content-container relationship needs to be kept, call
	 * {@link ContainerSlot#copy(DesignElement, int)}.
	 *
	 * @return Object the cloned slot.
	 * @throws CloneNotSupportedException
	 *             propagated from cloning a contained element
	 *
	 * @see java.lang.Object#clone()
	 */

	public Object doClone( CopyPolicy policy )
			throws CloneNotSupportedException
	{
		MultiElementSlot slot = (MultiElementSlot) super.clone( );
		// deep-copy: each contained element is cloned individually
		slot.contents = new ArrayList<DesignElement>( );
		for ( int i = 0; i < contents.size( ); i++ )
		{
			DesignElement e = contents.get( i );
			slot.contents.add( (DesignElement) e.doClone( policy ) );
		}
		return slot;
	}

	/**
	 * Finds the position of the given design element in the slot.
	 *
	 * @param content
	 *            the design element whose position needs to be returned.
	 * @return the position of the given design element in the slot, or -1 if
	 *         it is not contained
	 */

	public int findPosn( DesignElement content )
	{
		return contents.indexOf( content );
	}

	/**
	 * Inserts a design element into this slot with a given position number. The
	 * caller must have validated that the element has not existed in this slot.
	 *
	 * @param element
	 *            design element which need to be inserted into slot.
	 * @param posn
	 *            the zero-based integer number defines the inserted position in
	 *            the slot.
	 */

	public void insert( DesignElement element, int posn )
	{
		assert !contents.contains( element );
		assert posn >= 0 && posn <= contents.size( );
		contents.add( posn, element );
	}

	/**
	 * Removes the design element in this slot. The removed element must existed
	 * in this slot.
	 *
	 * @param element
	 *            design element to be removed.
	 *
	 */

	public void remove( DesignElement element )
	{
		assert contents.contains( element );
		contents.remove( element );

		// Flushing the containment stack is not required on remove for
		// two reasons. First, the elements are no longer accessible and
		// their properties will no longer be accessed. Second, if we
		// do add the element back into the containment hierarchy, we'll
		// flush the cache then.
	}

	/**
	 * Removes an element at the given position.
	 *
	 * @param posn
	 *            position of the element that is to be removed.
	 * @return the element that was removed from the list.
	 */

	public Object remove( int posn )
	{
		assert posn >= 0 && posn < getCount( );
		return contents.remove( posn );
	}

	/**
	 * Determines if the design element can be removed.
	 *
	 * @param element
	 *            design element
	 *
	 * @return true if the element existed in this slot.
	 *
	 */

	public boolean canDrop( DesignElement element )
	{
		return contents.contains( element );
	}

	/**
	 * Returns the contents list which were stored in the slot.
	 *
	 * @return the contents list which were stored in the slot. Note: this is
	 *         the live internal list, not a copy.
	 */

	public List<DesignElement> getContents( )
	{
		return contents;
	}

	/**
	 * Returns the current size of the slot.
	 *
	 * @return current size of the slot.
	 */

	public int getCount( )
	{
		return contents.size( );
	}

	/**
	 *
	 * Moves the design element from position <code>from</code> to
	 * <code>to</code> in this slot.
	 *
	 * @param from
	 *            the old position of the design element
	 * @param to
	 *            the new position of the design element
	 */

	public void moveContent( int from, int to )
	{
		assert from >= 0 && from < contents.size( );
		assert to >= 0 && to < contents.size( );

		if ( from == to )
			return;

		// remove-then-add: 'to' is interpreted as the index in the list after
		// the element has been removed
		DesignElement obj = contents.remove( from );
		contents.add( to, obj );
	}

	/**
	 * Returns true if the design element existed in the slot.
	 *
	 * @param element
	 *            design element
	 * @return true if the design element exists in this slot.
	 */

	public boolean contains( DesignElement element )
	{
		return contents.contains( element );
	}

	/**
	 * Returns the design element stored in this slot at the given position
	 * <code>posn</code>.
	 *
	 * @param posn
	 *            the integer number which defines the position in slot.
	 * @return the design element stored in this slot at the given position
	 *         number.
	 */

	public DesignElement getContent( int posn )
	{
		assert posn >= 0 && posn < contents.size( );
		return contents.get( posn );
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.report.model.core.ContainerSlot#clear()
	 */

	public void clear( )
	{
		this.contents.clear( );
	}
}
package org.apache.cordova.firebase; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.media.RingtoneManager; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.NotificationCompat; import android.util.Log; import android.app.Notification; import android.text.TextUtils; import android.content.ContentResolver; import com.google.firebase.messaging.FirebaseMessagingService; import com.google.firebase.messaging.RemoteMessage; import java.util.Map; import java.util.Random; public class FirebasePluginMessagingService extends FirebaseMessagingService { private static final String TAG = "FirebasePlugin"; /** * Called when message is received. * * @param remoteMessage Object representing the message received from Firebase Cloud Messaging. */ @Override public void onMessageReceived(RemoteMessage remoteMessage) { // [START_EXCLUDE] // There are two types of messages data messages and notification messages. Data messages are handled // here in onMessageReceived whether the app is in the foreground or background. Data messages are the type // traditionally used with GCM. Notification messages are only received here in onMessageReceived when the app // is in the foreground. When the app is in the background an automatically generated notification is displayed. // When the user taps on the notification they are returned to the app. Messages containing both notification // and data payloads are treated as notification messages. The Firebase console always sends notification // messages. For more see: https://firebase.google.com/docs/cloud-messaging/concept-options // [END_EXCLUDE] // TODO(developer): Handle FCM messages here. // Not getting messages here? 
See why this may be: https://goo.gl/39bRNJ String title; String text; String id; String sound = null; if (remoteMessage.getNotification() != null) { title = remoteMessage.getNotification().getTitle(); text = remoteMessage.getNotification().getBody(); id = remoteMessage.getMessageId(); } else { title = remoteMessage.getData().get("title"); text = remoteMessage.getData().get("text"); id = remoteMessage.getData().get("id"); sound = remoteMessage.getData().get("sound"); if(TextUtils.isEmpty(text)){ text = remoteMessage.getData().get("body"); } } if(TextUtils.isEmpty(id)){ Random rand = new Random(); int n = rand.nextInt(50) + 1; id = Integer.toString(n); } Log.d(TAG, "From: " + remoteMessage.getFrom()); Log.d(TAG, "Notification Message id: " + id); Log.d(TAG, "Notification Message Title: " + title); Log.d(TAG, "Notification Message Body/Text: " + text); Log.d(TAG, "Notification Message Sound: " + sound); // TODO: Add option to developer to configure if show notification when app on foreground if (!TextUtils.isEmpty(text) || !TextUtils.isEmpty(title) || (!remoteMessage.getData().isEmpty())) { sendNotification(id, title, text, remoteMessage.getData()); } } private void sendNotification(String id, String title, String messageBody, Map<String, String> data) { Bundle bundle = new Bundle(); for (String key : data.keySet()) { bundle.putString(key, data.get(key)); } bundle.putBoolean("tap", false); FirebasePlugin.sendNotification(bundle); } }
/* * Copyright © 2009 Reinier Zwitserloot and Roel Spilker. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package lombok.eclipse; import java.io.PrintStream; import java.lang.reflect.Modifier; import org.eclipse.jdt.internal.compiler.ast.AbstractMethodDeclaration; import org.eclipse.jdt.internal.compiler.ast.Annotation; import org.eclipse.jdt.internal.compiler.ast.Argument; import org.eclipse.jdt.internal.compiler.ast.Block; import org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration; import org.eclipse.jdt.internal.compiler.ast.ConstructorDeclaration; import org.eclipse.jdt.internal.compiler.ast.FieldDeclaration; import org.eclipse.jdt.internal.compiler.ast.Initializer; import org.eclipse.jdt.internal.compiler.ast.LocalDeclaration; import org.eclipse.jdt.internal.compiler.ast.Statement; import org.eclipse.jdt.internal.compiler.ast.TypeDeclaration; import org.eclipse.jdt.internal.compiler.ast.TypeReference; /** * Implement so you can ask any JavacAST.Node to traverse depth-first through all children, * calling the appropriate visit and endVisit methods. */ public interface EclipseASTVisitor { /** * Called at the very beginning and end. */ void visitCompilationUnit(EclipseNode top, CompilationUnitDeclaration unit); void endVisitCompilationUnit(EclipseNode top, CompilationUnitDeclaration unit); /** * Called when visiting a type (a class, interface, annotation, enum, etcetera). */ void visitType(EclipseNode typeNode, TypeDeclaration type); void visitAnnotationOnType(TypeDeclaration type, EclipseNode annotationNode, Annotation annotation); void endVisitType(EclipseNode typeNode, TypeDeclaration type); /** * Called when visiting a field of a class. * Even though in Eclipse initializers (both instance and static) are represented as Initializer objects, * which are a subclass of FieldDeclaration, those do NOT result in a call to this method. They result * in a call to the visitInitializer method. 
*/ void visitField(EclipseNode fieldNode, FieldDeclaration field); void visitAnnotationOnField(FieldDeclaration field, EclipseNode annotationNode, Annotation annotation); void endVisitField(EclipseNode fieldNode, FieldDeclaration field); /** * Called for static and instance initializers. You can tell the difference via the modifier flag on the * ASTNode (8 for static, 0 for not static). The content is in the 'block', not in the 'initialization', * which would always be null for an initializer instance. */ void visitInitializer(EclipseNode initializerNode, Initializer initializer); void endVisitInitializer(EclipseNode initializerNode, Initializer initializer); /** * Called for both methods (MethodDeclaration) and constructors (ConstructorDeclaration), but not for * Clinit objects, which are a vestigial Eclipse thing that never contain anything. Static initializers * show up as 'Initializer', in the visitInitializer method, with modifier bit STATIC set. */ void visitMethod(EclipseNode methodNode, AbstractMethodDeclaration method); void visitAnnotationOnMethod(AbstractMethodDeclaration method, EclipseNode annotationNode, Annotation annotation); void endVisitMethod(EclipseNode methodNode, AbstractMethodDeclaration method); /** * Visits a method argument */ void visitMethodArgument(EclipseNode argNode, Argument arg, AbstractMethodDeclaration method); void visitAnnotationOnMethodArgument(Argument arg, AbstractMethodDeclaration method, EclipseNode annotationNode, Annotation annotation); void endVisitMethodArgument(EclipseNode argNode, Argument arg, AbstractMethodDeclaration method); /** * Visits a local declaration - that is, something like 'int x = 10;' on the method level. 
*/ void visitLocal(EclipseNode localNode, LocalDeclaration local); void visitAnnotationOnLocal(LocalDeclaration local, EclipseNode annotationNode, Annotation annotation); void endVisitLocal(EclipseNode localNode, LocalDeclaration local); /** * Visits a statement that isn't any of the other visit methods (e.g. TypeDeclaration). */ void visitStatement(EclipseNode statementNode, Statement statement); void endVisitStatement(EclipseNode statementNode, Statement statement); /** * Prints the structure of an AST. */ public static class Printer implements EclipseASTVisitor { private final PrintStream out; private final boolean printContent; private int disablePrinting = 0; private int indent = 0; /** * @param printContent if true, bodies are printed directly, as java code, * instead of a tree listing of every AST node inside it. */ public Printer(boolean printContent) { this(printContent, System.out); } /** * @param printContent if true, bodies are printed directly, as java code, * instead of a tree listing of every AST node inside it. * @param out write output to this stream. You must close it yourself. flush() is called after every line. * * @see java.io.PrintStream#flush() */ public Printer(boolean printContent, PrintStream out) { this.printContent = printContent; this.out = out; } private void forcePrint(String text, Object... params) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < indent; i++) sb.append(" "); out.printf(sb.append(text).append('\n').toString(), params); out.flush(); } private void print(String text, Object... params) { if (disablePrinting == 0) forcePrint(text, params); } private String str(char[] c) { if (c == null) return "(NULL)"; return new String(c); } private String str(TypeReference type) { if (type == null) return "(NULL)"; char[][] c = type.getTypeName(); StringBuilder sb = new StringBuilder(); boolean first = true; for (char[] d : c) { sb.append(first ? 
"" : ".").append(new String(d)); first = false; } return sb.toString(); } public void visitCompilationUnit(EclipseNode node, CompilationUnitDeclaration unit) { out.println("---------------------------------------------------------"); out.println(node.isCompleteParse() ? "COMPLETE" : "incomplete"); print("<CUD %s%s>", node.getFileName(), Eclipse.isGenerated(unit) ? " (GENERATED)" : ""); indent++; } public void endVisitCompilationUnit(EclipseNode node, CompilationUnitDeclaration unit) { indent--; print("</CUD>"); } public void visitType(EclipseNode node, TypeDeclaration type) { print("<TYPE %s%s>", str(type.name), Eclipse.isGenerated(type) ? " (GENERATED)" : ""); indent++; if (printContent) { print("%s", type); disablePrinting++; } } public void visitAnnotationOnType(TypeDeclaration type, EclipseNode node, Annotation annotation) { forcePrint("<ANNOTATION%s: %s />", Eclipse.isGenerated(annotation) ? " (GENERATED)" : "", annotation); } public void endVisitType(EclipseNode node, TypeDeclaration type) { if (printContent) disablePrinting--; indent--; print("</TYPE %s>", str(type.name)); } public void visitInitializer(EclipseNode node, Initializer initializer) { Block block = initializer.block; boolean s = (block != null && block.statements != null); print("<%s INITIALIZER: %s%s>", (initializer.modifiers & Modifier.STATIC) != 0 ? "static" : "instance", s ? "filled" : "blank", Eclipse.isGenerated(initializer) ? " (GENERATED)" : ""); indent++; if (printContent) { if (initializer.block != null) print("%s", initializer.block); disablePrinting++; } } public void endVisitInitializer(EclipseNode node, Initializer initializer) { if (printContent) disablePrinting--; indent--; print("</%s INITIALIZER>", (initializer.modifiers & Modifier.STATIC) != 0 ? "static" : "instance"); } public void visitField(EclipseNode node, FieldDeclaration field) { print("<FIELD%s %s %s = %s>", Eclipse.isGenerated(field) ? 
" (GENERATED)" : "", str(field.type), str(field.name), field.initialization); indent++; if (printContent) { if (field.initialization != null) print("%s", field.initialization); disablePrinting++; } } public void visitAnnotationOnField(FieldDeclaration field, EclipseNode node, Annotation annotation) { forcePrint("<ANNOTATION%s: %s />", Eclipse.isGenerated(annotation) ? " (GENERATED)" : "", annotation); } public void endVisitField(EclipseNode node, FieldDeclaration field) { if (printContent) disablePrinting--; indent--; print("</FIELD %s %s>", str(field.type), str(field.name)); } public void visitMethod(EclipseNode node, AbstractMethodDeclaration method) { String type = method instanceof ConstructorDeclaration ? "CONSTRUCTOR" : "METHOD"; print("<%s %s: %s%s>", type, str(method.selector), method.statements != null ? "filled" : "blank", Eclipse.isGenerated(method) ? " (GENERATED)" : ""); indent++; if (printContent) { if (method.statements != null) print("%s", method); disablePrinting++; } } public void visitAnnotationOnMethod(AbstractMethodDeclaration method, EclipseNode node, Annotation annotation) { forcePrint("<ANNOTATION%s: %s />", Eclipse.isGenerated(method) ? " (GENERATED)" : "", annotation); } public void endVisitMethod(EclipseNode node, AbstractMethodDeclaration method) { if (printContent) disablePrinting--; String type = method instanceof ConstructorDeclaration ? "CONSTRUCTOR" : "METHOD"; indent--; print("</%s %s>", type, str(method.selector)); } public void visitMethodArgument(EclipseNode node, Argument arg, AbstractMethodDeclaration method) { print("<METHODARG%s %s %s = %s>", Eclipse.isGenerated(arg) ? " (GENERATED)" : "", str(arg.type), str(arg.name), arg.initialization); indent++; } public void visitAnnotationOnMethodArgument(Argument arg, AbstractMethodDeclaration method, EclipseNode node, Annotation annotation) { print("<ANNOTATION%s: %s />", Eclipse.isGenerated(annotation) ? 
" (GENERATED)" : "", annotation); } public void endVisitMethodArgument(EclipseNode node, Argument arg, AbstractMethodDeclaration method) { indent--; print("</METHODARG %s %s>", str(arg.type), str(arg.name)); } public void visitLocal(EclipseNode node, LocalDeclaration local) { print("<LOCAL%s %s %s = %s>", Eclipse.isGenerated(local) ? " (GENERATED)" : "", str(local.type), str(local.name), local.initialization); indent++; } public void visitAnnotationOnLocal(LocalDeclaration local, EclipseNode node, Annotation annotation) { print("<ANNOTATION%s: %s />", Eclipse.isGenerated(annotation) ? " (GENERATED)" : "", annotation); } public void endVisitLocal(EclipseNode node, LocalDeclaration local) { indent--; print("</LOCAL %s %s>", str(local.type), str(local.name)); } public void visitStatement(EclipseNode node, Statement statement) { print("<%s%s>", statement.getClass(), Eclipse.isGenerated(statement) ? " (GENERATED)" : ""); indent++; print("%s", statement); } public void endVisitStatement(EclipseNode node, Statement statement) { indent--; print("</%s>", statement.getClass()); } } }
package com.bk.junit; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.when; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) public class TestMockitoInjection { private BoringService service; public TestMockitoInjection(@Mock BoringService service) { this.service = service; } @Test public void testConstructorInjectedValue() { when(service.returnNumber()).thenReturn(2); assertEquals(2, service.returnNumber()); } @Test public void testMethodInjection(@Mock BoringService service) { when(service.returnNumber()).thenReturn(3); assertEquals(3, service.returnNumber()); } public class BoringService { public int returnNumber() { return 1; } } }
package com.iknowyou.domain.patientDetail; import static org.junit.Assert.*; import java.util.List; import org.junit.Before; import org.junit.Test; import com.iknowyou.watsonclient.WatsonKeywordsService; public class WatsonResponseTest { WatsonKeywordsService wqs; @Before public void setUp() throws Exception { wqs = new WatsonKeywordsService(); } @Test public void test() { final List<String> keywors = wqs.getKeywords("The patients complains about continuous headache after being hit by a falling tree. Took on Aspirin."); assertTrue(keywors.size() > 0); } }
package stsjorbsmod.cards; import com.megacrit.cardcrawl.actions.common.GainBlockAction; import com.megacrit.cardcrawl.characters.AbstractPlayer; import com.megacrit.cardcrawl.monsters.AbstractMonster; import stsjorbsmod.JorbsMod; import stsjorbsmod.actions.GainMemoryClarityAction; import stsjorbsmod.actions.RememberSpecificMemoryAction; import stsjorbsmod.characters.Wanderer; import stsjorbsmod.memories.DiligenceMemory; import static stsjorbsmod.JorbsMod.makeCardPath; import static stsjorbsmod.characters.Wanderer.Enums.REMEMBER_MEMORY; public class DoubleCheck extends CustomJorbsModCard { public static final String ID = JorbsMod.makeID(DoubleCheck.class.getSimpleName()); public static final String IMG = makeCardPath("Block_Commons/double_check.png"); private static final CardRarity RARITY = CardRarity.COMMON; private static final CardTarget TARGET = CardTarget.SELF; private static final CardType TYPE = CardType.SKILL; public static final CardColor COLOR = Wanderer.Enums.COLOR_GRAY; private static final int COST = 1; private static final int BLOCK = 5; private static final int UPGRADE_PLUS_BLOCK = 3; public DoubleCheck() { super(ID, IMG, COST, TYPE, COLOR, RARITY, TARGET); block = baseBlock = BLOCK; this.tags.add(REMEMBER_MEMORY); } @Override public void use(AbstractPlayer p, AbstractMonster m) { enqueueAction(new GainBlockAction(p, p, block)); enqueueAction(new GainMemoryClarityAction(p, DiligenceMemory.STATIC.ID)); enqueueAction(new RememberSpecificMemoryAction(new DiligenceMemory(p, false))); } @Override public void upgrade() { if (!upgraded) { upgradeName(); upgradeBlock(UPGRADE_PLUS_BLOCK); initializeDescription(); } } }
package jopenvr;
import com.sun.jna.Callback;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import com.sun.jna.ptr.FloatByReference;
import java.util.Arrays;
import java.util.List;
/**
 * JNA function-pointer table mirroring OpenVR's IVRChaperone interface.
 * Field declaration order must match {@link #getFieldOrder()} and the native
 * struct layout exactly — do not reorder.
 *
 * This file was autogenerated by <a href="http://jnaerator.googlecode.com/">JNAerator</a>,<br>
 * a tool written by <a href="http://ochafik.com/">Olivier Chafik</a> that <a href="http://code.google.com/p/jnaerator/wiki/CreditsAndLicense">uses a few opensource projects.</a>.<br>
 * For help, please visit <a href="http://nativelibs4java.googlecode.com/">NativeLibs4Java</a> , <a href="http://rococoa.dev.java.net/">Rococoa</a>, or <a href="http://jna.dev.java.net/">JNA</a>.
 */
public class VR_IVRChaperone_FnTable extends AlignedStructure {
	// One public field per native function pointer, in native struct order.
	public VR_IVRChaperone_FnTable.GetCalibrationState_callback GetCalibrationState;
	public VR_IVRChaperone_FnTable.GetPlayAreaSize_callback GetPlayAreaSize;
	public VR_IVRChaperone_FnTable.GetPlayAreaRect_callback GetPlayAreaRect;
	public VR_IVRChaperone_FnTable.ReloadInfo_callback ReloadInfo;
	public VR_IVRChaperone_FnTable.SetSceneColor_callback SetSceneColor;
	public VR_IVRChaperone_FnTable.GetBoundsColor_callback GetBoundsColor;
	public VR_IVRChaperone_FnTable.AreBoundsVisible_callback AreBoundsVisible;
	public VR_IVRChaperone_FnTable.ForceBoundsVisible_callback ForceBoundsVisible;
	// NOTE(review): byte return values presumably map to a native C bool
	// (0/1) — confirm against openvr_capi.h before treating them as flags.
	public interface GetCalibrationState_callback extends Callback {
		int apply();
	};
	public interface GetPlayAreaSize_callback extends Callback {
		byte apply(FloatByReference pSizeX, FloatByReference pSizeZ);
	};
	public interface GetPlayAreaRect_callback extends Callback {
		byte apply(HmdQuad_t rect);
	};
	public interface ReloadInfo_callback extends Callback {
		void apply();
	};
	public interface SetSceneColor_callback extends Callback {
		void apply(HmdColor_t.ByValue color);
	};
	public interface GetBoundsColor_callback extends Callback {
		void apply(HmdColor_t pOutputColorArray, int nNumOutputColors, float flCollisionBoundsFadeDistance, HmdColor_t pOutputCameraColor);
	};
	public interface AreBoundsVisible_callback extends Callback {
		byte apply();
	};
	public interface ForceBoundsVisible_callback extends Callback {
		void apply(byte bForce);
	};
	public VR_IVRChaperone_FnTable() {
		super();
	}
	// Tells JNA how Java fields map onto the native struct layout.
	protected List<? > getFieldOrder() {
		return Arrays.asList("GetCalibrationState", "GetPlayAreaSize", "GetPlayAreaRect", "ReloadInfo", "SetSceneColor", "GetBoundsColor", "AreBoundsVisible", "ForceBoundsVisible");
	}
	public VR_IVRChaperone_FnTable(VR_IVRChaperone_FnTable.GetCalibrationState_callback GetCalibrationState, VR_IVRChaperone_FnTable.GetPlayAreaSize_callback GetPlayAreaSize, VR_IVRChaperone_FnTable.GetPlayAreaRect_callback GetPlayAreaRect, VR_IVRChaperone_FnTable.ReloadInfo_callback ReloadInfo, VR_IVRChaperone_FnTable.SetSceneColor_callback SetSceneColor, VR_IVRChaperone_FnTable.GetBoundsColor_callback GetBoundsColor, VR_IVRChaperone_FnTable.AreBoundsVisible_callback AreBoundsVisible, VR_IVRChaperone_FnTable.ForceBoundsVisible_callback ForceBoundsVisible) {
		super();
		this.GetCalibrationState = GetCalibrationState;
		this.GetPlayAreaSize = GetPlayAreaSize;
		this.GetPlayAreaRect = GetPlayAreaRect;
		this.ReloadInfo = ReloadInfo;
		this.SetSceneColor = SetSceneColor;
		this.GetBoundsColor = GetBoundsColor;
		this.AreBoundsVisible = AreBoundsVisible;
		this.ForceBoundsVisible = ForceBoundsVisible;
	}
	public VR_IVRChaperone_FnTable(Pointer peer) {
		super(peer);
	}
	public static class ByReference extends VR_IVRChaperone_FnTable implements Structure.ByReference {
		
	};
	public static class ByValue extends VR_IVRChaperone_FnTable implements Structure.ByValue {
		
	};
}
package com.rkg.springcloud; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.netflix.eureka.EnableEurekaClient; import org.springframework.cloud.netflix.feign.EnableFeignClients; import org.springframework.context.annotation.ComponentScan; @SpringBootApplication @EnableEurekaClient @EnableFeignClients(basePackages= {"com.rkg.springcloud"}) @ComponentScan("com.rkg.springcloud") public class DeptConsumer80_Feign_App { public static void main(String[] args) { SpringApplication.run(DeptConsumer80_Feign_App.class, args); } }
/* * Copyright (c) Open Connectivity Foundation (OCF), AllJoyn Open Source * Project (AJOSP) Contributors and others. * * SPDX-License-Identifier: Apache-2.0 * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution, and is available at * http://www.apache.org/licenses/LICENSE-2.0 * * Copyright (c) Open Connectivity Foundation and Contributors to AllSeen * Alliance. All rights reserved. * * Permission to use, copy, modify, and/or distribute this software for * any purpose with or without fee is hereby granted, provided that the * above copyright notice and this permission notice appear in all * copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE * AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR * PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR * PERFORMANCE OF THIS SOFTWARE. */ package org.allseen.lsf.sdk.listener; import org.allseen.lsf.sdk.TrackingID; /** * <b>WARNING: This class is not intended to be used by clients, and its interface may change * in subsequent releases of the SDK</b>. */ public interface PresetCollectionListener<PRESET, ERROR> extends LightingListener { public void onPresetInitialized(TrackingID trackingId, PRESET preset); public void onPresetChanged(PRESET preset); public void onPresetRemoved(PRESET preset); public void onPresetError(ERROR error); }
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.util; public class PrintWriterPrinter implements android.util.Printer { public PrintWriterPrinter(java.io.PrintWriter pw) { throw new RuntimeException("Stub!"); } public void println(java.lang.String x) { throw new RuntimeException("Stub!"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.common.cloud; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Id; /** * ZkACLProvider that gives all permissions for the user specified in System * property "solr.authorization.superuser" (default: "solr") when using sasl, * and gives read permissions for anyone else. Designed for a setup where * configurations have already been set up and will not be modified, or * where configuration changes are controlled via Solr APIs. */ public class SaslZkACLProvider extends SecurityAwareZkACLProvider { private static final String superUser = System.getProperty("solr.authorization.superuser", "solr"); @Override protected List<ACL> createNonSecurityACLsToAdd() { return Arrays.asList( new ACL(ZooDefs.Perms.ALL, new Id("sasl", superUser)), new ACL(ZooDefs.Perms.READ, ZooDefs.Ids.ANYONE_ID_UNSAFE) ); } @Override protected List<ACL> createSecurityACLsToAdd() { return Collections.singletonList(new ACL(ZooDefs.Perms.ALL, new Id("sasl", superUser))); } }
/* * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. * Use of this file is governed by the BSD 3-clause license that * can be found in the LICENSE.txt file in the project root. */ package org.antlr.v4.test.runtime.cpp; import org.antlr.v4.test.runtime.BaseRuntimeTest; import org.antlr.v4.test.runtime.RuntimeTestDescriptor; import org.antlr.v4.test.runtime.descriptors.FullContextParsingDescriptors; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @RunWith(Parameterized.class) public class TestFullContextParsing extends BaseRuntimeTest { public TestFullContextParsing(RuntimeTestDescriptor descriptor) { super(descriptor,new BaseCppTest()); } @Parameterized.Parameters(name="{0}") public static RuntimeTestDescriptor[] getAllTestDescriptors() { return BaseRuntimeTest.getRuntimeTestDescriptors(FullContextParsingDescriptors.class, "Cpp"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache; import java.util.concurrent.atomic.AtomicReference; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteTransactions; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxFastFinishFuture; import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx; import org.apache.ignite.internal.processors.cache.transactions.TransactionProxyImpl; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; import static 
org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE; /** * */ public class CacheTxFastFinishTest extends GridCommonAbstractTest { /** */ private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); /** */ private boolean client; /** */ private boolean nearCache; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(IP_FINDER); CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME); ccfg.setCacheMode(PARTITIONED); ccfg.setAtomicityMode(TRANSACTIONAL); ccfg.setBackups(1); ccfg.setWriteSynchronizationMode(FULL_SYNC); if (nearCache) ccfg.setNearConfiguration(new NearCacheConfiguration()); cfg.setCacheConfiguration(ccfg); cfg.setClientMode(client); return cfg; } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); stopAllGrids(); } /** * @throws Exception If failed. */ public void testFastFinishTxNearCache() throws Exception { nearCache = true; fastFinishTx(); } /** * @throws Exception If failed. */ public void testFastFinishTx() throws Exception { fastFinishTx(); } /** * @throws Exception If failed. 
*/ private void fastFinishTx() throws Exception { startGrid(0); fastFinishTx(ignite(0)); client = true; startGrid(1); for (int i = 0; i < 2; i++) fastFinishTx(ignite(i)); client = false; startGrid(2); for (int i = 0; i < 3; i++) fastFinishTx(ignite(i)); startGrid(3); for (int i = 0; i < 4; i++) fastFinishTx(ignite(i)); stopGrid(1); for (int i = 0; i < 4; i++) { if (i != 1) fastFinishTx(ignite(i)); } } /** * @param ignite Node. */ private void fastFinishTx(Ignite ignite) { IgniteTransactions txs = ignite.transactions(); IgniteCache cache = ignite.cache(DEFAULT_CACHE_NAME); for (boolean commit : new boolean[]{true, false}) { for (TransactionConcurrency c : TransactionConcurrency.values()) { for (TransactionIsolation isolation : TransactionIsolation.values()) { try (Transaction tx = txs.txStart(c, isolation)) { checkFastTxFinish(tx, commit); } } } for (int i = 0; i < 100; i++) { try (Transaction tx = txs.txStart(OPTIMISTIC, REPEATABLE_READ)) { cache.get(i); checkFastTxFinish(tx, commit); } try (Transaction tx = txs.txStart(OPTIMISTIC, READ_COMMITTED)) { cache.get(i); checkFastTxFinish(tx, commit); } } for (int i = 0; i < 100; i++) { try (Transaction tx = txs.txStart(OPTIMISTIC, SERIALIZABLE)) { cache.get(i); checkNormalTxFinish(tx, commit); } try (Transaction tx = txs.txStart(PESSIMISTIC, REPEATABLE_READ)) { cache.get(i); checkNormalTxFinish(tx, commit); } } for (int i = 0; i < 100; i++) { for (TransactionConcurrency c : TransactionConcurrency.values()) { for (TransactionIsolation isolation : TransactionIsolation.values()) { try (Transaction tx = txs.txStart(c, isolation)) { cache.put(i, i); checkNormalTxFinish(tx, commit); } } } } } } /** * @param tx Transaction. * @param commit Commit flag. 
*/ private void checkFastTxFinish(Transaction tx, boolean commit) { if (commit) tx.commit(); else tx.rollback(); IgniteInternalTx tx0 = ((TransactionProxyImpl)tx).tx(); assertNull(fieldValue(tx0, "prepFut")); assertTrue(fieldValue(tx0, "finishFut") instanceof GridNearTxFastFinishFuture); } /** * @param tx Transaction. * @param commit Commit flag. */ private void checkNormalTxFinish(Transaction tx, boolean commit) { IgniteInternalTx tx0 = ((TransactionProxyImpl)tx).tx(); if (commit) { tx.commit(); assertNotNull(fieldValue(tx0, "prepFut")); assertNotNull(fieldValue(tx0, "finishFut")); } else { tx.rollback(); assertNull(fieldValue(tx0, "prepFut")); assertNotNull(fieldValue(tx0, "finishFut")); } } /** * @param obj Obejct. * @param fieldName Field name. * @return Field value. */ private Object fieldValue(Object obj, String fieldName) { Object val = GridTestUtils.getFieldValue(obj, fieldName); if (val == null) return null; if (val instanceof AtomicReference) return ((AtomicReference)val).get(); return val; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package helloworld; import org.osoa.sca.annotations.Remotable; import commonj.sdo.DataObject; /** * The interface for the helloworld service */ @Remotable public interface HelloWorldService { public String getGreetings(DataObject name); }
package at.favre.lib.crypto.bcrypt;

import at.favre.lib.bytes.Bytes;
import org.junit.Before;
import org.junit.Test;

import java.nio.charset.StandardCharsets;

import static org.junit.Assert.*;

/**
 * Tests for {@link BCryptParser}: round-trips freshly generated hashes through
 * the parser, and verifies that malformed Modular-Crypt-Format strings are
 * rejected with {@link IllegalBCryptFormatException}.
 */
public class BCryptParserTest {
    private BCryptParser parser;

    @Before
    public void setUp() {
        // Fresh default parser per test; UTF-8 matches how hashes are generated below.
        parser = new BCryptParser.Default(new Radix64Encoder.Default(), StandardCharsets.UTF_8);
    }

    /** Hashes with cost factors 4..9 must parse back to the same cost, version and salt. */
    @Test
    public void parseDifferentCostFactors() throws Exception {
        for (int cost = 4; cost < 10; cost++) {
            byte[] salt = Bytes.random(16).array();
            byte[] hash = BCrypt.withDefaults().hash(cost, salt, "12345".getBytes());
            BCrypt.HashData parts = parser.parse(hash);

            assertEquals(cost, parts.cost);
            assertEquals(BCrypt.Version.VERSION_2A, parts.version);
            assertArrayEquals(salt, parts.rawSalt);
            // bcrypt emits a 23-byte raw hash (184 bits) regardless of cost.
            assertEquals(23, parts.rawHash.length);
            System.out.println(parts);
        }
    }

    /** Every supported version identifier must survive a generate/parse round trip. */
    @Test
    public void parseDifferentVersions() throws Exception {
        for (BCrypt.Version version : BCrypt.Version.SUPPORTED_VERSIONS) {
            byte[] salt = Bytes.random(16).array();
            byte[] hash = BCrypt.with(version).hash(6, salt, "hs61i1oAJhdasdÄÄ".getBytes(StandardCharsets.UTF_8));
            BCrypt.HashData parts = parser.parse(hash);

            assertEquals(version, parts.version);
            assertEquals(6, parts.cost);
            assertArrayEquals(salt, parts.rawSalt);
            assertEquals(23, parts.rawHash.length);
            System.out.println(parts);
        }
    }

    /** Two-digit cost factors (e.g. 11) occupy two characters and must parse correctly. */
    @Test
    public void parseDoubleDigitCost() throws Exception {
        byte[] salt = Bytes.random(16).array();
        byte[] hash = BCrypt.with(BCrypt.Version.VERSION_2A).hash(11, salt, "i27ze8172eaidh asdhsd".getBytes(StandardCharsets.UTF_8));
        BCrypt.HashData parts = parser.parse(hash);

        assertEquals(BCrypt.Version.VERSION_2A, parts.version);
        assertEquals(11, parts.cost);
        assertArrayEquals(salt, parts.rawSalt);
        assertEquals(23, parts.rawHash.length);
        System.out.println(parts);
    }

    // ------------------------------------------------------------------
    // Negative cases: each fixture below corrupts exactly one aspect of
    // the "$2a$06$<22-char salt><31-char hash>" layout.
    // ------------------------------------------------------------------

    /** No version segment at all ("$06$..."). */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingVersion() throws Exception {
        parser.parse("$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** Single-digit cost "6" instead of zero-padded "06". */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingLeadingZero() throws Exception {
        parser.parse("$2a$6$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** '$' separator missing after the cost factor. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingSeparator() throws Exception {
        parser.parse("$2a$06If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** '$' separator missing between version and cost. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingSeparator2() throws Exception {
        parser.parse("$2a06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** Bare "2" is not a recognised version. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorInvalidVersion() throws Exception {
        parser.parse("$2$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** Unknown major version "3a". */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorInvalidVersion2() throws Exception {
        parser.parse("$3a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** Unknown minor revision "2l". */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorInvalidVersion3() throws Exception {
        parser.parse("$2l$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i".getBytes());
    }

    /** Header only — salt and hash entirely absent. (Method name typo "Has" kept for API stability.) */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingSaltAndHas() throws Exception {
        parser.parse("$2a$06$".getBytes());
    }

    /** Salt present but hash segment absent. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingHash() throws Exception {
        parser.parse("$2a$06$If6bvum7DFjUnE9p2uDeDu".getBytes());
    }

    /** Hash one radix-64 character too short. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorMissingChar() throws Exception {
        parser.parse("$2a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0".getBytes());
    }

    /** Hash one character too long. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorTooLong() throws Exception {
        parser.parse("$2a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i9".getBytes());
    }

    /** Null input is an argument error, not a format error. */
    @Test(expected = IllegalArgumentException.class)
    public void parseErrorNullHash() throws Exception {
        parser.parse(null);
    }

    /** Empty input is likewise an argument error. */
    @Test(expected = IllegalArgumentException.class)
    public void parseErrorZeroLengthHash() throws Exception {
        parser.parse(new byte[0]);
    }

    /** Input shorter than the minimal "$2a$" prefix. */
    @Test(expected = IllegalBCryptFormatException.class)
    public void parseErrorWayTooShort() throws Exception {
        parser.parse("$2a".getBytes());
    }

    /** The format exception must carry a meaningful, human-readable message. */
    @Test
    public void parseErrorTooLongGetExceptionMessage() {
        try {
            parser.parse("$2a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i9".getBytes());
            fail();
        } catch (IllegalBCryptFormatException e) {
            assertNotNull(e.getMessage());
            // Sanity check that the message is descriptive, not a bare class name.
            assertTrue(e.getMessage().length() > 20);
            System.out.println(e.getMessage());
        }
    }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.cassandra.db.commitlog;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
import java.util.concurrent.*;

import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.compress.CompressionParameters;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.WrappedRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Strings;

/**
 * Drives commit-log segment archiving and point-in-time-restore (PITR).
 * Configuration is read once, at construction, from
 * {@code commitlog_archiving.properties} on the classpath; if that resource
 * is absent, both archiving and restore are disabled.
 */
public class CommitLogArchiver
{
    private static final Logger logger = LoggerFactory.getLogger(CommitLogArchiver.class);

    // NOTE(review): SimpleDateFormat is NOT thread-safe, and this instance is
    // both static and public. Any concurrent callers must synchronize
    // externally — confirm all usages before relying on it from multiple threads.
    public static final SimpleDateFormat format = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss");

    // Separator between entries in the restore_directories property.
    private static final String DELIMITER = ",";

    static
    {
        // Restore target times in the properties file are interpreted as GMT.
        format.setTimeZone(TimeZone.getTimeZone("GMT"));
    }

    // In-flight archive tasks, keyed by segment name; entries are removed by
    // maybeWaitForArchiving().
    public final Map<String, Future<?>> archivePending = new ConcurrentHashMap<String, Future<?>>();

    // Single executor for all archive/restore shell commands.
    private final ExecutorService executor = new JMXEnabledThreadPoolExecutor("CommitLogArchiver");

    // Shell command templates; %name/%path (%from/%to for restore) are
    // substituted per file. Null/empty means the feature is disabled.
    final String archiveCommand;
    final String restoreCommand;
    final String restoreDirectories;

    // Replay cutoff for PITR, in milliseconds since the epoch; Long.MAX_VALUE
    // means "replay everything".
    public final long restorePointInTime;

    // Granularity used when comparing mutation timestamps against the restore point.
    public final TimeUnit precision;

    /**
     * Loads archiving/restore settings from {@code commitlog_archiving.properties}.
     * All fields are final and fully initialized on every path through this
     * constructor.
     *
     * @throws RuntimeException if the properties cannot be read, a restore
     *         directory cannot be created, or the restore target time cannot
     *         be parsed.
     */
    public CommitLogArchiver()
    {
        Properties commitlog_commands = new Properties();
        InputStream stream = null;
        try
        {
            stream = getClass().getClassLoader().getResourceAsStream("commitlog_archiving.properties");
            if (stream == null)
            {
                // No config resource: run with archiving and PITR disabled.
                logger.debug("No commitlog_archiving properties found; archive + pitr will be disabled");
                archiveCommand = null;
                restoreCommand = null;
                restoreDirectories = null;
                restorePointInTime = Long.MAX_VALUE;
                precision = TimeUnit.MICROSECONDS;
            }
            else
            {
                commitlog_commands.load(stream);
                archiveCommand = commitlog_commands.getProperty("archive_command");
                restoreCommand = commitlog_commands.getProperty("restore_command");
                restoreDirectories = commitlog_commands.getProperty("restore_directories");
                if (restoreDirectories != null && !restoreDirectories.isEmpty())
                {
                    // Eagerly create each configured restore directory so a
                    // misconfiguration fails at startup rather than at restore time.
                    for (String dir : restoreDirectories.split(DELIMITER))
                    {
                        File directory = new File(dir);
                        if (!directory.exists())
                        {
                            // NOTE(review): mkdir() does not create missing parent
                            // directories (mkdirs() would) — confirm this is intended.
                            if (!directory.mkdir())
                            {
                                throw new RuntimeException("Unable to create directory: " + dir);
                            }
                        }
                    }
                }
                String targetTime = commitlog_commands.getProperty("restore_point_in_time");
                precision = TimeUnit.valueOf(commitlog_commands.getProperty("precision", "MICROSECONDS"));
                try
                {
                    // Absent/blank target time means "no cutoff".
                    restorePointInTime = Strings.isNullOrEmpty(targetTime) ? Long.MAX_VALUE : format.parse(targetTime).getTime();
                }
                catch (ParseException e)
                {
                    throw new RuntimeException("Unable to parse restore target time", e);
                }
            }
        }
        catch (IOException e)
        {
            throw new RuntimeException("Unable to load commitlog_archiving.properties", e);
        }
        finally
        {
            FileUtils.closeQuietly(stream);
        }
    }

    /**
     * Asynchronously archives a managed commit log segment (no-op when no
     * archive command is configured). Waits for the segment's final sync
     * before running the command, so the archived copy is complete.
     */
    public void maybeArchive(final CommitLogSegment segment)
    {
        if (Strings.isNullOrEmpty(archiveCommand))
            return;

        archivePending.put(segment.getName(), executor.submit(new WrappedRunnable()
        {
            protected void runMayThrow() throws IOException
            {
                segment.waitForFinalSync();
                String command = archiveCommand.replace("%name", segment.getName());
                command = command.replace("%path", segment.getPath());
                exec(command);
            }
        }));
    }

    /**
     * Differs from the above because it can be used on any file, rather than only
     * managed commit log segments (and thus cannot call waitForFinalSync).
     *
     * Used to archive files present in the commit log directory at startup (CASSANDRA-6904)
     */
    public void maybeArchive(final String path, final String name)
    {
        if (Strings.isNullOrEmpty(archiveCommand))
            return;

        archivePending.put(name, executor.submit(new WrappedRunnable()
        {
            protected void runMayThrow() throws IOException
            {
                String command = archiveCommand.replace("%name", name);
                command = command.replace("%path", path);
                exec(command);
            }
        }));
    }

    /**
     * Blocks until the pending archive task for {@code name} (if any) completes.
     *
     * @return {@code true} if archiving succeeded or was never started;
     *         {@code false} if the archive command failed with an IOException
     *         (logged, and the segment is skipped rather than fatal).
     */
    public boolean maybeWaitForArchiving(String name)
    {
        Future<?> f = archivePending.remove(name);
        if (f == null)
            return true; // archiving disabled
        try
        {
            f.get();
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
        catch (ExecutionException e)
        {
            // Only IOException from the archive command is tolerated; anything
            // else is a programming error and is rethrown.
            if (e.getCause() instanceof IOException)
            {
                logger.error("Looks like the archiving of file {} failed earlier, cassandra is going to ignore this segment for now.", name);
                return false;
            }
            throw new RuntimeException(e);
        }
        return true;
    }

    /**
     * Copies archived segments from the configured restore directories into the
     * commit log directory by running the restore command per file. Validates
     * each segment's descriptor (name vs. header consistency, version,
     * compression) before restoring; already-present segments are skipped.
     */
    public void maybeRestoreArchive()
    {
        if (Strings.isNullOrEmpty(restoreDirectories))
            return;

        for (String dir : restoreDirectories.split(DELIMITER))
        {
            File[] files = new File(dir).listFiles();
            if (files == null)
            {
                throw new RuntimeException("Unable to list directory " + dir);
            }
            for (File fromFile : files)
            {
                // Build a descriptor from the segment header and/or its file
                // name; the two must agree when both are available.
                CommitLogDescriptor fromHeader = CommitLogDescriptor.fromHeader(fromFile);
                CommitLogDescriptor fromName = CommitLogDescriptor.isValid(fromFile.getName()) ? CommitLogDescriptor.fromFileName(fromFile.getName()) : null;
                CommitLogDescriptor descriptor;
                if (fromHeader == null && fromName == null)
                    throw new IllegalStateException("Cannot safely construct descriptor for segment, either from its name or its header: " + fromFile.getPath());
                else if (fromHeader != null && fromName != null && !fromHeader.equalsIgnoringCompression(fromName))
                    throw new IllegalStateException(String.format("Cannot safely construct descriptor for segment, as name and header descriptors do not match (%s vs %s): %s", fromHeader, fromName, fromFile.getPath()));
                else if (fromName != null && fromHeader == null && fromName.version >= CommitLogDescriptor.VERSION_21)
                    throw new IllegalStateException("Cannot safely construct descriptor for segment, as name descriptor implies a version that should contain a header descriptor, but that descriptor could not be read: " + fromFile.getPath());
                else if (fromHeader != null)
                    descriptor = fromHeader;
                else descriptor = fromName;

                if (descriptor.version > CommitLogDescriptor.VERSION_30)
                    throw new IllegalStateException("Unsupported commit log version: " + descriptor.version);

                if (descriptor.compression != null)
                {
                    // Fail fast if the segment was written with a compressor
                    // this node cannot instantiate.
                    try
                    {
                        CompressionParameters.createCompressor(descriptor.compression);
                    }
                    catch (ConfigurationException e)
                    {
                        throw new IllegalStateException("Unknown compression", e);
                    }
                }

                File toFile = new File(DatabaseDescriptor.getCommitLogLocation(), descriptor.fileName());
                if (toFile.exists())
                {
                    logger.debug("Skipping restore of archive {} as the segment already exists in the restore location {}", fromFile.getPath(), toFile.getPath());
                    continue;
                }

                String command = restoreCommand.replace("%from", fromFile.getPath());
                command = command.replace("%to", toFile.getPath());
                try
                {
                    exec(command);
                }
                catch (IOException e)
                {
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /**
     * Runs the given shell command (split on spaces) and waits for it,
     * merging stderr into stdout.
     */
    private void exec(String command) throws IOException
    {
        ProcessBuilder pb = new ProcessBuilder(command.split(" "));
        pb.redirectErrorStream(true);
        FBUtilities.exec(pb);
    }
}
/* ===========================================================
 * JFreeChart : a free chart library for the Java(tm) platform
 * ===========================================================
 *
 * (C) Copyright 2000-2013, by Object Refinery Limited and Contributors.
 *
 * Project Info: http://www.jfree.org/jfreechart/index.html
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
 * USA.
 *
 * [Oracle and Java are registered trademarks of Oracle and/or its affiliates.
 * Other names may be trademarks of their respective owners.]
 *
 * -----------------------------
 * PieSectionLabelGenerator.java
 * -----------------------------
 * (C) Copyright 2001-2008, by Object Refinery Limited.
 *
 * Original Author:  David Gilbert (for Object Refinery Limited);
 * Contributor(s):   -;
 *
 * Changes
 * -------
 * 13-Dec-2001 : Version 1 (DG);
 * 16-Jan-2002 : Completed Javadocs (DG);
 * 26-Sep-2002 : Fixed errors reported by Checkstyle (DG);
 * 30-Oct-2002 : Category is now a Comparable instance (DG);
 * 07-Mar-2003 : Changed to KeyedValuesDataset and added pieIndex
 *               parameter (DG);
 * 21-Mar-2003 : Updated Javadocs (DG);
 * 24-Apr-2003 : Switched around PieDataset and KeyedValuesDataset (DG);
 * 13-Aug-2003 : Added clone() method (DG);
 * 19-Aug-2003 : Renamed PieToolTipGenerator --> PieItemLabelGenerator (DG);
 * 11-Nov-2003 : Removed clone() method (DG);
 * 30-Jan-2004 : Added generateSectionLabel() method (DG);
 * 15-Apr-2004 : Moved generateToolTip() method into separate interface and
 *               renamed this interface PieSectionLabelGenerator (DG);
 *
 */

package org.jfree.chart.labels;

import java.awt.Font;
import java.awt.Paint;
import java.awt.font.TextAttribute;
import java.text.AttributedString;

import org.jfree.data.general.PieDataset;

/**
 * Interface for a label generator for plots that use data from
 * a {@link PieDataset}.
 */
public interface PieSectionLabelGenerator {

    /**
     * Generates a label for a pie section.
     *
     * @param dataset  the dataset (<code>null</code> not permitted).
     * @param key  the section key (<code>null</code> not permitted).
     *
     * @return The label (possibly <code>null</code>).
     */
    public String generateSectionLabel(PieDataset dataset, Comparable key);

    /**
     * Generates an attributed label for the specified series, or
     * <code>null</code> if no attributed label is available (in which case,
     * the string returned by
     * {@link #generateSectionLabel(PieDataset, Comparable)} will
     * provide the fallback).  Only certain attributes are recognised by the
     * code that ultimately displays the labels:
     * <ul>
     * <li>{@link TextAttribute#FONT}: will set the font;</li>
     * <li>{@link TextAttribute#POSTURE}: a value of
     *     {@link TextAttribute#POSTURE_OBLIQUE} will add {@link Font#ITALIC} to
     *     the current font;</li>
     * <li>{@link TextAttribute#WEIGHT}: a value of
     *     {@link TextAttribute#WEIGHT_BOLD} will add {@link Font#BOLD} to the
     *     current font;</li>
     * <li>{@link TextAttribute#FOREGROUND}: this will set the {@link Paint}
     *     for the current section label;</li>
     * <li>{@link TextAttribute#SUPERSCRIPT}: the values
     *     {@link TextAttribute#SUPERSCRIPT_SUB} and
     *     {@link TextAttribute#SUPERSCRIPT_SUPER} are recognised.</li>
     * </ul>
     *
     * @param dataset  the dataset.
     * @param key  the key.
     *
     * @return An attributed label (possibly <code>null</code>).
     */
    public AttributedString generateAttributedSectionLabel(PieDataset dataset,
            Comparable key);

}
package com.java110.api.smo.community;

import com.java110.core.context.IPageData;
import com.java110.utils.exception.SMOException;
import org.springframework.http.ResponseEntity;

/**
 * Community (residential estate) management service interface.
 *
 * add by wuxw 2019-06-29
 */
public interface IListCommunitysSMO {

    /**
     * Queries community information.
     *
     * @param pd page data wrapper carrying the request parameters
     * @return ResponseEntity whose body is the (presumably JSON-serialized) community list — confirm against the implementation
     * @throws SMOException on a business-layer failure
     */
    ResponseEntity<String> listCommunitys(IPageData pd) throws SMOException;
}
package com.intuit.karate; /** * * @author pthomas3 */ public class AssertionResult { protected final String message; protected final boolean pass; public static final AssertionResult PASS = new AssertionResult(true, null); private AssertionResult(boolean pass, String message) { this.pass = pass; this.message = message; } public static AssertionResult fail(String message) { return new AssertionResult(false, message); } @Override public String toString() { return pass ? "passed" : "assertion failed: " + message; } }
/* * Copyright Ningbo Qishan Technology Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mds.group.purchase.constant; /** * The interface Http Constant. * * @author pavawi */ public interface HttpConstant { /** * The Constant VERSION. */ String VERSION = "1.0.5"; /** * The Constant BASE_URL. */ String BASE_URL = "https://login.weixin.qq.com"; /** * The Constant GET. */ String GET = "GET"; /** * The Constant GROUP_BR. */ String GROUP_BR = ":<br/>"; /** * The Constant GROUP_IDENTIFY. */ String GROUP_IDENTIFY = "@@"; /** * The Constant LOCATION_IDENTIFY. */ String LOCATION_IDENTIFY = "/cgi-bin/mmwebwx-bin/webwxgetpubliclinkimg?url="; /** * The Constant USER_AGENT. */ String USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) " + "Chrome/63.0.3239.132 Safari/537.36"; }
package tierability.item.tool.base;

import net.minecraft.client.item.TooltipContext;
import net.minecraft.item.ItemStack;
import net.minecraft.item.ToolMaterial;
import net.minecraft.text.Text;
import net.minecraft.text.TranslatableText;
import net.minecraft.world.World;
import org.jetbrains.annotations.Nullable;
import ru.bclib.items.tool.BaseHoeItem;
import tierability.item.tool.TierabilityTools;

import java.util.List;

/**
 * Hoe variant that augments the vanilla tooltip with tier-specific effect
 * lines (electro, paralysing electro, fire) depending on which Tierability
 * tool the stack actually is.
 */
public class CustomHoeItem extends BaseHoeItem {

    public CustomHoeItem(ToolMaterial material, int attackDamage, float attackSpeed, Settings settings) {
        super(material, attackDamage, attackSpeed, settings);
    }

    @Override
    public void appendTooltip(ItemStack stack, @Nullable World world, List<Text> tooltip, TooltipContext context) {
        // Generic hoe line first, then effect lines, then whatever the parent adds.
        tooltip.add(new TranslatableText("item.tierability.hoe.tooltip"));

        boolean flameVariant = stack.isOf(TierabilityTools.T1_FLAME_HOE)
                || stack.isOf(TierabilityTools.T2_FLAME_HOE);

        if (stack.isOf(TierabilityTools.T1_ELECTRO_HOE)) {
            tooltip.add(new TranslatableText("item.tierability.electro"));
        }
        if (stack.isOf(TierabilityTools.T2_ELECTRO_HOE)) {
            tooltip.add(new TranslatableText("item.tierability.electro_paralyse"));
        }
        if (flameVariant) {
            tooltip.add(new TranslatableText("item.tierability.fire"));
        }

        super.appendTooltip(stack, world, tooltip, context);
    }
}
/*
 * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.globalaccelerator.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for describing a custom-routing accelerator; carries only the
 * accelerator's ARN.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/globalaccelerator-2018-08-08/DescribeCustomRoutingAccelerator"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeCustomRoutingAcceleratorRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the accelerator to describe.
     * </p>
     */
    private String acceleratorArn;

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the accelerator to describe.
     * </p>
     *
     * @param acceleratorArn
     *        The Amazon Resource Name (ARN) of the accelerator to describe.
     */
    public void setAcceleratorArn(String acceleratorArn) {
        this.acceleratorArn = acceleratorArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the accelerator to describe.
     * </p>
     *
     * @return The Amazon Resource Name (ARN) of the accelerator to describe.
     */
    public String getAcceleratorArn() {
        return this.acceleratorArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the accelerator to describe.
     * </p>
     *
     * @param acceleratorArn
     *        The Amazon Resource Name (ARN) of the accelerator to describe.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeCustomRoutingAcceleratorRequest withAcceleratorArn(String acceleratorArn) {
        setAcceleratorArn(acceleratorArn);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append("{");
        // Only include the field when it has been set, matching the SDK's
        // generated toString layout.
        if (getAcceleratorArn() != null) {
            text.append("AcceleratorArn: ").append(getAcceleratorArn());
        }
        text.append("}");
        return text.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeCustomRoutingAcceleratorRequest)) {
            return false;
        }
        DescribeCustomRoutingAcceleratorRequest other = (DescribeCustomRoutingAcceleratorRequest) obj;
        // Null-safe comparison of the single field.
        return java.util.Objects.equals(this.getAcceleratorArn(), other.getAcceleratorArn());
    }

    @Override
    public int hashCode() {
        // Same 31-based formula the generated code used, so hash values are unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getAcceleratorArn() == null) ? 0 : getAcceleratorArn().hashCode());
        return hashCode;
    }

    @Override
    public DescribeCustomRoutingAcceleratorRequest clone() {
        return (DescribeCustomRoutingAcceleratorRequest) super.clone();
    }

}
/** * This code was generated by * \ / _ _ _| _ _ * | (_)\/(_)(_|\/| |(/_ v1.0.0 * / / */ package com.twilio.rest.trusthub.v1.customerprofiles; import com.fasterxml.jackson.databind.ObjectMapper; import com.twilio.Twilio; import com.twilio.converter.DateConverter; import com.twilio.converter.Promoter; import com.twilio.exception.TwilioException; import com.twilio.http.HttpMethod; import com.twilio.http.Request; import com.twilio.http.Response; import com.twilio.http.TwilioRestClient; import com.twilio.rest.Domains; import mockit.Mocked; import mockit.NonStrictExpectations; import org.junit.Before; import org.junit.Test; import java.net.URI; import static com.twilio.TwilioTest.serialize; import static org.junit.Assert.*; public class CustomerProfilesEvaluationsTest { @Mocked private TwilioRestClient twilioRestClient; @Before public void setUp() throws Exception { Twilio.init("AC123", "AUTH TOKEN"); } @Test public void testCreateRequest() { new NonStrictExpectations() {{ Request request = new Request(HttpMethod.POST, Domains.TRUSTHUB.toString(), "/v1/CustomerProfiles/BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Evaluations"); request.addPostParam("PolicySid", serialize("RNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")); twilioRestClient.request(request); times = 1; result = new Response("", 500); twilioRestClient.getAccountSid(); result = "AC123"; }}; try { CustomerProfilesEvaluations.creator("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "RNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").create(); fail("Expected TwilioException to be thrown for 500"); } catch (TwilioException e) {} } @Test public void testCreateResponse() { new NonStrictExpectations() {{ twilioRestClient.request((Request) any); result = new Response("{\"sid\": \"ELaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"account_sid\": \"ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"policy_sid\": \"RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"customer_profile_sid\": \"BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"status\": \"noncompliant\",\"date_created\": 
\"2020-04-28T18:14:01Z\",\"url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations/ELaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"results\": [{\"friendly_name\": \"Business\",\"object_type\": \"business\",\"passed\": false,\"failure_reason\": \"A Business End-User is missing. Please add one to the regulatory bundle.\",\"error_code\": 22214,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Business Name\",\"object_field\": \"business_name\",\"failure_reason\": \"The Business Name is missing. Please enter in a Business Name on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"Business Registration Number\",\"object_field\": \"business_registration_number\",\"failure_reason\": \"The Business Registration Number is missing. Please enter in a Business Registration Number on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing. Please enter in a First Name on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing. Please enter in a Last Name on the Business information.\",\"error_code\": 22215}],\"requirement_friendly_name\": \"Business\",\"requirement_name\": \"business_info\"},{\"friendly_name\": \"Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Business Name\",\"object_field\": \"business_name\",\"failure_reason\": \"The Business Name is missing. Or, it does not match the Business Name you entered within Business information. 
Please enter in the Business Name shown on the Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative or make sure both Business Name fields use the same exact inputs.\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Business Name\",\"requirement_name\": \"business_name_info\"},{\"friendly_name\": \"Excerpt from the commercial register showing French address\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register showing French address is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Address sid(s)\",\"object_field\": \"address_sids\",\"failure_reason\": \"The Address is missing. Please enter in the address shown on the Excerpt from the commercial register showing French address.\",\"error_code\": 22219}],\"requirement_friendly_name\": \"Business Address (Proof of Address)\",\"requirement_name\": \"business_address_proof_info\"},{\"friendly_name\": \"Excerpt from the commercial register (Extrait K-bis)\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register (Extrait K-bis) is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Document Number\",\"object_field\": \"document_number\",\"failure_reason\": \"The Document Number is missing. Please enter in the Document Number shown on the Excerpt from the commercial register (Extrait K-bis).\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Business Registration Number\",\"requirement_name\": \"business_reg_no_info\"},{\"friendly_name\": \"Government-issued ID\",\"object_type\": \"government_issued_document\",\"passed\": false,\"failure_reason\": \"A Government-issued ID is missing. 
Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing. Or, it does not match the First Name you entered within Business information. Please enter in the First Name shown on the Government-issued ID or make sure both First Name fields use the same exact inputs.\",\"error_code\": 22217},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing. Or, it does not match the Last Name you entered within Business information. Please enter in the Last Name shown on the Government-issued ID or make sure both Last Name fields use the same exact inputs.\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Name of Authorized Representative\",\"requirement_name\": \"name_of_auth_rep_info\"},{\"friendly_name\": \"Executed Copy of Power of Attorney\",\"object_type\": \"power_of_attorney\",\"passed\": false,\"failure_reason\": \"An Executed Copy of Power of Attorney is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [],\"requirement_friendly_name\": \"Power of Attorney\",\"requirement_name\": \"power_of_attorney_info\"},{\"friendly_name\": \"Government-issued ID\",\"object_type\": \"government_issued_document\",\"passed\": false,\"failure_reason\": \"A Government-issued ID is missing. 
Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing on the Governnment-Issued ID.\",\"error_code\": 22217},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing on the Government-issued ID\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Name of Person granted the Power of Attorney\",\"requirement_name\": \"name_in_power_of_attorney_info\"}]}", TwilioRestClient.HTTP_STATUS_CODE_CREATED); twilioRestClient.getObjectMapper(); result = new ObjectMapper(); }}; CustomerProfilesEvaluations.creator("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "RNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").create(); } @Test public void testReadRequest() { new NonStrictExpectations() {{ Request request = new Request(HttpMethod.GET, Domains.TRUSTHUB.toString(), "/v1/CustomerProfiles/BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Evaluations"); twilioRestClient.request(request); times = 1; result = new Response("", 500); twilioRestClient.getAccountSid(); result = "AC123"; }}; try { CustomerProfilesEvaluations.reader("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").read(); fail("Expected TwilioException to be thrown for 500"); } catch (TwilioException e) {} } @Test public void testReadEmptyResponse() { new NonStrictExpectations() {{ twilioRestClient.request((Request) any); result = new Response("{\"results\": [],\"meta\": {\"page\": 0,\"page_size\": 50,\"first_page_url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations?PageSize=50&Page=0\",\"previous_page_url\": null,\"url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations?PageSize=50&Page=0\",\"next_page_url\": null,\"key\": \"results\"}}", TwilioRestClient.HTTP_STATUS_CODE_OK); twilioRestClient.getObjectMapper(); result = new ObjectMapper(); }}; 
assertNotNull(CustomerProfilesEvaluations.reader("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").read()); } @Test public void testReadFullResponse() { new NonStrictExpectations() {{ twilioRestClient.request((Request) any); result = new Response("{\"results\": [{\"sid\": \"ELaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"account_sid\": \"ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"policy_sid\": \"RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"customer_profile_sid\": \"BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"status\": \"noncompliant\",\"date_created\": \"2020-04-28T18:14:01Z\",\"url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations/ELaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"results\": [{\"friendly_name\": \"Business\",\"object_type\": \"business\",\"passed\": false,\"failure_reason\": \"A Business End-User is missing. Please add one to the regulatory bundle.\",\"error_code\": 22214,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Business Name\",\"object_field\": \"business_name\",\"failure_reason\": \"The Business Name is missing. Please enter in a Business Name on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"Business Registration Number\",\"object_field\": \"business_registration_number\",\"failure_reason\": \"The Business Registration Number is missing. Please enter in a Business Registration Number on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing. Please enter in a First Name on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing. 
Please enter in a Last Name on the Business information.\",\"error_code\": 22215}],\"requirement_friendly_name\": \"Business\",\"requirement_name\": \"business_info\"},{\"friendly_name\": \"Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Business Name\",\"object_field\": \"business_name\",\"failure_reason\": \"The Business Name is missing. Or, it does not match the Business Name you entered within Business information. Please enter in the Business Name shown on the Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative or make sure both Business Name fields use the same exact inputs.\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Business Name\",\"requirement_name\": \"business_name_info\"},{\"friendly_name\": \"Excerpt from the commercial register showing French address\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register showing French address is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Address sid(s)\",\"object_field\": \"address_sids\",\"failure_reason\": \"The Address is missing. 
Please enter in the address shown on the Excerpt from the commercial register showing French address.\",\"error_code\": 22219}],\"requirement_friendly_name\": \"Business Address (Proof of Address)\",\"requirement_name\": \"business_address_proof_info\"},{\"friendly_name\": \"Excerpt from the commercial register (Extrait K-bis)\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register (Extrait K-bis) is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Document Number\",\"object_field\": \"document_number\",\"failure_reason\": \"The Document Number is missing. Please enter in the Document Number shown on the Excerpt from the commercial register (Extrait K-bis).\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Business Registration Number\",\"requirement_name\": \"business_reg_no_info\"},{\"friendly_name\": \"Government-issued ID\",\"object_type\": \"government_issued_document\",\"passed\": false,\"failure_reason\": \"A Government-issued ID is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing. Or, it does not match the First Name you entered within Business information. Please enter in the First Name shown on the Government-issued ID or make sure both First Name fields use the same exact inputs.\",\"error_code\": 22217},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing. Or, it does not match the Last Name you entered within Business information. 
Please enter in the Last Name shown on the Government-issued ID or make sure both Last Name fields use the same exact inputs.\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Name of Authorized Representative\",\"requirement_name\": \"name_of_auth_rep_info\"},{\"friendly_name\": \"Executed Copy of Power of Attorney\",\"object_type\": \"power_of_attorney\",\"passed\": false,\"failure_reason\": \"An Executed Copy of Power of Attorney is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [],\"requirement_friendly_name\": \"Power of Attorney\",\"requirement_name\": \"power_of_attorney_info\"},{\"friendly_name\": \"Government-issued ID\",\"object_type\": \"government_issued_document\",\"passed\": false,\"failure_reason\": \"A Government-issued ID is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing on the Governnment-Issued ID.\",\"error_code\": 22217},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing on the Government-issued ID\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Name of Person granted the Power of Attorney\",\"requirement_name\": \"name_in_power_of_attorney_info\"}]}],\"meta\": {\"page\": 0,\"page_size\": 50,\"first_page_url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations?PageSize=50&Page=0\",\"previous_page_url\": null,\"url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations?PageSize=50&Page=0\",\"next_page_url\": null,\"key\": \"results\"}}", TwilioRestClient.HTTP_STATUS_CODE_OK); twilioRestClient.getObjectMapper(); result = new ObjectMapper(); }}; assertNotNull(CustomerProfilesEvaluations.reader("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").read()); } @Test public 
void testFetchRequest() { new NonStrictExpectations() {{ Request request = new Request(HttpMethod.GET, Domains.TRUSTHUB.toString(), "/v1/CustomerProfiles/BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Evaluations/ELXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"); twilioRestClient.request(request); times = 1; result = new Response("", 500); twilioRestClient.getAccountSid(); result = "AC123"; }}; try { CustomerProfilesEvaluations.fetcher("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "ELXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch(); fail("Expected TwilioException to be thrown for 500"); } catch (TwilioException e) {} } @Test public void testFetchResponse() { new NonStrictExpectations() {{ twilioRestClient.request((Request) any); result = new Response("{\"sid\": \"ELaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"account_sid\": \"ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"policy_sid\": \"RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"customer_profile_sid\": \"BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"status\": \"noncompliant\",\"date_created\": \"2020-04-28T18:14:01Z\",\"url\": \"https://trusthub.twilio.com/v1/CustomerProfiles/BUaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Evaluations/ELaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\"results\": [{\"friendly_name\": \"Business\",\"object_type\": \"business\",\"passed\": false,\"failure_reason\": \"A Business End-User is missing. Please add one to the regulatory bundle.\",\"error_code\": 22214,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Business Name\",\"object_field\": \"business_name\",\"failure_reason\": \"The Business Name is missing. Please enter in a Business Name on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"Business Registration Number\",\"object_field\": \"business_registration_number\",\"failure_reason\": \"The Business Registration Number is missing. 
Please enter in a Business Registration Number on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing. Please enter in a First Name on the Business information.\",\"error_code\": 22215},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing. Please enter in a Last Name on the Business information.\",\"error_code\": 22215}],\"requirement_friendly_name\": \"Business\",\"requirement_name\": \"business_info\"},{\"friendly_name\": \"Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Business Name\",\"object_field\": \"business_name\",\"failure_reason\": \"The Business Name is missing. Or, it does not match the Business Name you entered within Business information. Please enter in the Business Name shown on the Excerpt from the commercial register (Extrait K-bis) showing name of Authorized Representative or make sure both Business Name fields use the same exact inputs.\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Business Name\",\"requirement_name\": \"business_name_info\"},{\"friendly_name\": \"Excerpt from the commercial register showing French address\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register showing French address is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Address sid(s)\",\"object_field\": \"address_sids\",\"failure_reason\": \"The Address is missing. 
Please enter in the address shown on the Excerpt from the commercial register showing French address.\",\"error_code\": 22219}],\"requirement_friendly_name\": \"Business Address (Proof of Address)\",\"requirement_name\": \"business_address_proof_info\"},{\"friendly_name\": \"Excerpt from the commercial register (Extrait K-bis)\",\"object_type\": \"commercial_registrar_excerpt\",\"passed\": false,\"failure_reason\": \"An Excerpt from the commercial register (Extrait K-bis) is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"Document Number\",\"object_field\": \"document_number\",\"failure_reason\": \"The Document Number is missing. Please enter in the Document Number shown on the Excerpt from the commercial register (Extrait K-bis).\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Business Registration Number\",\"requirement_name\": \"business_reg_no_info\"},{\"friendly_name\": \"Government-issued ID\",\"object_type\": \"government_issued_document\",\"passed\": false,\"failure_reason\": \"A Government-issued ID is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing. Or, it does not match the First Name you entered within Business information. Please enter in the First Name shown on the Government-issued ID or make sure both First Name fields use the same exact inputs.\",\"error_code\": 22217},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing. Or, it does not match the Last Name you entered within Business information. 
Please enter in the Last Name shown on the Government-issued ID or make sure both Last Name fields use the same exact inputs.\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Name of Authorized Representative\",\"requirement_name\": \"name_of_auth_rep_info\"},{\"friendly_name\": \"Executed Copy of Power of Attorney\",\"object_type\": \"power_of_attorney\",\"passed\": false,\"failure_reason\": \"An Executed Copy of Power of Attorney is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [],\"requirement_friendly_name\": \"Power of Attorney\",\"requirement_name\": \"power_of_attorney_info\"},{\"friendly_name\": \"Government-issued ID\",\"object_type\": \"government_issued_document\",\"passed\": false,\"failure_reason\": \"A Government-issued ID is missing. Please add one to the regulatory bundle.\",\"error_code\": 22216,\"valid\": [],\"invalid\": [{\"friendly_name\": \"First Name\",\"object_field\": \"first_name\",\"failure_reason\": \"The First Name is missing on the Governnment-Issued ID.\",\"error_code\": 22217},{\"friendly_name\": \"Last Name\",\"object_field\": \"last_name\",\"failure_reason\": \"The Last Name is missing on the Government-issued ID\",\"error_code\": 22217}],\"requirement_friendly_name\": \"Name of Person granted the Power of Attorney\",\"requirement_name\": \"name_in_power_of_attorney_info\"}]}", TwilioRestClient.HTTP_STATUS_CODE_OK); twilioRestClient.getObjectMapper(); result = new ObjectMapper(); }}; assertNotNull(CustomerProfilesEvaluations.fetcher("BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "ELXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()); } }
package com.example.demo.domain.docube;

import com.example.demo.domain.docube.data.Docube;
import com.example.demo.domain.docube.dto.DocubeDto;

import java.util.Date;

/**
 * Converts between {@link Docube} entities and {@link DocubeDto} transfer objects.
 *
 * @author : Jaden
 * @since : 20/09/2018
 */
public class DocubeConverter {

    /**
     * Maps a persisted {@link Docube} entity onto a new {@link DocubeDto}.
     *
     * @param docube the entity to convert; must not be {@code null}
     * @return a DTO carrying the entity's id, content, metadata and timestamps
     */
    public static DocubeDto toDocubeDto(Docube docube) {
        return new DocubeDto()
                .setDocubeId(docube.getId())
                .setTitle(docube.getTitle())
                .setBody(docube.getBody())
                .setCategory(docube.getCategory())
                .setWriter(docube.getWriter())
                .setTags(docube.getTags())
                .setLike(docube.getLike())
                .setUserId(docube.getUserId())
                .setCreatedDate(docube.getCreatedDate())
                .setUpdatedDate(docube.getUpdateDate());
    }

    /**
     * Builds a fresh {@link Docube} entity from the given DTO for initial persistence.
     * <p>
     * The DTO's id and timestamps are intentionally ignored: both the created
     * and updated dates are stamped with the same "now" instant, and the
     * deleted flag starts out {@code false}.
     *
     * @param docubeDto the incoming DTO; must not be {@code null}
     * @return a new, not-yet-persisted entity
     */
    public static Docube toDocube(DocubeDto docubeDto) {
        Docube docube = new Docube();
        docube.setTitle(docubeDto.getTitle());
        docube.setBody(docubeDto.getBody());
        docube.setTags(docubeDto.getTags());
        docube.setLike(docubeDto.getLike());
        docube.setWriter(docubeDto.getWriter());
        docube.setCategory(docubeDto.getCategory());
        docube.setUserId(docubeDto.getUserId());
        // Use a single timestamp so createdDate and updateDate are identical
        // at creation time (two separate new Date() calls could differ by ms).
        Date now = new Date();
        docube.setCreatedDate(now);
        docube.setUpdateDate(now);
        docube.setDeleted(false);
        return docube;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.test; import junit.framework.TestResult; import javax.jcr.Node; import javax.jcr.PropertyType; import javax.jcr.Session; import javax.jcr.NodeIterator; import javax.jcr.RepositoryException; import javax.jcr.NamespaceRegistry; import javax.jcr.Repository; import javax.jcr.NamespaceException; import javax.jcr.RangeIterator; import javax.jcr.UnsupportedRepositoryOperationException; import javax.jcr.Value; import javax.jcr.ValueFactory; import javax.jcr.nodetype.NoSuchNodeTypeException; import javax.jcr.nodetype.NodeDefinition; import javax.jcr.nodetype.ConstraintViolationException; import javax.jcr.nodetype.NodeType; import javax.jcr.nodetype.PropertyDefinition; import javax.jcr.retention.RetentionManager; import java.util.StringTokenizer; import java.util.Random; import java.util.List; import java.util.Arrays; /** * Abstract base class for all JCR test classes. */ public abstract class AbstractJCRTest extends JUnitTest { /** * Pool of helper objects to access repository transparently */ private static final RepositoryHelperPool HELPER_POOL = RepositoryHelperPoolImpl.getInstance(); /** * Namespace URI for jcr prefix. 
*/
    public static final String NS_JCR_URI = "http://www.jcp.org/jcr/1.0";

    /**
     * Namespace URI for nt prefix.
     */
    public static final String NS_NT_URI = "http://www.jcp.org/jcr/nt/1.0";

    /**
     * Namespace URI for mix prefix.
     */
    public static final String NS_MIX_URI = "http://www.jcp.org/jcr/mix/1.0";

    /**
     * Namespace URI for sv prefix
     */
    public static final String NS_SV_URI = "http://www.jcp.org/jcr/sv/1.0";

    /**
     * The repository helper for this test.
     */
    private RepositoryHelper helper;

    /**
     * JCR Name jcr:primaryType using the namespace resolver of the current session.
     */
    protected String jcrPrimaryType;

    /**
     * JCR Name jcr:mixinTypes using the namespace resolver of the current session.
     */
    protected String jcrMixinTypes;

    /**
     * JCR Name jcr:predecessors using the namespace resolver of the current session.
     */
    protected String jcrPredecessors;

    /**
     * JCR Name jcr:successors using the namespace resolver of the current session.
     */
    protected String jcrSuccessors;

    /**
     * JCR Name jcr:created using the namespace resolver of the current session.
     */
    protected String jcrCreated;

    /**
     * JCR Name jcr:versionHistory using the namespace resolver of the current session.
     */
    protected String jcrVersionHistory;

    /**
     * JCR Name jcr:copiedFrom using the namespace resolver of the current session.
     */
    protected String jcrCopiedFrom;

    /**
     * JCR Name jcr:frozenNode using the namespace resolver of the current session.
     */
    protected String jcrFrozenNode;

    /**
     * JCR Name jcr:frozenUuid using the namespace resolver of the current session.
     */
    protected String jcrFrozenUuid;

    /**
     * JCR Name jcr:rootVersion using the namespace resolver of the current session.
     */
    protected String jcrRootVersion;

    /**
     * JCR Name jcr:isCheckedOut using the namespace resolver of the current session.
     */
    protected String jcrIsCheckedOut;

    /**
     * JCR Name jcr:baseVersion using the namespace resolver of the current session.
     */
    protected String jcrBaseVersion;

    /**
     * JCR Name jcr:uuid using the namespace resolver of the current session.
     */
    protected String jcrUUID;

    /**
     * JCR Name jcr:lockOwner using the namespace resolver of the current session.
     */
    protected String jcrLockOwner;

    /**
     * JCR Name jcr:lockIsDeep using the namespace resolver of the current session.
     */
    protected String jcrlockIsDeep;

    /**
     * JCR Name jcr:mergeFailed using the namespace resolver of the current session.
     */
    protected String jcrMergeFailed;

    /**
     * JCR Name jcr:system using the namespace resolver of the current session.
     */
    protected String jcrSystem;

    /**
     * JCR Name nt:base using the namespace resolver of the current session.
     */
    protected String ntBase;

    /**
     * JCR Name nt:unstructured using the namespace resolver of the current session.
     */
    protected String ntUnstructured;

    /**
     * JCR Name nt:version using the namespace resolver of the current session.
     */
    protected String ntVersion;

    /**
     * JCR Name nt:versionHistory using the namespace resolver of the current session.
     */
    protected String ntVersionHistory;

    /**
     * JCR Name nt:versionLabels using the namespace resolver of the current session.
     */
    protected String ntVersionLabels;

    /**
     * JCR Name nt:frozenNode using the namespace resolver of the current session.
     */
    protected String ntFrozenNode;

    /**
     * JCR Name mix:referenceable using the namespace resolver of the current session.
     */
    protected String mixReferenceable;

    /**
     * JCR Name mix:versionable using the namespace resolver of the current session.
     */
    protected String mixVersionable;

    /**
     * JCR Name mix:simpleVersionable using the namespace resolver of the current session.
     */
    protected String mixSimpleVersionable;

    /**
     * JCR Name mix:lockable using the namespace resolver of the current session.
     */
    protected String mixLockable;

    /**
     * JCR Name mix:title using the namespace resolver of the current session.
     */
    protected String mixTitle;

    /**
     * JCR Name mix:shareable using the namespace resolver of the current session.
     */
    protected String mixShareable;

    /**
     * JCR Name nt:query using the namespace resolver of the current session.
     */
    protected String ntQuery;

    /**
     * Relative path to the test root node.
     */
    protected String testPath;

    /**
     * Absolute path to the test root node.
     */
    protected String testRoot;

    /**
     * The node type name for newly created nodes.
     */
    protected String testNodeType;

    /**
     * The node type name for the test root node.
     */
    protected String testNodeTypeTestRoot;

    /**
     * A node type that does not allow any child nodes, such as nt:base.
     */
    protected String testNodeTypeNoChildren;

    /**
     * Name of a node that will be created during a test case.
     */
    protected String nodeName1;

    /**
     * Name of a node that will be created during a test case.
     */
    protected String nodeName2;

    /**
     * Name of a node that will be created during a test case.
     */
    protected String nodeName3;

    /**
     * Name of a node that will be created during a test case.
     */
    protected String nodeName4;

    /**
     * Name of a property that will be used during a test case.
     */
    protected String propertyName1;

    /**
     * Name of a property that will be used during a test case.
     */
    protected String propertyName2;

    /**
     * Name of a workspace to use instead of the default workspace.
     */
    protected String workspaceName;

    /**
     * The superuser session for the default workspace
     */
    protected Session superuser;

    /**
     * Flag that indicates if the current test is a read-only test, that is
     * no content is written to the workspace by the test.
     */
    protected boolean isReadOnly = false;

    /**
     * The root <code>Node</code> for testing
     */
    protected Node testRootNode;

    /**
     * The value factory for {@link #superuser}.
*/
    protected ValueFactory vf;

    /**
     * Sets up the test fixture: reads the mandatory configuration properties,
     * opens the superuser session, resolves the common JCR names with that
     * session's namespace prefixes, registers configured custom namespaces,
     * and prepares (or, for read-only tests, locates) the test root node.
     */
    protected void setUp() throws Exception {
        super.setUp();
        testRoot = getProperty(RepositoryStub.PROP_TESTROOT);
        if (testRoot == null) {
            fail("Property '" + RepositoryStub.PROP_TESTROOT + "' is not defined.");
        }

        // cut off '/' to build testPath
        testPath = testRoot.substring(1);
        testNodeType = getProperty(RepositoryStub.PROP_NODETYPE);
        testNodeTypeTestRoot = getProperty(RepositoryStub.PROP_NODETYPETESTROOT);
        if (testNodeTypeTestRoot == null) {
            testNodeTypeTestRoot = testNodeType; // backwards compatibility
        }
        testNodeTypeNoChildren = getProperty(RepositoryStub.PROP_NODETYPENOCHILDREN);

        // setup node names
        nodeName1 = getProperty(RepositoryStub.PROP_NODE_NAME1);
        if (nodeName1 == null) {
            fail("Property '" + RepositoryStub.PROP_NODE_NAME1 + "' is not defined.");
        }
        nodeName2 = getProperty(RepositoryStub.PROP_NODE_NAME2);
        if (nodeName2 == null) {
            fail("Property '" + RepositoryStub.PROP_NODE_NAME2 + "' is not defined.");
        }
        nodeName3 = getProperty(RepositoryStub.PROP_NODE_NAME3);
        if (nodeName3 == null) {
            fail("Property '" + RepositoryStub.PROP_NODE_NAME3 + "' is not defined.");
        }
        nodeName4 = getProperty(RepositoryStub.PROP_NODE_NAME4);
        if (nodeName4 == null) {
            fail("Property '" + RepositoryStub.PROP_NODE_NAME4 + "' is not defined.");
        }
        propertyName1 = getProperty(RepositoryStub.PROP_PROP_NAME1);
        if (propertyName1 == null) {
            fail("Property '" + RepositoryStub.PROP_PROP_NAME1 + "' is not defined.");
        }
        propertyName2 = getProperty(RepositoryStub.PROP_PROP_NAME2);
        if (propertyName2 == null) {
            fail("Property '" + RepositoryStub.PROP_PROP_NAME2 + "' is not defined.");
        }
        workspaceName = getProperty(RepositoryStub.PROP_WORKSPACE_NAME);
        if (workspaceName == null) {
            fail("Property '" + RepositoryStub.PROP_WORKSPACE_NAME + "' is not defined.");
        }
        superuser = getHelper().getSuperuserSession();

        // setup some common names
        jcrPrimaryType = superuser.getNamespacePrefix(NS_JCR_URI) + ":primaryType";
        jcrMixinTypes = superuser.getNamespacePrefix(NS_JCR_URI) + ":mixinTypes";
        jcrPredecessors = superuser.getNamespacePrefix(NS_JCR_URI) + ":predecessors";
        jcrSuccessors = superuser.getNamespacePrefix(NS_JCR_URI) + ":successors";
        jcrCreated = superuser.getNamespacePrefix(NS_JCR_URI) + ":created";
        jcrVersionHistory = superuser.getNamespacePrefix(NS_JCR_URI) + ":versionHistory";
        jcrCopiedFrom = superuser.getNamespacePrefix(NS_JCR_URI) + ":copiedFrom";
        jcrFrozenNode = superuser.getNamespacePrefix(NS_JCR_URI) + ":frozenNode";
        jcrFrozenUuid = superuser.getNamespacePrefix(NS_JCR_URI) + ":frozenUuid";
        jcrRootVersion = superuser.getNamespacePrefix(NS_JCR_URI) + ":rootVersion";
        jcrBaseVersion = superuser.getNamespacePrefix(NS_JCR_URI) + ":baseVersion";
        jcrIsCheckedOut = superuser.getNamespacePrefix(NS_JCR_URI) + ":isCheckedOut";
        jcrUUID = superuser.getNamespacePrefix(NS_JCR_URI) + ":uuid";
        jcrLockOwner = superuser.getNamespacePrefix(NS_JCR_URI) + ":lockOwner";
        jcrlockIsDeep = superuser.getNamespacePrefix(NS_JCR_URI) + ":lockIsDeep";
        jcrMergeFailed = superuser.getNamespacePrefix(NS_JCR_URI) + ":mergeFailed";
        jcrSystem = superuser.getNamespacePrefix(NS_JCR_URI) + ":system";
        ntBase = superuser.getNamespacePrefix(NS_NT_URI) + ":base";
        ntUnstructured = superuser.getNamespacePrefix(NS_NT_URI) + ":unstructured";
        ntVersion = superuser.getNamespacePrefix(NS_NT_URI) + ":version";
        ntVersionHistory = superuser.getNamespacePrefix(NS_NT_URI) + ":versionHistory";
        ntVersionLabels = superuser.getNamespacePrefix(NS_NT_URI) + ":versionLabels";
        ntFrozenNode = superuser.getNamespacePrefix(NS_NT_URI) + ":frozenNode";
        mixReferenceable = superuser.getNamespacePrefix(NS_MIX_URI) + ":referenceable";
        mixVersionable = superuser.getNamespacePrefix(NS_MIX_URI) + ":versionable";
        mixSimpleVersionable = superuser.getNamespacePrefix(NS_MIX_URI) + ":simpleVersionable";
        mixLockable = superuser.getNamespacePrefix(NS_MIX_URI) + ":lockable";
        mixShareable = superuser.getNamespacePrefix(NS_MIX_URI) + ":shareable";
        mixTitle = superuser.getNamespacePrefix(NS_MIX_URI) + ":title";
        ntQuery = superuser.getNamespacePrefix(NS_NT_URI) + ":query";

        // setup custom namespaces
        if (isSupported(Repository.LEVEL_2_SUPPORTED)) {
            NamespaceRegistry nsReg = superuser.getWorkspace().getNamespaceRegistry();
            String namespaces = getProperty(RepositoryStub.PROP_NAMESPACES);
            if (namespaces != null) {
                String[] prefixes = namespaces.split(" ");
                for (int i = 0; i < prefixes.length; i++) {
                    String uri = getProperty(RepositoryStub.PROP_NAMESPACES + "." + prefixes[i]);
                    if (uri != null) {
                        try {
                            nsReg.getPrefix(uri);
                        } catch (NamespaceException e) {
                            // not yet registered
                            nsReg.registerNamespace(prefixes[i], uri);
                        }
                    }
                }
            }
            // NOTE(review): vf is only initialized when level 2 is supported;
            // it stays null otherwise.
            vf = superuser.getValueFactory();
        }
        if (isReadOnly) {
            if (testPath.length() == 0) {
                // test root is the root node
                testRootNode = superuser.getRootNode();
            } else if (!superuser.getRootNode().hasNode(testPath)) {
                cleanUp();
                fail("Workspace does not contain test data at: " + testRoot);
            } else {
                testRootNode = superuser.getRootNode().getNode(testPath);
            }
        } else if (isSupported(Repository.LEVEL_2_SUPPORTED)) {
            testRootNode = cleanUpTestRoot(superuser);
            // also clean second workspace
            Session s = getHelper().getSuperuserSession(workspaceName);
            try {
                cleanUpTestRoot(s);
            } finally {
                s.logout();
            }
        } else {
            cleanUp();
            fail("Test case requires level 2 support.");
        }
    }

    /**
     * Releases the fixture: cleans the test root (unless read-only or level 2
     * is unsupported), logs out the superuser session and clears the cached
     * session, value factory and test root references. Exceptions from the
     * clean-up itself are logged rather than propagated so the session is
     * always logged out.
     */
    protected void cleanUp() throws Exception {
        if (superuser != null) {
            try {
                if (!isReadOnly && isSupported(Repository.LEVEL_2_SUPPORTED)) {
                    cleanUpTestRoot(superuser);
                }
            } catch (Exception e) {
                log.println("Exception in tearDown: " + e.toString());
            } finally {
                superuser.logout();
                superuser = null;
                vf = null;
            }
        }
        testRootNode = null;
    }

    /**
     * Tears down the fixture via {@link #cleanUp()} and then delegates to the
     * superclass.
     */
    protected void tearDown() throws Exception {
        cleanUp();
        super.tearDown();
    }

    /**
     * Runs the test cases of this test class and reports the results to
     * <code>testResult</code>. In contrast to the default implementation of
     * <code>TestCase.run()</code> this method will suppress tests errors with
     * a {@link NotExecutableException}.
That is, test cases that throw this
 * exception will still result as successful.
 * @param testResult the test result.
 */
    public void run(TestResult testResult) {
        try {
            // borrow a repository helper from the shared pool for the
            // duration of this test run and always return it afterwards
            helper = HELPER_POOL.borrowHelper();
            try {
                super.run(new JCRTestResult(testResult, log));
            } finally {
                HELPER_POOL.returnHelper(helper);
                helper = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * @return the repository helper instance that is associated with this test.
     */
    protected RepositoryHelper getHelper() {
        return helper;
    }

    /**
     * Returns the value of the configuration property with <code>propName</code>.
     * The sequence how configuration properties are read is the following:
     * <ol>
     * <li><code>javax.jcr.tck.&lt;testClassName&gt;.&lt;testCaseName&gt;.&lt;propName&gt;</code></li>
     * <li><code>javax.jcr.tck.&lt;testClassName&gt;.&lt;propName&gt;</code></li>
     * <li><code>javax.jcr.tck.&lt;packageName&gt;.&lt;propName&gt;</code></li>
     * <li><code>javax.jcr.tck.&lt;propName&gt;</code></li>
     * </ol>
     * Where:
     * <ul>
     * <li><code>&lt;testClassName&gt;</code> is the name of the test class without package prefix.</li>
     * <li><code>&lt;testMethodName&gt;</code> is the name of the test method</li>
     * <li><code>&lt;packageName&gt;</code> is the name of the package of the test class.
     * Example: packageName for <code>org.apache.jackrabbit.test.api.BooleanPropertyTest</code>: <code>api</code></li>
     * </ul>
     * @param propName the name of the configuration property.
     * @return the value of the property or <code>null</code> if the property
     * does not exist.
     * @throws RepositoryException if an error occurs while reading from
     * the configuration.
     */
    public String getProperty(String propName) throws RepositoryException {
        String testCaseName = getName();
        String testClassName = getClass().getName();
        String testPackName = "";
        int idx;
        if ((idx = testClassName.lastIndexOf('.')) > -1) {
            // last package segment, e.g. "api" for ...test.api.Foo
            testPackName = testClassName.substring(testClassName.lastIndexOf('.', idx - 1) + 1, idx);
            testClassName = testClassName.substring(idx + 1);
        }

        // 1) test case specific property first
        String value = getHelper().getProperty(RepositoryStub.PROP_PREFIX + "."
                + testClassName + "." + testCaseName + "." + propName);
        if (value != null) {
            return value;
        }

        // 2) check test class property
        value = getHelper().getProperty(RepositoryStub.PROP_PREFIX + "."
                + testClassName + "." + propName);
        if (value != null) {
            return value;
        }

        // 3) check package property
        value = getHelper().getProperty(RepositoryStub.PROP_PREFIX + "."
                + testPackName + "." + propName);
        if (value != null) {
            return value;
        }

        // finally try global property
        return getHelper().getProperty(RepositoryStub.PROP_PREFIX + "." + propName);
    }

    /**
     * Returns the value of the configuration property with specified
     * <code>name</code>. If the property does not exist <code>defaultValue</code> is
     * returned.
     * <p>
     * Configuration properties are defined in the file:
     * <code>repositoryStubImpl.properties</code>.
     *
     * @param name the name of the property to retrieve.
     * @param defaultValue the default value if the property does not exist.
     * @return the value of the property or <code>defaultValue</code> if non existent.
     * @throws RepositoryException if the configuration file cannot be found.
     */
    public String getProperty(String name, String defaultValue) throws RepositoryException {
        String val = getProperty(name);
        if (val == null) {
            val = defaultValue;
        }
        return val;
    }

    /**
     * Create a JCR value based on the configuration.
     *
     * @param s the session whose value factory is used to create the value.
     * @param valueProp Name of the config property that contains the property value.
     * @param typeProp Name of the config property that contains the property type.
* If the config parameter is missing, {@link PropertyType#STRING} is used * to create the JCR value. * @param defaultValue Default value to be used if the config does not define * the value property. * @return JCR value to be used for a test. * @throws RepositoryException */ public Value getJcrValue(Session s, String valueProp, String typeProp, String defaultValue) throws RepositoryException { ValueFactory vf = s.getValueFactory(); String val = getProperty(valueProp, defaultValue); int type = PropertyType.valueFromName(getProperty(typeProp, PropertyType.TYPENAME_STRING)); return vf.createValue(val, type); } /** * Returns the size of the <code>RangeIterator</code> <code>it</code>. * Note, that the <code>RangeIterator</code> might get consumed, because * {@link RangeIterator#getSize()} might return -1 (information unavailable). * @param it a <code>RangeIterator</code>. * @return the size of the iterator (number of elements). */ protected long getSize(RangeIterator it) { long size = it.getSize(); if (size != -1) { return size; } size = 0; while (it.hasNext()) { it.next(); size++; } return size; } /** * Returns the local name for the given <code>jcrName</code>. * * @param jcrName * the name. * @return the local name part. */ protected static String getLocalName(String jcrName) { int idx = jcrName.indexOf(':'); if (idx != -1) { return jcrName.substring(idx + 1); } else { return jcrName; } } /** * Returns the prefix for the given <code>jcrName</code>. * * @param jcrName * the name. * @return the prefix part (empty string when not prefixed) */ protected static String getPrefix(String jcrName) { int idx = jcrName.indexOf(':'); if (idx != -1) { return jcrName.substring(0, idx); } else { return ""; } } /** * Returns the expanded name for the given <code>jcrName</code>. * * @param jcrName * the name. 
* @return the expanded name representation * @throws RepositoryException * @throws NamespaceException */ protected static String getQualifiedName(Session session, String jcrName) throws RepositoryException { String prefix = getPrefix(jcrName); String namespace = session.getNamespaceURI(prefix); String localname = getLocalName(jcrName); return (namespace.length() > 0 ? "{" + namespace + "}" : "{}") + localname; } /** * Returns the name of a workspace that is not accessible from * <code>session</code>. * @param session the session. * @return name of a non existing workspace. * @throws RepositoryException if an error occurs. */ protected String getNonExistingWorkspaceName(Session session) throws RepositoryException { List<String> names = Arrays.asList(session.getWorkspace().getAccessibleWorkspaceNames()); String nonExisting = null; while (nonExisting == null) { String name = createRandomString(10); if (!names.contains(name)) { nonExisting = name; } } return nonExisting; } /** * Creates a <code>String</code> with a random sequence of characters * using 'a' - 'z'. * @param numChars number of characters. * @return the generated String. */ protected String createRandomString(int numChars) { Random rand = new Random(System.currentTimeMillis()); StringBuffer tmp = new StringBuffer(numChars); for (int i = 0; i < numChars; i++) { char c = (char) (rand.nextInt(('z' + 1) - 'a') + 'a'); tmp.append(c); } return tmp.toString(); } /** * Returns <code>true</code> if this repository support a certain optional * feature; otherwise <code>false</code> is returned. If there is no * such <code>descriptorKey</code> present in the repository, this method * also returns false. * * @param descriptorKey the descriptor key. * @return <code>true</code> if the option is supported. * @throws RepositoryException if an error occurs. 
*/ protected boolean isSupported(String descriptorKey) throws RepositoryException { return "true".equals(getHelper().getRepository().getDescriptor(descriptorKey)); } /** * Throws a <code>NotExecutableException</code> if the repository does * not support the feature identified by the given <code>discriptorKey</code>. * * @param descriptorKey the descriptor key. * @throws RepositoryException if an error occurs. * @throws NotExecutableException If the feature is not supported. */ protected void checkSupportedOption(String descriptorKey) throws RepositoryException, NotExecutableException { String value = getHelper().getRepository().getDescriptor(descriptorKey); if (value == null || ! Boolean.valueOf(value).booleanValue()) { throw new NotExecutableException ( "Repository feature not supported: " + descriptorKey); } } /** * Checks that the repository supports multiple workspace, otherwise aborts with * {@link NotExecutableException}. * @throws NotExecutableException when the repository only supports a single * workspace */ protected void ensureMultipleWorkspacesSupported() throws RepositoryException, NotExecutableException { String workspacenames[] = superuser.getWorkspace().getAccessibleWorkspaceNames(); if (workspacenames == null || workspacenames.length < 2) { throw new NotExecutableException("This repository does not seem to support multiple workspaces."); } } /** * Checks that the repository supports locking, otherwise aborts with * {@link NotExecutableException}. 
* @throws NotExecutableException when the repository does not support locking */ protected void ensureLockingSupported() throws RepositoryException, NotExecutableException { if (!isSupported(Repository.OPTION_LOCKING_SUPPORTED)) { throw new NotExecutableException("This repository does not support locking."); } } private boolean canSetProperty(NodeType nodeType, String propertyName, int propertyType, boolean isMultiple) { PropertyDefinition propDefs[] = nodeType.getPropertyDefinitions(); for (int i = 0; i < propDefs.length; i++) { if (propDefs[i].getName().equals(propertyName) || propDefs[i].getName().equals("*")) { if ((propDefs[i].getRequiredType() == propertyType || propDefs[i].getRequiredType() == PropertyType.UNDEFINED) && propDefs[i].isMultiple() == isMultiple) { return true; } } } return false; } private boolean canSetProperty(Node node, String propertyName, int propertyType, boolean isMultiple) throws RepositoryException { if (canSetProperty(node.getPrimaryNodeType(), propertyName, propertyType, isMultiple)) { return true; } else { NodeType mixins[] = node.getMixinNodeTypes(); boolean canSetIt = false; for (int i = 0; i < mixins.length && !canSetIt; i++) { canSetIt |= canSetProperty(mixins[i], propertyName, propertyType, isMultiple); } return canSetIt; } } /** * Checks that the repository can set the property to the required type, otherwise aborts with * {@link NotExecutableException}. * @throws NotExecutableException when setting the property to the required * type is not supported */ protected void ensureCanSetProperty(Node node, String propertyName, int propertyType, boolean isMultiple) throws NotExecutableException, RepositoryException { if (! 
canSetProperty(node, propertyName, propertyType, isMultiple)) { throw new NotExecutableException("configured property name " + propertyName + " can not be set on node " + node.getPath()); } } /** * Checks that the repository can set the property to the required type, otherwise aborts with * {@link NotExecutableException}. * @throws NotExecutableException when setting the property to the required * type is not supported */ protected void ensureCanSetProperty(Node node, String propertyName, Value value) throws NotExecutableException, RepositoryException { ensureCanSetProperty(node, propertyName, value.getType(), false); } /** * Checks that the repository can set the property to the required type, otherwise aborts with * {@link NotExecutableException}. * @throws NotExecutableException when setting the property to the required * type is not supported */ protected void ensureCanSetProperty(Node node, String propertyName, Value[] values) throws NotExecutableException, RepositoryException { int propertyType = values.length == 0 ? PropertyType.UNDEFINED : values[0].getType(); if (! canSetProperty(node, propertyName, propertyType, true)) { throw new NotExecutableException("configured property name " + propertyName + " can not be set on node " + node.getPath()); } } /** * Checks that the repository supports the specified node type, otherwise aborts with * {@link NotExecutableException} * @throws NotExecutableException when the specified node type is unknown */ protected void ensureKnowsNodeType(Session session, String nodetype) throws NotExecutableException, RepositoryException { try { session.getWorkspace().getNodeTypeManager().getNodeType(nodetype); } catch (NoSuchNodeTypeException ex) { throw new NotExecutableException("Repository does not support node type " + nodetype); } } /** * Ensures that the given <code>node</code> is of the given mixin type. * * @param node a node. * @param mixin the name of a mixin type. 
* @throws NotExecutableException if the node is not of type mixin and the * mixin cannot be added. * @throws RepositoryException if an error occurs. */ protected void ensureMixinType(Node node, String mixin) throws NotExecutableException, RepositoryException { if (!node.isNodeType(mixin)) { if (node.canAddMixin(mixin)) { node.addMixin(mixin); } else { throw new NotExecutableException(node.getPath() + " does not support adding " + mixin); } } } /** * Checks whether the node already has the specified mixin node type */ protected boolean needsMixin(Node node, String mixin) throws RepositoryException { return ! node.getSession().getWorkspace().getNodeTypeManager().getNodeType(node.getPrimaryNodeType().getName()).isNodeType(mixin); } /** * Reverts any pending changes made by <code>s</code> and deletes any nodes * under {@link #testRoot}. If there is no node at {@link #testRoot} then * the necessary nodes are created. * * @param s the session to clean up. * @return the {@link javax.jcr.Node} that represents the test root. * @throws RepositoryException if an error occurs. 
*/
    protected Node cleanUpTestRoot(Session s) throws RepositoryException {
        // do a 'rollback': discard any transient changes still pending on the session
        s.refresh(false);
        Node root = s.getRootNode();
        Node testRootNode;
        if (root.hasNode(testPath)) {
            // The retention manager is an optional repository feature; fall
            // back to null when the repository does not implement it.
            RetentionManager rm;
            try {
                rm = s.getRetentionManager();
            } catch (UnsupportedRepositoryOperationException e) {
                rm = null;
            }
            // clean test root
            testRootNode = root.getNode(testPath);
            NodeIterator children = testRootNode.getNodes();
            while (children.hasNext()) {
                Node child = children.nextNode();
                // Remove retention policy if needed
                String childPath = child.getPath();
                if (rm != null && rm.getRetentionPolicy(childPath) != null) {
                    rm.removeRetentionPolicy(childPath);
                    // persist the policy removal right away
                    s.save();
                }
                NodeDefinition nodeDef = child.getDefinition();
                // Mandatory/protected children cannot be removed; skip them.
                if (!nodeDef.isMandatory() && !nodeDef.isProtected()) {
                    // try to remove child; constraint violations are logged
                    // and tolerated (best-effort clean-up)
                    try {
                        child.remove();
                    } catch (ConstraintViolationException e) {
                        log.println("unable to remove node: " + child.getPath());
                    }
                }
            }
        } else {
            // create nodes to testPath, reusing intermediate nodes that already exist
            StringTokenizer names = new StringTokenizer(testPath, "/");
            Node currentNode = root;
            while (names.hasMoreTokens()) {
                String name = names.nextToken();
                if (currentNode.hasNode(name)) {
                    currentNode = currentNode.getNode(name);
                } else {
                    currentNode = currentNode.addNode(name, testNodeTypeTestRoot);
                }
            }
            testRootNode = currentNode;
        }
        // Persist the clean-up / setup work.
        s.save();
        return testRootNode;
    }
}
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayDeque;

/**
 * Stack with O(1) maximum queries.
 * Commands read from stdin (first line = command count):
 *   "1 x" push x, "2" pop, "3" append the current maximum to the output.
 * All answers to "3" are printed at the end, one per line.
 */
public class Test {
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader((System.in)));
        int numberOfCommands = Integer.parseInt(reader.readLine());
        ArrayDeque<Integer> stack = new ArrayDeque<>();
        // Auxiliary stack: maxStack.peek() is always the maximum of `stack`.
        // BUG FIX 1: duplicates of the current maximum are pushed again
        // (condition ">=" / isEmpty), so popping one occurrence of the maximum
        // no longer loses it while an equal element is still on the stack.
        ArrayDeque<Integer> maxStack = new ArrayDeque<>();
        int maxNumber = Integer.MIN_VALUE;
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < numberOfCommands; i++) {
            String[] command = reader.readLine().split(" ");
            short typeOfCommand = Short.parseShort(command[0]);
            if (numberOfCommands <= 1 && typeOfCommand != 3) {
                // Defensive guard kept from the original: with a single
                // non-query command there is nothing observable to do.
                break;
            }
            switch (typeOfCommand) {
                case 1:
                    int currentElement = Integer.parseInt(command[1]);
                    stack.push(currentElement);
                    if (maxStack.isEmpty() || currentElement >= maxStack.peek()) {
                        maxStack.push(currentElement);
                    }
                    maxNumber = maxStack.peek();
                    break;
                case 2:
                    int current = stack.pop();
                    if (!maxStack.isEmpty() && current == maxStack.peek()) {
                        maxStack.pop();
                    }
                    // BUG FIX 2: the empty-stack sentinel was Short.MIN_VALUE
                    // here but Integer.MIN_VALUE initially, so values below
                    // -32768 pushed afterwards were never seen as maxima.
                    maxNumber = maxStack.isEmpty() ? Integer.MIN_VALUE : maxStack.peek();
                    break;
                case 3:
                    sb.append(maxNumber).append(System.lineSeparator());
                    break;
            }
        }
        System.out.println(sb.toString());
    }
}
package array; import java.util.Arrays; public class ConcatenateTwoArrays02 { public static void main(String[] args) { int[] array1 = {1, 2, 3}; int[] array2 = {4, 5, 6}; int length = array1.length + array2.length; int[] result = new int[length]; int pos = 0; for (int element : array1) { result[pos] = element; pos++; } for (int element : array2) { result[pos] = element; pos++; } System.out.println(Arrays.toString(result)); } } /* In the above program, instead of using arraycopy, we manually copy each element of both arrays array1 and array2 to result. We store the total length required for result, i.e. array1.length + array2. length. Then, we create a new array result of the length. Now, we use the for-each loop to iterate through each element of array1 and store it in the result. After assigning it, we increase the position pos by 1, pos++. Likewise, we do the same for array2 and store each element in result starting from the position after array1. */
/*
 * Decompiled with CFR 0.150.
 *
 * NOTE(review): this is decompiler residue and does NOT compile as-is:
 *  - the super(...) argument calls "Qprot0.0(...)", which is not valid Java —
 *    presumably a string-decryption helper whose name the decompiler could not
 *    recover; the escaped-unicode argument is kept byte-for-byte;
 *  - "public static { throw throwable; }" is a mangled static initializer that
 *    references an undeclared variable.
 * Recover the original sources (or re-decompile with a newer tool) before
 * changing any behavior here. This edit only renames the unprofessional
 * identifiers and drops a dead local declaration left by the decompiler.
 */
package skizzle.modules.player;

import skizzle.events.Event;
import skizzle.events.listeners.EventUpdate;
import skizzle.modules.Module;

public class InvMove extends Module {
    public InvMove() {
        // Obfuscated module name string; second/third args look like an id and
        // the PLAYER category — TODO confirm against Module's constructor.
        super(Qprot0.0("\u6d11\u71c5\u565a\ua7e1\u4cc1\ue896\u8c20\u0167\u571b\ue7c5\u6282\uaf03\uefb8\u7248"), 23, Module.Category.PLAYER);
    }

    // Mangled static initializer left by the decompiler ("throwable" is undeclared).
    public static {
        throw throwable;
    }

    @Override
    public void onEvent(Event event) {
        if (event instanceof EventUpdate) {
            // Return value is discarded; the original bytecode may have had
            // side effects or more code here — TODO verify.
            event.isPre();
        }
    }
}
/* * MIT License * * Copyright (c) 2021 MASES s.r.l. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ /************************************************************************************** * <auto-generated> * This code was generated from a template using JCOReflector * * Manual changes to this file may cause unexpected behavior in your application. * Manual changes to this file will be overwritten if the code is regenerated. * </auto-generated> *************************************************************************************/ package system.windows.media; import org.mases.jcobridge.*; import org.mases.jcobridge.netreflection.*; import java.util.ArrayList; // Import section import system.windows.media.HitTestParameters; import system.windows.media.Geometry; /** * The base .NET class managing System.Windows.Media.GeometryHitTestParameters, PresentationCore, Version=5.0.4.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35. Extends {@link NetObject}. 
* <p>
 * 
 * See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.Windows.Media.GeometryHitTestParameters" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.Windows.Media.GeometryHitTestParameters</a>
 */
public class GeometryHitTestParameters extends HitTestParameters {
    // NOTE: auto-generated by JCOReflector (see the file header); manual edits
    // will be overwritten when the wrapper is regenerated.

    /**
     * Fully assembly qualified name: PresentationCore, Version=5.0.4.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35
     */
    public static final String assemblyFullName = "PresentationCore, Version=5.0.4.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35";
    /**
     * Assembly name: PresentationCore
     */
    public static final String assemblyShortName = "PresentationCore";
    /**
     * Qualified class name: System.Windows.Media.GeometryHitTestParameters
     */
    public static final String className = "System.Windows.Media.GeometryHitTestParameters";
    // Bridge into the .NET runtime for the assembly declared above.
    static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
    /**
     * The type managed from JCOBridge. See {@link JCType}
     */
    public static JCType classType = createType();
    static JCEnum enumInstance = null;
    // The wrapped .NET instance; null until set by a constructor or setJCOInstance.
    JCObject classInstance = null;

    // Resolves the .NET type for this wrapper; logs and returns null on failure.
    static JCType createType() {
        try {
            String classToCreate = className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Creating %s", classToCreate);
            JCType typeCreated = bridge.GetType(classToCreate);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Created: %s", (typeCreated != null) ? typeCreated.toString() : "Returned null value");
            return typeCreated;
        } catch (JCException e) {
            JCOReflector.writeLog(e);
            return null;
        }
    }

    // Adds a .NET assembly reference, translating bridge failures into Java exceptions.
    void addReference(String ref) throws Throwable {
        try {
            bridge.AddReference(ref);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    public GeometryHitTestParameters(Object instance) throws Throwable {
        super(instance);
        if (instance instanceof JCObject) {
            classInstance = (JCObject) instance;
        } else
            throw new Exception("Cannot manage object, it is not a JCObject");
    }

    public String getJCOAssemblyName() {
        return assemblyFullName;
    }

    public String getJCOClassName() {
        return className;
    }

    public String getJCOObjectName() {
        return className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
    }

    public Object getJCOInstance() {
        return classInstance;
    }

    public void setJCOInstance(JCObject instance) {
        classInstance = instance;
        super.setJCOInstance(classInstance);
    }

    public JCType getJCOType() {
        return classType;
    }

    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link GeometryHitTestParameters}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link GeometryHitTestParameters} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static GeometryHitTestParameters cast(IJCOBridgeReflected from) throws Throwable {
        NetType.AssertCast(classType, from);
        return new GeometryHitTestParameters(from.getJCOInstance());
    }

    // Constructors section
    
    public GeometryHitTestParameters() throws Throwable {
    }

    public GeometryHitTestParameters(Geometry geometry) throws Throwable, system.ArgumentException, system.ArgumentOutOfRangeException, system.ArgumentNullException, system.InvalidOperationException, system.PlatformNotSupportedException, system.IndexOutOfRangeException, system.NotSupportedException, system.ObjectDisposedException, system.RankException, system.ArrayTypeMismatchException, system.FormatException {
        try {
            // add reference to assemblyName.dll file
            addReference(JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            setJCOInstance((JCObject)classType.NewObject(geometry == null ? null : geometry.getJCOInstance()));
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Methods section

    // Properties section
    
    public Geometry getHitGeometry() throws Throwable, system.ArgumentNullException, system.ArgumentException, system.InvalidOperationException, system.globalization.CultureNotFoundException, system.PlatformNotSupportedException, system.ObjectDisposedException, system.security.SecurityException, system.io.IOException, system.ArgumentOutOfRangeException, system.FormatException, system.IndexOutOfRangeException, system.NotSupportedException {
        // Guard: the wrapper must be bound to a .NET instance before use.
        if (classInstance == null)
            throw new UnsupportedOperationException("classInstance is null.");
        try {
            JCObject val = (JCObject)classInstance.Get("HitGeometry");
            return new Geometry(val);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Instance Events section
    
}
/** * Copyright (C) 2011 * Michael Mosmann <michael@mosmann.de> * Martin Jöhren <m.joehren@googlemail.com> * * with contributions from * konstantin-ba@github,Archimedes Trajano (trajano@github) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.flapdoodle.embed.mongo.doc; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.UnknownHostException; import java.util.Date; import java.util.List; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import org.junit.ClassRule; import org.junit.Test; import com.mongodb.BasicDBObject; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.MongoClient; import com.mongodb.ServerAddress; import de.flapdoodle.embed.mongo.Command; import de.flapdoodle.embed.mongo.MongoImportExecutable; import de.flapdoodle.embed.mongo.MongoImportProcess; import de.flapdoodle.embed.mongo.MongoImportStarter; import de.flapdoodle.embed.mongo.MongodExecutable; import de.flapdoodle.embed.mongo.MongodProcess; import de.flapdoodle.embed.mongo.MongodStarter; import de.flapdoodle.embed.mongo.config.DownloadConfigBuilder; import de.flapdoodle.embed.mongo.config.ExtractedArtifactStoreBuilder; import de.flapdoodle.embed.mongo.config.IMongoImportConfig; import de.flapdoodle.embed.mongo.config.IMongodConfig; import de.flapdoodle.embed.mongo.config.MongoCmdOptionsBuilder; import de.flapdoodle.embed.mongo.config.MongoImportConfigBuilder; import 
de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.config.RuntimeConfigBuilder;
import de.flapdoodle.embed.mongo.config.Storage;
import de.flapdoodle.embed.mongo.config.Timeout;
import de.flapdoodle.embed.mongo.config.processlistener.ProcessListenerBuilder;
import de.flapdoodle.embed.mongo.distribution.Feature;
import de.flapdoodle.embed.mongo.distribution.Version;
import de.flapdoodle.embed.mongo.distribution.Versions;
import de.flapdoodle.embed.mongo.examples.AbstractMongoDBTest;
import de.flapdoodle.embed.mongo.examples.FileStreamProcessor;
import de.flapdoodle.embed.mongo.tests.MongodForTestsFactory;
import de.flapdoodle.embed.process.config.IRuntimeConfig;
import de.flapdoodle.embed.process.config.io.ProcessOutput;
import de.flapdoodle.embed.process.config.store.HttpProxyFactory;
import de.flapdoodle.embed.process.distribution.Distribution;
import de.flapdoodle.embed.process.distribution.GenericVersion;
import de.flapdoodle.embed.process.distribution.IVersion;
import de.flapdoodle.embed.process.extract.ITempNaming;
import de.flapdoodle.embed.process.extract.UUIDTempNaming;
import de.flapdoodle.embed.process.extract.UserTempNaming;
import de.flapdoodle.embed.process.io.IStreamProcessor;
import de.flapdoodle.embed.process.io.Processors;
import de.flapdoodle.embed.process.io.directories.FixedPath;
import de.flapdoodle.embed.process.io.directories.IDirectory;
import de.flapdoodle.embed.process.io.progress.LoggingProgressListener;
import de.flapdoodle.embed.process.runtime.ICommandLinePostProcessor;
import de.flapdoodle.embed.process.runtime.Network;
import de.flapdoodle.testdoc.Includes;
import de.flapdoodle.testdoc.Recorder;
import de.flapdoodle.testdoc.Recording;
import de.flapdoodle.testdoc.TabSize;

/**
 * Executable how-to documentation for embedded MongoDB.
 * NOTE(review): the recording.begin()/recording.end() calls and the
 * "// ->" / "// <-" marker comments appear to delimit the source snippets
 * that end up in the generated Howto.md — do not reformat or move them.
 */
public class HowToDocTest {

    // Collects the recorded snippets of every test into Howto.md (tab = 2 spaces).
    @ClassRule
    public static final Recording recording=Recorder.generateMarkDown("Howto.md",TabSize.spaces(2));

    // Quick start: boot a default mongod on a free port, store one document, shut down.
    @Test
    public void testStandard() throws
UnknownHostException, IOException {
        recording.begin();
        MongodStarter starter = MongodStarter.getDefaultInstance();

        int port = Network.getFreeServerPort();
        IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Version.Main.PRODUCTION)
            .net(new Net(port, Network.localhostIsIPv6()))
            .build();

        MongodExecutable mongodExecutable = null;
        try {
            mongodExecutable = starter.prepare(mongodConfig);
            MongodProcess mongod = mongodExecutable.start();

            try (MongoClient mongo = new MongoClient("localhost", port)) {
                DB db = mongo.getDB("test");
                DBCollection col = db.createCollection("testCol", new BasicDBObject());
                col.save(new BasicDBObject("testDoc", new Date()));
            }

        } finally {
            if (mongodExecutable != null)
                mongodExecutable.stop();
        }
        recording.end();
    }

    // Same flow, but the extracted mongod executable gets a stable file name
    // (UserTempNaming) instead of a random one.
    @Test
    public void testCustomMongodFilename() throws UnknownHostException, IOException {
        recording.begin();

        int port = Network.getFreeServerPort();
        Command command = Command.MongoD;

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaults(command)
            .artifactStore(new ExtractedArtifactStoreBuilder()
                .defaults(command)
                .download(new DownloadConfigBuilder()
                    .defaultsForCommand(command).build())
                .executableNaming(new UserTempNaming()))
            .build();

        IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Version.Main.PRODUCTION)
            .net(new Net(port, Network.localhostIsIPv6()))
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);

        MongodExecutable mongodExecutable = null;
        try {
            mongodExecutable = runtime.prepare(mongodConfig);
            MongodProcess mongod = mongodExecutable.start();

            try (MongoClient mongo = new MongoClient("localhost", port)) {
                DB db = mongo.getDB("test");
                DBCollection col = db.createCollection("testCol", new BasicDBObject());
                col.save(new BasicDBObject("testDoc", new Date()));
            }

        } finally {
            if (mongodExecutable != null)
                mongodExecutable.stop();
        }
        recording.end();
    }

    // Pointer to the unit-test base-class example; @include is a testdoc directive.
    public void testUnitTests() {
        // @include AbstractMongoDBTest.java
        Class<?> see = AbstractMongoDBTest.class;
    }

    @Test
    public
void testMongodForTests() throws IOException {
        recording.begin();
        MongodForTestsFactory factory = null;
        try {
            factory = MongodForTestsFactory.with(Version.Main.PRODUCTION);

            try (MongoClient mongo = factory.newMongo()) {
                DB db = mongo.getDB("test-" + UUID.randomUUID());
                DBCollection col = db.createCollection("testCol", new BasicDBObject());
                col.save(new BasicDBObject("testDoc", new Date()));
            }
        } finally {
            if (factory != null)
                factory.shutdown();
        }
        recording.end();
    }

    // Download mongod from a custom mirror instead of the default site.
    @Test
    public void testCustomizeDownloadURL() {
        recording.begin();
        Command command = Command.MongoD;

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaults(command)
            .artifactStore(new ExtractedArtifactStoreBuilder()
                .defaults(command)
                .download(new DownloadConfigBuilder()
                    .defaultsForCommand(command)
                    .downloadPath("http://my.custom.download.domain/")))
            .build();
        recording.end();
    }

    // Route the artifact download through an HTTP proxy.
    @Test
    public void testCustomProxy() {
        recording.begin();
        Command command = Command.MongoD;

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaults(command)
            .artifactStore(new ExtractedArtifactStoreBuilder()
                .defaults(command)
                .download(new DownloadConfigBuilder()
                    .defaultsForCommand(command)
                    .proxyFactory(new HttpProxyFactory("fooo", 1234))))
            .build();
        recording.end();
    }

    // Keep downloaded artifacts in a custom directory, with random executable names.
    @Test
    public void testCustomizeArtifactStorage() throws IOException {
        IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Version.Main.PRODUCTION)
            .net(new Net(Network.getFreeServerPort(), Network.localhostIsIPv6()))
            .build();

        // ->
        // ...
        recording.begin();
        IDirectory artifactStorePath = new FixedPath(System.getProperty("user.home") + "/.embeddedMongodbCustomPath");
        ITempNaming executableNaming = new UUIDTempNaming();

        Command command = Command.MongoD;

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaults(command)
            .artifactStore(new ExtractedArtifactStoreBuilder()
                .defaults(command)
                .download(new DownloadConfigBuilder()
                    .defaultsForCommand(command)
                    .artifactStorePath(artifactStorePath))
                .executableNaming(executableNaming))
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);
        MongodExecutable mongodExe = runtime.prepare(mongodConfig);
        recording.end();
        // ...
        // <-
        MongodProcess mongod = mongodExe.start();

        mongod.stop();
        mongodExe.stop();
    }

    // Prefix each mongod output stream when echoing it to the console.
    @Test
    public void testCustomOutputToConsolePrefix() {
        // ->
        // ...
        recording.begin();
        ProcessOutput processOutput = new ProcessOutput(Processors.namedConsole("[mongod>]"),
            Processors.namedConsole("[MONGOD>]"), Processors.namedConsole("[console>]"));

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaults(Command.MongoD)
            .processOutput(processOutput)
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);
        recording.end();
        // ...
        // <-
    }

    // Write mongod output and errors to temp files instead of the console.
    @Test
    public void testCustomOutputToFile() throws FileNotFoundException, IOException {
        recording.include(FileStreamProcessor.class, Includes.WithoutImports, Includes.WithoutPackage, Includes.Trim);
        // ->
        // ...
        recording.begin();
        IStreamProcessor mongodOutput = Processors.named("[mongod>]",
            new FileStreamProcessor(File.createTempFile("mongod", "log")));
        IStreamProcessor mongodError = new FileStreamProcessor(File.createTempFile("mongod-error", "log"));
        IStreamProcessor commandsOutput = Processors.namedConsole("[console>]");

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaults(Command.MongoD)
            .processOutput(new ProcessOutput(mongodOutput, mongodError, commandsOutput))
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);
        recording.end();
        // ...
        // <-
    }

    // Send mongod output to java.util.logging, including download progress.
    @Test
    public void testCustomOutputToLogging() throws FileNotFoundException, IOException {
        // ->
        // ...
        recording.begin();
        Logger logger = Logger.getLogger(getClass().getName());

        ProcessOutput processOutput = new ProcessOutput(Processors.logTo(logger, Level.INFO),
            Processors.logTo(logger, Level.SEVERE), Processors.named("[console>]",
                Processors.logTo(logger, Level.FINE)));

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaultsWithLogger(Command.MongoD, logger)
            .processOutput(processOutput)
            .artifactStore(new ExtractedArtifactStoreBuilder()
                .defaults(Command.MongoD)
                .download(new DownloadConfigBuilder()
                    .defaultsForCommand(Command.MongoD)
                    .progressListener(new LoggingProgressListener(logger, Level.FINE))))
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);
        recording.end();
        // ...
        // <-
    }

    // #### ... to default java logging (the easy way)
    @Test
    public void testDefaultOutputToLogging() throws FileNotFoundException, IOException {
        // ->
        // ...
        recording.begin();
        Logger logger = Logger.getLogger(getClass().getName());

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaultsWithLogger(Command.MongoD, logger)
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);
        recording.end();
        // ...
        // <-
    }

    // #### ... to null device
    @Test
    public void testDefaultOutputToNone() throws IOException {
        int port = 12345;
        IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Versions.withFeatures(new GenericVersion("2.7.1"), Feature.SYNC_DELAY))
            .net(new Net(port, Network.localhostIsIPv6()))
            .build();
        // ->
        // ...
        recording.begin();
        Logger logger = Logger.getLogger(getClass().getName());

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder()
            .defaultsWithLogger(Command.MongoD, logger)
            .processOutput(ProcessOutput.getDefaultInstanceSilent())
            .build();

        MongodStarter runtime = MongodStarter.getInstance(runtimeConfig);
        recording.end();
        // ...
        // <-
        MongodProcess mongod = null;
        MongodExecutable mongodExecutable = null;
        try {
            mongodExecutable = runtime.prepare(mongodConfig);
            mongod = mongodExecutable.start();

            try (MongoClient mongo = new MongoClient("localhost", port)) {
                DB db = mongo.getDB("test");
                DBCollection col = db.createCollection("testCol", new BasicDBObject());
                col.save(new BasicDBObject("testDoc", new Date()));
            }
        } finally {
            if (mongod != null) {
                mongod.stop();
            }
            if (mongodExecutable != null)
                mongodExecutable.stop();
        }
    }

    // ### Custom Version
    // Run a specific mongod version rather than a Main/latest alias.
    @Test
    public void testCustomVersion() throws IOException {
        // ->
        // ...
        recording.begin();
        int port = 12345;
        IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Versions.withFeatures(new GenericVersion("2.7.1"), Feature.SYNC_DELAY))
            .net(new Net(port, Network.localhostIsIPv6()))
            .build();

        MongodStarter runtime = MongodStarter.getDefaultInstance();
        MongodProcess mongod = null;

        MongodExecutable mongodExecutable = null;
        try {
            mongodExecutable = runtime.prepare(mongodConfig);
            mongod = mongodExecutable.start();
            // <-
            recording.end();

            try (MongoClient mongo = new MongoClient("localhost", port)) {
                DB db = mongo.getDB("test");
                DBCollection col = db.createCollection("testCol", new BasicDBObject());
                col.save(new BasicDBObject("testDoc", new Date()));
            }
            recording.begin();
            // ->
            // ...
} finally { if (mongod != null) { mongod.stop(); } if (mongodExecutable != null) mongodExecutable.stop(); } recording.end(); // ... // <- } // ### Main Versions @Test public void testMainVersions() throws UnknownHostException, IOException { // -> recording.begin(); IVersion version = Version.V2_2_5; // uses latest supported 2.2.x Version version = Version.Main.V2_2; // uses latest supported production version version = Version.Main.PRODUCTION; // uses latest supported development version version = Version.Main.DEVELOPMENT; recording.end(); // <- } // ### Use Free Server Port /* // -> Warning: maybe not as stable, as expected. // <- */ // #### ... by hand @Test public void testFreeServerPort() throws UnknownHostException, IOException { // -> // ... recording.begin(); int port = Network.getFreeServerPort(); recording.end(); // ... // <- } // #### ... automagic @Test public void testFreeServerPortAuto() throws UnknownHostException, IOException { // -> // ... recording.begin(); IMongodConfig mongodConfig = new MongodConfigBuilder().version(Version.Main.PRODUCTION).build(); MongodStarter runtime = MongodStarter.getDefaultInstance(); MongodExecutable mongodExecutable = null; MongodProcess mongod = null; try { mongodExecutable = runtime.prepare(mongodConfig); mongod = mongodExecutable.start(); try (MongoClient mongo = new MongoClient( new ServerAddress(mongodConfig.net().getServerAddress(), mongodConfig.net().getPort()))) { // <- recording.end(); DB db = mongo.getDB("test"); DBCollection col = db.createCollection("testCol", new BasicDBObject()); col.save(new BasicDBObject("testDoc", new Date())); recording.begin(); } // -> // ... } finally { if (mongod != null) { mongod.stop(); } if (mongodExecutable != null) mongodExecutable.stop(); } recording.end(); // ... // <- } // ### ... custom timeouts @Test public void testCustomTimeouts() throws UnknownHostException, IOException { // -> // ... 
recording.begin(); IMongodConfig mongodConfig = new MongodConfigBuilder() .version(Version.Main.PRODUCTION) .timeout(new Timeout(30000)) .build(); recording.end(); // ... // <- } // ### Command Line Post Processing @Test public void testCommandLinePostProcessing() { // -> // ... recording.begin(); ICommandLinePostProcessor postProcessor = // ... // <- new ICommandLinePostProcessor() { @Override public List<String> process(Distribution distribution, List<String> args) { return null; } }; recording.end(); // -> recording.begin(); IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder() .defaults(Command.MongoD) .commandLinePostProcessor(postProcessor) .build(); recording.end(); // ... // <- } // ### Custom Command Line Options /* // -> We changed the syncDelay to 0 which turns off sync to disc. To turn on default value used defaultSyncDelay(). // <- */ @Test public void testCommandLineOptions() throws UnknownHostException, IOException { // -> recording.begin(); IMongodConfig mongodConfig = new MongodConfigBuilder() .version(Version.Main.PRODUCTION) .cmdOptions(new MongoCmdOptionsBuilder() .syncDelay(10) .useNoPrealloc(false) .useSmallFiles(false) .useNoJournal(false) .enableTextSearch(true) .build()) .build(); recording.end(); // ... // <- } // ### Snapshot database files from temp dir /* // -> We changed the syncDelay to 0 which turns off sync to disc. To get the files to create an snapshot you must turn on default value (use defaultSyncDelay()). // <- */ @Test public void testSnapshotDbFiles() throws UnknownHostException, IOException { File destination = null; // -> recording.begin(); IMongodConfig mongodConfig = new MongodConfigBuilder() .version(Version.Main.PRODUCTION) .processListener(new ProcessListenerBuilder() .copyDbFilesBeforeStopInto(destination) .build()) .cmdOptions(new MongoCmdOptionsBuilder() .defaultSyncDelay() .build()) .build(); recording.end(); // ... 
// <- } // ### Custom database directory /* // -> If you set a custom database directory, it will not be deleted after shutdown // <- */ @Test public void testCustomDatabaseDirectory() throws UnknownHostException, IOException { // -> recording.begin(); Storage replication = new Storage("/custom/databaseDir",null,0); IMongodConfig mongodConfig = new MongodConfigBuilder() .version(Version.Main.PRODUCTION) .replication(replication) .build(); recording.end(); // ... // <- } // ### Start mongos with mongod instance // @include StartConfigAndMongoDBServerTest.java // ## Common Errors // ### Executable Collision /* // -> There is a good chance of filename collisions if you use a custom naming schema for the executable (see [Usage - custom mongod filename](#usage---custom-mongod-filename)). If you got an exception, then you should make your RuntimeConfig or MongoStarter class or jvm static (static final in your test class or singleton class for all tests). // <- */ @Test public void importJsonIntoMongoDB() throws UnknownHostException, IOException { String jsonFile = Thread.currentThread().getContextClassLoader().getResource("sample.json").toString(); jsonFile = jsonFile.replaceFirst("file:", ""); String defaultHost = "localhost"; recording.begin(); int defaultConfigPort = Network.getFreeServerPort(); String database = "importTestDB"; String collection = "importedCollection"; IMongodConfig mongoConfigConfig = new MongodConfigBuilder() .version(Version.Main.PRODUCTION) .net(new Net(defaultConfigPort, Network.localhostIsIPv6())) .build(); MongodExecutable mongodExecutable = MongodStarter.getDefaultInstance().prepare(mongoConfigConfig); MongodProcess mongod = mongodExecutable.start(); try { IMongoImportConfig mongoImportConfig = new MongoImportConfigBuilder() .version(Version.Main.PRODUCTION) .net(new Net(defaultConfigPort, Network.localhostIsIPv6())) .db(database) .collection(collection) .upsert(true) .dropCollection(true) .jsonArray(true) .importFile(jsonFile) .build(); 
MongoImportExecutable mongoImportExecutable = MongoImportStarter.getDefaultInstance().prepare(mongoImportConfig); MongoImportProcess mongoImport = mongoImportExecutable.start(); try { recording.end(); MongoClient mongoClient = new MongoClient(defaultHost, defaultConfigPort); System.out.println("DB Names: " + mongoClient.getDatabaseNames()); recording.begin(); } finally { mongoImport.stop(); } } finally { mongod.stop(); } recording.end(); } }
package Ventanas.Fx; import javafx.animation.*; import javafx.beans.property.DoubleProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.scene.CacheHint; import javafx.scene.Node; import javafx.util.Duration; public class ShakeTransition extends Transition { private final Interpolator WEB_EASE = Interpolator.SPLINE(0.25, 0.1, 0.25, 1); private final Timeline timeline; private final Node node; private boolean oldCache = false; private CacheHint oldCacheHint = CacheHint.DEFAULT; private final boolean useCache=true; private final double xIni; private final DoubleProperty x = new SimpleDoubleProperty(); /** * Creamos una nueva ShakeTransition (Efecto terremoto) * * @param node El nodo que va a ser afectado */ public ShakeTransition(final Node node) { this.node=node; statusProperty().addListener((ov, t, newStatus) -> { switch(newStatus) { case RUNNING: starting(); break; default: stopping(); break; } }); //Creamos el efecto dentro de la línea de tiempo modificando la propiedad de X this.timeline= new Timeline( new KeyFrame(Duration.millis(0), new KeyValue(x, 0, WEB_EASE)), new KeyFrame(Duration.millis(100), new KeyValue(x, -10, WEB_EASE)), new KeyFrame(Duration.millis(200), new KeyValue(x, 10, WEB_EASE)), new KeyFrame(Duration.millis(300), new KeyValue(x, -10, WEB_EASE)), new KeyFrame(Duration.millis(400), new KeyValue(x, 10, WEB_EASE)), new KeyFrame(Duration.millis(500), new KeyValue(x, -10, WEB_EASE)), new KeyFrame(Duration.millis(600), new KeyValue(x, 10, WEB_EASE)), new KeyFrame(Duration.millis(700), new KeyValue(x, -10, WEB_EASE)), new KeyFrame(Duration.millis(800), new KeyValue(x, 10, WEB_EASE)), new KeyFrame(Duration.millis(900), new KeyValue(x, -10, WEB_EASE)), new KeyFrame(Duration.millis(1000), new KeyValue(x, 0, WEB_EASE)) ); //Añadimos el efecto al Nodo que hemos pasado por parámetro e indicamos la duración. 
xIni=node.getTranslateX(); x.addListener((ob,n,n1)->(node).setTranslateX(xIni+n1.doubleValue())); setCycleDuration(Duration.seconds(1)); setDelay(Duration.seconds(0.2)); } /** * Cuando empieza la animación */ protected final void starting() { if (useCache) { oldCache = node.isCache(); oldCacheHint = node.getCacheHint(); node.setCache(true); node.setCacheHint(CacheHint.SPEED); } } /** * Cuando está parando la animación */ protected final void stopping() { if (useCache) { node.setCache(oldCache); node.setCacheHint(oldCacheHint); } } @Override protected void interpolate(double d) { timeline.playFrom(Duration.seconds(d)); timeline.stop(); } }
package org.infinispan.api;

import static org.infinispan.test.TestingUtil.v;
import static org.testng.AssertJUnit.assertEquals;

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.transaction.NotSupportedException;
import javax.transaction.SystemException;
import javax.transaction.TransactionManager;

import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.cache.Configuration;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.test.TestingUtil;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.concurrent.IsolationLevel;
import org.testng.annotations.Test;

/**
 * Tests the {@link org.infinispan.Cache} public API at a high level
 *
 * @author <a href="mailto:manik@jboss.org">Manik Surtani</a>
 */
@Test(groups = "functional")
public abstract class CacheAPITest extends APINonTxTest {

   /**
    * Creates a single LOCAL cache named "test", configured with the
    * subclass-supplied isolation level plus any eviction/extra tweaks from
    * {@link #addEviction} and {@link #amend}.
    */
   @Override
   protected EmbeddedCacheManager createCacheManager() throws Exception {
      // start a single cache instance
      ConfigurationBuilder cb = getDefaultStandaloneCacheConfig(true);
      cb.locking().isolationLevel(getIsolationLevel());
      addEviction(cb);
      amend(cb);
      EmbeddedCacheManager cm = TestCacheManagerFactory.createCacheManager(false);
      cm.defineConfiguration("test", cb.build());
      cache = cm.getCache("test");
      return cm;
   }

   // Hook for subclasses to tweak the configuration; no-op by default.
   protected void amend(ConfigurationBuilder cb) {
   }

   // Subclasses choose the locking isolation level under test.
   protected abstract IsolationLevel getIsolationLevel();

   // Hook for subclasses to add eviction settings; identity by default.
   protected ConfigurationBuilder addEviction(ConfigurationBuilder cb) {
      return cb;
   }

   /**
    * Tests that the configuration contains the values expected, as well as immutability of certain elements
    */
   public void testConfiguration() {
      Configuration c = cache.getCacheConfiguration();
      assert CacheMode.LOCAL.equals(c.clustering().cacheMode());
      assert null != c.transaction().transactionManagerLookup();
   }

   public void testGetMembersInLocalMode() {
      assert manager(cache).getAddress() == null : "Cache members should be null if running in LOCAL mode";
   }

   /**
    * Overwrites an existing key inside a transaction, rolls back, and checks
    * the original value (and all collection views) are restored.
    */
   public void testRollbackAfterOverwrite() throws Exception {
      String key = "key", value = "value", value2 = "value2";
      int size;
      cache.put(key, value);
      assert cache.get(key).equals(value);
      size = 1;
      assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      assert cache.keySet().contains(key);
      assert cache.values().contains(value);

      TestingUtil.getTransactionManager(cache).begin();
      try {
         // inside the tx the new value must be visible
         cache.put(key, value2);
         assert cache.get(key).equals(value2);
         size = 1;
         assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
         assert cache.keySet().contains(key);
         assert cache.values().contains(value2);
      } finally {
         TestingUtil.getTransactionManager(cache).rollback();
      }

      // after rollback the original value is back
      assert cache.get(key).equals(value);
      size = 1;
      assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      assert cache.keySet().contains(key);
      assert cache.values().contains(value);
   }

   /**
    * Removes a key inside a transaction, rolls back, and checks the entry
    * reappears in the cache and all collection views.
    */
   public void testRollbackAfterRemove() throws Exception {
      String key = "key", value = "value";
      int size;
      cache.put(key, value);
      assert cache.get(key).equals(value);
      size = 1;
      assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      assert cache.keySet().contains(key);
      assert cache.values().contains(value);

      TestingUtil.getTransactionManager(cache).begin();
      try {
         // inside the tx the removal must be visible
         cache.remove(key);
         assert cache.get(key) == null;
         size = 0;
         assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      } finally {
         TestingUtil.getTransactionManager(cache).rollback();
      }

      assert cache.get(key).equals(value);
      size = 1;
      assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      assert cache.keySet().contains(key);
      assert cache.values().contains(value);
   }

   /**
    * entrySet() obtained inside a transaction must equal the committed data
    * plus the entries written in the same (uncommitted) transaction.
    */
   public void testEntrySetEqualityInTx(Method m) throws Exception {
      Map<Object, Object> dataIn = new HashMap<>();
      dataIn.put(1, v(m, 1));
      dataIn.put(2, v(m, 2));

      cache.putAll(dataIn);

      TransactionManager tm = cache.getAdvancedCache().getTransactionManager();
      tm.begin();
      try {
         Map<Integer, String> txDataIn = new HashMap<>();
         txDataIn.put(3, v(m, 3));
         Map<Object, Object> allEntriesIn = new HashMap<>(dataIn);

         // Modify expectations to include data to be included
         allEntriesIn.putAll(txDataIn);

         // Add an entry within tx
         cache.putAll(txDataIn);

         Set<Map.Entry<Object, Object>> entries = cache.entrySet();
         assertEquals(allEntriesIn.entrySet(), entries);
      } finally {
         tm.rollback();
      }
   }

   /**
    * An entrySet() view obtained BEFORE in-tx writes must still observe
    * entries added later in the same transaction when iterated.
    */
   public void testEntrySetIterationBeforeInTx(Method m) throws Exception {
      Map<Integer, String> dataIn = new HashMap<>();
      dataIn.put(1, v(m, 1));
      dataIn.put(2, v(m, 2));

      cache.putAll(dataIn);

      Map<Object, Object> foundValues = new HashMap<>();
      TransactionManager tm = cache.getAdvancedCache().getTransactionManager();
      tm.begin();
      try {
         Set<Entry<Object, Object>> entries = cache.entrySet();

         // Add an entry within tx
         cache.put(3, v(m, 3));
         cache.put(4, v(m, 4));

         for (Entry<Object, Object> entry : entries) {
            foundValues.put(entry.getKey(), entry.getValue());
         }
      } finally {
         tm.rollback();
      }
      assertEquals(4, foundValues.size());
      assertEquals(v(m, 1), foundValues.get(1));
      assertEquals(v(m, 2), foundValues.get(2));
      assertEquals(v(m, 3), foundValues.get(3));
      assertEquals(v(m, 4), foundValues.get(4));
   }

   /**
    * Same as the "before" variant, but the Iterator itself is created before
    * the in-tx writes — it must still see them.
    */
   public void testEntrySetIterationAfterInTx(Method m) throws Exception {
      Map<Integer, String> dataIn = new HashMap<>();
      dataIn.put(1, v(m, 1));
      dataIn.put(2, v(m, 2));

      cache.putAll(dataIn);

      Map<Object, Object> foundValues = new HashMap<>();
      TransactionManager tm = cache.getAdvancedCache().getTransactionManager();
      tm.begin();
      try {
         Set<Entry<Object, Object>> entries = cache.entrySet();
         Iterator<Entry<Object, Object>> itr = entries.iterator();

         // Add an entry within tx
         cache.put(3, v(m, 3));
         cache.put(4, v(m, 4));

         while (itr.hasNext()) {
            Entry<Object, Object> entry = itr.next();
            foundValues.put(entry.getKey(), entry.getValue());
         }
      } finally {
         tm.rollback();
      }
      assertEquals(4, foundValues.size());
      assertEquals(v(m, 1), foundValues.get(1));
      assertEquals(v(m, 2), foundValues.get(2));
      assertEquals(v(m, 3), foundValues.get(3));
      assertEquals(v(m, 4), foundValues.get(4));
   }

   /**
    * Puts a second key inside a transaction, rolls back, and checks the cache
    * only contains the pre-transaction entry.
    */
   public void testRollbackAfterPut() throws Exception {
      String key = "key", value = "value", key2 = "keyTwo", value2 = "value2";
      int size;
      cache.put(key, value);
      assert cache.get(key).equals(value);
      size = 1;
      assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      assert cache.keySet().contains(key);
      assert cache.values().contains(value);

      TestingUtil.getTransactionManager(cache).begin();
      try {
         cache.put(key2, value2);
         assert cache.get(key2).equals(value2);
         assert cache.keySet().contains(key2);
         size = 2;
         log.trace(cache.size());
         assert size == cache.size();
         assert size == cache.keySet().size();
         assert size == cache.values().size();
         assert size == cache.entrySet().size();
         assert cache.values().contains(value2);
      } finally {
         TestingUtil.getTransactionManager(cache).rollback();
      }

      assert cache.get(key).equals(value);
      size = 1;
      assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
      assert cache.keySet().contains(key);
      assert cache.values().contains(value);
   }

   // clear() must leave the cache empty regardless of prior content
   public void testSizeAfterClear() {
      for (int i = 0; i < 10; i++) {
         cache.put(i, "value" + i);
      }

      cache.clear();

      assert cache.isEmpty();
   }

   /**
    * Removes a key inside a transaction (so putIfAbsent would see it absent),
    * rolls back, and checks the original value survives.
    */
   public void testPutIfAbsentAfterRemoveInTx() throws SystemException, NotSupportedException {
      String key = "key_1", old_value = "old_value";
      cache.put(key, old_value);
      assert cache.get(key).equals(old_value);

      TestingUtil.getTransactionManager(cache).begin();
      try {
         assert cache.remove(key).equals(old_value);
         assert cache.get(key) == null;
         // assertEquals(cache.putIfAbsent(key, new_value), null);
      } finally {
         TestingUtil.getTransactionManager(cache).rollback();
      }

      assertEquals(old_value, cache.get(key));
   }
}
/*
 * Copyright (C) 2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.copybara.git.gerritapi;

import com.google.api.client.util.Key;
import javax.annotation.Nullable;

/**
 * Request payload for restoring an abandoned Gerrit change.
 *
 * <p>See https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#restore-input
 */
public class RestoreInput {

  @Key String message;

  private RestoreInput(@Nullable String message) {
    this.message = message;
  }

  /** Returns a restore request carrying the given (possibly absent) comment. */
  public static RestoreInput create(@Nullable String message) {
    return new RestoreInput(message);
  }

  /** Returns a restore request without any comment attached. */
  public static RestoreInput createWithoutComment() {
    return create(/*message=*/ null);
  }
}
/*
 * Copyright 2012 William Hamilton
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cpsr.environment.components;

import java.io.Serializable;

import cpsr.environment.DataSet;
import cpsr.environment.exceptions.EnvironmentException;

/**
 * Class defines default actions.
 * Can be extended, but all children must still
 * represent each unique action by a positive
 * integer.
 *
 * @author William Hamilton
 */
@SuppressWarnings("serial")
public class Action implements Serializable {

   /**
    * @serialField
    * @deprecated
    */
   protected DataSet dataSet;

   /**
    * @serialField
    */
   protected int id, maxID;

   /**
    * Default constructor DO NOT USE.
    */
   protected Action() {
      super();
   }

   /**
    * Constructs Action using only id.
    * Use this constructor if adding action to data set
    * or if one plans on setting the DataSet at a later time.
    *
    * @param id The action id.
    */
   public Action(int id) {
      this.id = id;
   }

   /**
    * Constructs Action with an id and the maximum id used by the
    * environment (the latter determines the binary-string width).
    *
    * @param id    The action id.
    * @param maxID The largest action id in use.
    */
   public Action(int id, int maxID) {
      this.id = id;
      this.maxID = maxID;
   }

   /**
    * Constructs action associated with a particular
    * DataSet using specified integer id.
    *
    * @param id      The action id.
    * @param dataSet The data set this action belongs to.
    * @deprecated
    */
   public Action(int id, DataSet dataSet) {
      this.id = id;
      this.dataSet = dataSet;
   }

   /**
    * Returns a binary string representing action with length equal
    * to the max length specified by max action hash code of DataSet.
    *
    * @return Binary representation of action, left-padded with zeros
    *         to the width of {@code Integer.toBinaryString(maxID)}.
    * @throws EnvironmentException if the id needs more bits than maxID allows.
    */
   public String toBinaryString() throws EnvironmentException {
      int targetLength = (Integer.toBinaryString(maxID)).length();
      String binaryRep = Integer.toBinaryString(this.hashCode());

      if (targetLength < binaryRep.length()) {
         throw new EnvironmentException("Integer ID of action exceeds maximum specifed by data set");
      }
      // Left-pad with zeros until the target width is reached. The previous
      // revision re-evaluated binaryRep.length() in the loop condition while
      // the string grew, so it only added about half the required zeros and
      // produced under-length representations.
      StringBuilder padded = new StringBuilder(binaryRep);
      while (padded.length() < targetLength) {
         padded.insert(0, '0');
      }
      return padded.toString();
   }

   /**
    * Returns DataSet associated with this action.
    *
    * @return DataSet associated with this action (may be null if never set).
    * @deprecated
    */
   public DataSet getDataSet() {
      // Previous revision wrapped this in a try/catch for NullPointerException,
      // but returning a field can never throw NPE, so the handler was dead code.
      return dataSet;
   }

   /**
    * Return unique identifying integer ID for observation.
    *
    * @return Unique identifying integer ID for observation
    */
   public int getID() {
      // Returning a primitive field cannot throw; the old NPE catch was dead code.
      return id;
   }

   /**
    * Sets the data set.
    *
    * @param dataSet The data set to associate with this action.
    */
   public void setData(DataSet dataSet) {
      this.dataSet = dataSet;
   }

   /** Sets the maximum action id (controls binary-string width). */
   public void setMaxID(int pMaxID) {
      maxID = pMaxID;
   }

   @Override
   public int hashCode() {
      return getID();
   }

   /**
    * Two actions are equal iff they carry the same id. Unlike the previous
    * revision, a null or non-Action argument now returns false instead of
    * throwing NullPointerException/ClassCastException, per the
    * {@link Object#equals(Object)} contract.
    */
   @Override
   public boolean equals(Object ob) {
      if (!(ob instanceof Action)) {
         return false;
      }
      return id == ((Action) ob).getID();
   }
}
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common;

import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.common.thread.ThreadLocals;

import java.util.Arrays;

/**
 * UTF-8 / UTF-16 conversion helpers. The "unsafe" variants reuse per-thread
 * result buffers (the returned object is only valid until the next unsafe call
 * on the same thread); the plain variants allocate a fresh result.
 *
 * @author kimchy (shay.banon)
 */
public class Unicode {

    // Per-thread reusable UTF-8 encode buffer for unsafeFromStringAsUtf8.
    private static ThreadLocal<ThreadLocals.CleanableValue<UnicodeUtil.UTF8Result>> cachedUtf8Result = new ThreadLocal<ThreadLocals.CleanableValue<UnicodeUtil.UTF8Result>>() {
        @Override
        protected ThreadLocals.CleanableValue<UnicodeUtil.UTF8Result> initialValue() {
            return new ThreadLocals.CleanableValue<UnicodeUtil.UTF8Result>(new UnicodeUtil.UTF8Result());
        }
    };

    // Per-thread reusable UTF-16 decode buffer for unsafeFromBytesAsUtf16.
    private static ThreadLocal<ThreadLocals.CleanableValue<UTF16Result>> cachedUtf16Result = new ThreadLocal<ThreadLocals.CleanableValue<UTF16Result>>() {
        @Override
        protected ThreadLocals.CleanableValue<UTF16Result> initialValue() {
            return new ThreadLocals.CleanableValue<UTF16Result>(new UTF16Result());
        }
    };

    /**
     * Encodes the string as UTF-8 and returns a right-sized byte array
     * (copied out of the shared thread-local buffer). Null-safe.
     */
    public static byte[] fromStringAsBytes(String source) {
        if (source == null) {
            return null;
        }
        UnicodeUtil.UTF8Result result = unsafeFromStringAsUtf8(source);
        return Arrays.copyOfRange(result.result, 0, result.length);
    }

    /** Encodes the string as UTF-8 into a freshly allocated result. Null-safe. */
    public static UnicodeUtil.UTF8Result fromStringAsUtf8(String source) {
        if (source == null) {
            return null;
        }
        UnicodeUtil.UTF8Result result = new UnicodeUtil.UTF8Result();
        UnicodeUtil.UTF16toUTF8(source, 0, source.length(), result);
        return result;
    }

    /**
     * Encodes the string as UTF-8 into the shared thread-local buffer.
     * The returned object is overwritten by the next call on this thread.
     * Null-safe.
     */
    public static UnicodeUtil.UTF8Result unsafeFromStringAsUtf8(String source) {
        if (source == null) {
            return null;
        }
        UnicodeUtil.UTF8Result result = cachedUtf8Result.get().get();
        UnicodeUtil.UTF16toUTF8(source, 0, source.length(), result);
        return result;
    }

    /** Decodes the full UTF-8 byte array into a String. Null-safe. */
    public static String fromBytes(byte[] source) {
        return fromBytes(source, 0, source.length);
    }

    /** Decodes the given UTF-8 byte range into a String. Null-safe. */
    public static String fromBytes(byte[] source, int offset, int length) {
        if (source == null) {
            return null;
        }
        UTF16Result result = unsafeFromBytesAsUtf16(source, offset, length);
        return new String(result.result, 0, result.length);
    }

    /** Decodes the full UTF-8 byte array into a fresh UTF16Result. */
    public static UTF16Result fromBytesAsUtf16(byte[] source) {
        return fromBytesAsUtf16(source, 0, source.length);
    }

    /** Decodes the given UTF-8 byte range into a fresh UTF16Result. Null-safe. */
    public static UTF16Result fromBytesAsUtf16(byte[] source, int offset, int length) {
        if (source == null) {
            return null;
        }
        UTF16Result result = new UTF16Result();
        UTF8toUTF16(source, offset, length, result);
        return result;
    }

    /** Decodes the full UTF-8 byte array into the shared thread-local buffer. */
    public static UTF16Result unsafeFromBytesAsUtf16(byte[] source) {
        return unsafeFromBytesAsUtf16(source, 0, source.length);
    }

    /**
     * Decodes the given UTF-8 byte range into the shared thread-local buffer.
     * The returned object is overwritten by the next call on this thread.
     * Null-safe.
     */
    public static UTF16Result unsafeFromBytesAsUtf16(byte[] source, int offset, int length) {
        if (source == null) {
            return null;
        }
        UTF16Result result = cachedUtf16Result.get().get();
        UTF8toUTF16(source, offset, length, result);
        return result;
    }

    // LUCENE MONITOR

    // an optimized version of UTF16Result that does not hold the offsets since we don't need them
    // they are only used with continuous writing to the same utf16 (without "clearing it")

    public static final class UTF16Result {
        public char[] result = new char[10];
        //        public int[] offsets = new int[10];
        public int length;

        // Grows the backing char array (by 1.5x) if needed, preserving the
        // first `length` chars, then records the new logical length.
        public void setLength(int newLength) {
            if (result.length < newLength) {
                char[] newArray = new char[(int) (1.5 * newLength)];
                System.arraycopy(result, 0, newArray, 0, length);
                result = newArray;
            }
            length = newLength;
        }

        // Copies the other result's chars into this one, resizing as needed.
        public void copyText(UTF16Result other) {
            setLength(other.length);
            System.arraycopy(other.result, 0, result, 0, length);
        }
    }

    /**
     * Convert UTF8 bytes into UTF16 characters.  If offset
     * is non-zero, conversion starts at that starting point
     * in utf8, re-using the results from the previous call
     * up until offset.
     */
    // NOTE: forked from Lucene's UnicodeUtil with the incremental "offsets"
    // bookkeeping removed — the removed statements are kept as comments below
    // to make future diffs against Lucene easier.
    public static void UTF8toUTF16(final byte[] utf8, final int offset, final int length, final UTF16Result result) {

        final int end = offset + length;
        char[] out = result.result;
//        if (result.offsets.length <= end) {
//            int[] newOffsets = new int[2 * end];
//            System.arraycopy(result.offsets, 0, newOffsets, 0, result.offsets.length);
//            result.offsets = newOffsets;
//        }
//        final int[] offsets = result.offsets;

        // If incremental decoding fell in the middle of a
        // single unicode character, rollback to its start:
        int upto = offset;
//        while (offsets[upto] == -1)
//            upto--;

        int outUpto = 0; // offsets[upto];

        // Pre-allocate for worst case 1-for-1
        if (outUpto + length >= out.length) {
            char[] newOut = new char[2 * (outUpto + length)];
            System.arraycopy(out, 0, newOut, 0, outUpto);
            result.result = out = newOut;
        }

        while (upto < end) {

            final int b = utf8[upto] & 0xff;
            final int ch;
            upto += 1; // CHANGE
//            offsets[upto++] = outUpto;

            // Decode one code point: branch on the number of leading bytes.
            if (b < 0xc0) {
                assert b < 0x80;
                ch = b;
            } else if (b < 0xe0) {
                // 2-byte sequence
                ch = ((b & 0x1f) << 6) + (utf8[upto] & 0x3f);
                upto += 1; // CHANGE
//                offsets[upto++] = -1;
            } else if (b < 0xf0) {
                // 3-byte sequence
                ch = ((b & 0xf) << 12) + ((utf8[upto] & 0x3f) << 6) + (utf8[upto + 1] & 0x3f);
                upto += 2; // CHANGE
//                offsets[upto++] = -1;
//                offsets[upto++] = -1;
            } else {
                // 4-byte sequence
                assert b < 0xf8;
                ch = ((b & 0x7) << 18) + ((utf8[upto] & 0x3f) << 12) + ((utf8[upto + 1] & 0x3f) << 6) + (utf8[upto + 2] & 0x3f);
                upto += 3; // CHANGE
//                offsets[upto++] = -1;
//                offsets[upto++] = -1;
//                offsets[upto++] = -1;
            }

            if (ch <= UNI_MAX_BMP) {
                // target is a character <= 0xFFFF
                out[outUpto++] = (char) ch;
            } else {
                // target is a character in range 0xFFFF - 0x10FFFF
                // emit a surrogate pair
                final int chHalf = ch - HALF_BASE;
                out[outUpto++] = (char) ((chHalf >> HALF_SHIFT) + UnicodeUtil.UNI_SUR_HIGH_START);
                out[outUpto++] = (char) ((chHalf & HALF_MASK) + UnicodeUtil.UNI_SUR_LOW_START);
            }
        }
//        offsets[upto] = outUpto;
        result.length = outUpto;
    }

    // Surrogate-pair math constants (same values as Lucene's UnicodeUtil).
    private static final long UNI_MAX_BMP = 0x0000FFFF;

    private static final int HALF_BASE = 0x0010000;
    private static final long HALF_SHIFT = 10;
    private static final long HALF_MASK = 0x3FFL;
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package tech.tablesaw.plotting;

import org.junit.Test;

// Previous revision imported tech.tablesaw.plotting.StandardColors, which is
// redundant (same package), and used a wildcard java.awt import.
import java.awt.Color;
import java.util.List;

import static org.junit.Assert.assertFalse;

/**
 * Tests for {@link StandardColors}.
 */
public class StandardColorsTest {

    /** The standard palette must contain at least one color. */
    @Test
    public void testStandardColors() {
        List<Color> colors = StandardColors.standardColors();
        assertFalse(colors.isEmpty());
    }
}
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ******************************************************************************/
package org.apache.olingo.odata2.core.ep.consumer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

import java.io.InputStream;
import java.util.List;

import org.apache.olingo.odata2.api.edm.EdmEntitySetInfo;
import org.apache.olingo.odata2.api.ep.EntityProviderException;
import org.apache.olingo.odata2.api.servicedocument.ServiceDocument;
import org.junit.Test;

/**
 * Tests for {@link JsonServiceDocumentConsumer}: parsing a valid JSON service
 * document, URI decoding of entity-set names, and rejection of invalid input.
 */
public class JsonServiceDocumentConsumerTest {

  /** Parses the given classpath resource into a {@link ServiceDocument}. */
  private ServiceDocument parseResource(final String resourceName) throws EntityProviderException {
    InputStream stream = ClassLoader.class.getResourceAsStream(resourceName);
    return new JsonServiceDocumentConsumer().parseJson(stream);
  }

  @Test
  public void test() throws EntityProviderException {
    ServiceDocument serviceDoc = parseResource("/svcDocJson.json");
    List<EdmEntitySetInfo> entitySetsInfo = serviceDoc.getEntitySetsInfo();
    assertNotNull(entitySetsInfo);
    assertEquals(7, entitySetsInfo.size());

    // Every entity set outside the default container must belong to one of
    // the two known containers and carry the expected set name.
    for (EdmEntitySetInfo setInfo : entitySetsInfo) {
      if (setInfo.isDefaultEntityContainer()) {
        continue;
      }
      String containerName = setInfo.getEntityContainerName();
      if ("Container2".equals(containerName)) {
        assertEquals("Photos", setInfo.getEntitySetName());
      } else if ("Container.Nr1".equals(containerName)) {
        assertEquals("Employees", setInfo.getEntitySetName());
      } else {
        fail();
      }
    }
  }

  @Test
  public void checkDecodingOfEntitySetNames() throws Exception {
    // The 7th entity set has a percent-encoded name; the parsed name must be
    // decoded while the URI keeps the encoded form.
    EdmEntitySetInfo entitySetInfo = parseResource("/svcDocJson.json").getEntitySetsInfo().get(6);
    assertEquals(":EncodedName", entitySetInfo.getEntitySetName());
    assertEquals("%3AEncodedName", entitySetInfo.getEntitySetUri().toASCIIString());
  }

  @Test(expected = EntityProviderException.class)
  public void testInvalidServiceDocument() throws EntityProviderException {
    parseResource("/invalidSvcDocJson.json");
  }

  @Test(expected = EntityProviderException.class)
  public void testServiceDocumentWithInvalidStructure() throws EntityProviderException {
    parseResource("/invalidSvcDocJson2.json");
  }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.environment.InProcessTestEnvironment;
import org.openqa.selenium.html5.Html5Tests;
import org.openqa.selenium.interactions.InteractionTests;
import org.openqa.selenium.logging.AvailableLogsTest;
import org.openqa.selenium.logging.GetLogsTest;
import org.openqa.selenium.logging.PerformanceLogTypeTest;
import org.openqa.selenium.logging.PerformanceLoggingTest;
import org.openqa.selenium.support.ui.SelectElementTest;
import org.openqa.selenium.testing.JUnit4TestBase;

/**
 * JUnit 4 suite aggregating the standard WebDriver test classes, plus the
 * HTML5 and interaction sub-suites. The shared test environment is created
 * once before any suite class runs and the driver is released after all of
 * them have completed.
 *
 * NOTE(review): suite classes are listed in the order they will be run;
 * confirm nothing depends on this ordering before reshuffling the list.
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({
    AlertsTest.class,
    AtomsInjectionTest.class,
    AvailableLogsTest.class,
    ByTest.class,
    ChildrenFindingTest.class,
    ClearTest.class,
    ClickScrollingTest.class,
    ClickTest.class,
    CookieImplementationTest.class,
    ContentEditableTest.class,
    CorrectEventFiringTest.class,
    ElementAttributeTest.class,
    ElementEqualityTest.class,
    ElementFindingTest.class,
    ElementSelectingTest.class,
    ErrorsTest.class,
    ExecutingAsyncJavascriptTest.class,
    ExecutingJavascriptTest.class,
    FormHandlingTest.class,
    FrameSwitchingTest.class,
    GetLogsTest.class,
    I18nTest.class,
    ImplicitWaitTest.class,
    JavascriptEnabledDriverTest.class,
    MiscTest.class,
    ObjectStateAssumptionsTest.class,
    PageLoadingTest.class,
    PerformanceLoggingTest.class,
    PerformanceLogTypeTest.class,
    PositionAndSizeTest.class,
    ProxySettingTest.class,
    ReferrerTest.class,
    CssValueTest.class,
    RotatableTest.class,
    SelectElementTest.class,
    SelectElementHandlingTest.class,
    SessionHandlingTest.class,
    SlowLoadingPageTest.class,
    StaleElementReferenceTest.class,
    SvgElementTest.class,
    SvgDocumentTest.class,
    TakesScreenshotTest.class,
    TextHandlingTest.class,
    TextPagesTest.class,
    TypingTest.class,
    UnexpectedAlertBehaviorTest.class,
    UploadTest.class,
    VisibilityTest.class,
    WebElementTest.class,
    WindowSwitchingTest.class,
    ContextSwitchingTest.class,
    WindowTest.class,
    Html5Tests.class,
    InteractionTests.class
})
public class StandardSeleniumTests {

  /** Creates (or reuses) the in-process test environment shared by all suite classes. */
  @BeforeClass
  public static void prepareCommonEnvironment() {
    GlobalTestEnvironment.get(InProcessTestEnvironment.class);
  }

  /** Releases the WebDriver instance held by JUnit4TestBase once the suite is done. */
  @AfterClass
  public static void cleanUpDriver() {
    JUnit4TestBase.removeDriver();
  }
}
import java.util.ArrayList;
import java.util.List;

/**
 * Created by ddning on 2018/3/7.
 *
 * LeetCode 795 "Number of Subarrays with Bounded Maximum": count the
 * contiguous subarrays of A whose maximum element lies in [L, R].
 */
public class Solution {

    /**
     * Counts contiguous subarrays of {@code A} whose maximum is in [L, R].
     *
     * The array is split at every element > R (such an element can never sit
     * inside a qualifying subarray). Each remaining segment contains only
     * values <= R, so within it we only need subarrays that hold at least
     * one value >= L, delegated to {@link #getSubNum}.
     *
     * Fixes over the previous version: arrays with no element > R no longer
     * crash (the old code called big.get(-1)), and segment subarrays are
     * counted with n*(n+1)/2 instead of the subset count 2^n - 1.
     *
     * @param A input array (may be empty)
     * @param L inclusive lower bound for the subarray maximum
     * @param R inclusive upper bound for the subarray maximum (L <= R)
     * @return the number of qualifying contiguous subarrays
     */
    public long numSubarrayBoundedMax(int[] A, int L, int R) {
        long sum = 0;
        int begin = 0;
        for (int i = 0; i < A.length; i++) {
            if (A[i] > R) {
                sum += getSubNum(begin, i, A, L);
                begin = i + 1;
            }
        }
        // Final segment after the last element > R (the whole array when
        // no element exceeds R).
        sum += getSubNum(begin, A.length, A, L);
        return sum;
    }

    /**
     * Counts subarrays of A[begin, end) — all values assumed <= R by the
     * caller — that contain at least one element >= L: total subarrays of
     * the segment minus the subarrays made up entirely of elements < L.
     *
     * A segment of length n has n*(n+1)/2 contiguous subarrays (the old
     * implementation used 2^n - 1, the number of subsets, which both
     * over-counted and overflowed via factorials).
     *
     * @param begin inclusive start index of the segment
     * @param end   exclusive end index of the segment
     * @param A     the full input array
     * @param L     inclusive lower bound for the subarray maximum
     * @return number of subarrays of the segment with maximum >= L
     */
    public long getSubNum(int begin, int end, int[] A, int L) {
        long n = end - begin;
        long total = n * (n + 1) / 2;
        long small = 0;
        int k = begin;
        while (k < end) {
            if (A[k] < L) {
                int runStart = k;
                // Bounded by end: the old inner loop could run past the
                // segment and read foreign elements.
                while (k < end && A[k] < L) {
                    k++;
                }
                long m = k - runStart;
                small += m * (m + 1) / 2;
            } else {
                k++;
            }
        }
        return total - small;
    }

    /**
     * Sum of C(n,1)..C(n,n) = 2^n - 1 (count of non-empty subsets of an
     * n-element set). Kept for source compatibility; no longer used by the
     * subarray count, which needs n*(n+1)/2 instead.
     * NOTE(review): relies on {@link #factorial}, so it overflows for n > 20.
     */
    public long combine(int n) {
        long temp = 0;
        for (int j = 1; j <= n; j++) {
            temp += combination(n, j);
        }
        return temp;
    }

    /** Binomial coefficient C(n, m); 0 when m > n. Overflows for n > 20. */
    public long combination(int n, int m) {
        return (n >= m) ? factorial(n) / factorial(n - m) / factorial(m) : 0;
    }

    /** n! computed recursively; exceeds the long range for n > 20. */
    public long factorial(int n) {
        return (n > 1) ? n * factorial(n - 1) : 1;
    }

    /** Ad-hoc driver: prints the count for a sample array with bounds [2, 4]. */
    public static void main(String[] args) {
        Solution s = new Solution();
        long sum = s.numSubarrayBoundedMax(new int[]{2, 1, 4, 3, 5, 6, 7, 8, 3, 1, 2, 9}, 2, 4);
        System.out.println(sum);
    }
}
package com.download;

/**
 * Single-method callback used to deliver a string result asynchronously.
 *
 * NOTE(review): the payload's meaning is defined by the caller — presumably
 * a downloaded result or status message; confirm against the call sites.
 */
@FunctionalInterface
public interface CallBack {

    /**
     * Invoked with the result of the operation.
     *
     * @param string the result value supplied by the producer
     */
    void exe(String string);
}
/*|-----------------------------------------------------------------------------
 *|            This source code is provided under the Apache 2.0 license      --
 *|  and is provided AS IS with no warranty or guarantee of fit for purpose.  --
 *|                See the project's LICENSE.md for details.                  --
 *|           Copyright (C) 2019-2022 Refinitiv. All rights reserved.         --
 *|-----------------------------------------------------------------------------
 */

package com.refinitiv.eta.valueadd.reactor;

/**
 * Options accompanying a REST request submitted through the RestReactor:
 * an opaque user object echoed back on related events, authentication
 * token information, and the REST connection options to use.
 *
 * Instances are cleared and reused rather than reallocated.
 */
class RestReactorSubmitOptions
{
    Object _userSpecObj;
    ReactorAuthTokenInfo _tokenInfo;
    RestConnectOptions _connectOptions;

    /**
     * Instantiates a new reactor submit options.
     */
    public RestReactorSubmitOptions()
    {
        clear();
    }

    /**
     * Clears this object for reuse.
     */
    public void clear()
    {
        this._userSpecObj = null;
        this._tokenInfo = null;
        this._connectOptions = null;
    }

    /**
     * User-specified object to return as the application receives events related to this request.
     *
     * @return the user-specified object
     */
    public Object userSpecObj()
    {
        return this._userSpecObj;
    }

    /**
     * User-specified object to return as the application receives events related to this request.
     *
     * @param userSpecObj the user spec obj
     */
    public void userSpecObj(Object userSpecObj)
    {
        this._userSpecObj = userSpecObj;
    }

    /**
     * Sets the authentication token information carried with this request.
     *
     * @param tokenInfo the token information
     */
    public void tokenInformation(ReactorAuthTokenInfo tokenInfo)
    {
        this._tokenInfo = tokenInfo;
    }

    /**
     * Returns the authentication token information carried with this request.
     *
     * @return the token information
     */
    public ReactorAuthTokenInfo tokenInformation()
    {
        return this._tokenInfo;
    }

    /**
     * Sets the REST connection options to use for this request.
     *
     * @param options the connect options
     */
    public void connectOptions(RestConnectOptions options)
    {
        this._connectOptions = options;
    }

    /**
     * Returns the REST connection options to use for this request.
     *
     * @return the connect options
     */
    public RestConnectOptions connectOptions()
    {
        return this._connectOptions;
    }
}
package org.uma.jmetal.runner.multiobjective;

import org.uma.jmetal.algorithm.Algorithm;
import org.uma.jmetal.algorithm.multiobjective.nsgaiii.NSGAIIIBuilder;
import org.uma.jmetal.operator.CrossoverOperator;
import org.uma.jmetal.operator.MutationOperator;
import org.uma.jmetal.operator.SelectionOperator;
import org.uma.jmetal.operator.impl.crossover.SBXCrossover;
import org.uma.jmetal.operator.impl.mutation.PolynomialMutation;
import org.uma.jmetal.operator.impl.selection.BinaryTournamentSelection;
import org.uma.jmetal.problem.Problem;
import org.uma.jmetal.runner.AbstractAlgorithmRunner;
import org.uma.jmetal.solution.DoubleSolution;
import org.uma.jmetal.util.AlgorithmRunner;
import org.uma.jmetal.util.JMetalException;
import org.uma.jmetal.util.JMetalLogger;
import org.uma.jmetal.util.ProblemUtils;
import org.uma.jmetal.util.fileoutput.SolutionListOutput;
import org.uma.jmetal.util.fileoutput.impl.DefaultFileOutputContext;

import java.util.List;

/**
 * Class to configure and run the NSGA-III algorithm
 */
public class NSGAIIIRunner extends AbstractAlgorithmRunner {
  /**
   * @param args Command line arguments.
   * @throws JMetalException if the problem cannot be loaded or the run fails
   * Usage: two options
   *           - org.uma.jmetal.runner.multiobjective.NSGAIIIRunner
   *           - org.uma.jmetal.runner.multiobjective.NSGAIIIRunner problemName
   */
  public static void main(String[] args) throws JMetalException {
    Problem<DoubleSolution> problem;
    Algorithm<List<DoubleSolution>> algorithm;
    CrossoverOperator<DoubleSolution> crossover;
    MutationOperator<DoubleSolution> mutation;
    SelectionOperator<List<DoubleSolution>, DoubleSolution> selection;

    // Honor the problemName argument advertised in the javadoc; the previous
    // version documented it but ignored args entirely. Default is unchanged.
    // TODO(review): a paretoFrontFile argument was also documented but never
    // implemented; wire it up to quality-indicator printing if needed.
    String problemName;
    if (args.length >= 1) {
      problemName = args[0];
    } else {
      problemName = "org.uma.jmetal.problem.multiobjective.dtlz.DTLZ1";
    }

    problem = ProblemUtils.loadProblem(problemName);

    // SBX crossover with the usual NSGA-III settings.
    double crossoverProbability = 0.9;
    double crossoverDistributionIndex = 30.0;
    crossover = new SBXCrossover(crossoverProbability, crossoverDistributionIndex);

    // Polynomial mutation at the standard 1/n rate.
    double mutationProbability = 1.0 / problem.getNumberOfVariables();
    double mutationDistributionIndex = 20.0;
    mutation = new PolynomialMutation(mutationProbability, mutationDistributionIndex);

    selection = new BinaryTournamentSelection<DoubleSolution>();

    algorithm = new NSGAIIIBuilder<>(problem)
        .setCrossoverOperator(crossover)
        .setMutationOperator(mutation)
        .setSelectionOperator(selection)
        .setMaxIterations(500)
        .build();

    AlgorithmRunner algorithmRunner = new AlgorithmRunner.Executor(algorithm)
        .execute();

    List<DoubleSolution> population = algorithm.getResult();
    long computingTime = algorithmRunner.getComputingTime();

    // Write decision variables and objective values to tab-separated files.
    new SolutionListOutput(population)
        .setSeparator("\t")
        .setVarFileOutputContext(new DefaultFileOutputContext("VAR.tsv"))
        .setFunFileOutputContext(new DefaultFileOutputContext("FUN.tsv"))
        .print();

    JMetalLogger.logger.info("Total execution time: " + computingTime + "ms");
    JMetalLogger.logger.info("Objectives values have been written to file FUN.tsv");
    JMetalLogger.logger.info("Variables values have been written to file VAR.tsv");
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.compass.beanutils;

import org.compass.collections.Predicate;

import java.lang.reflect.InvocationTargetException;

/**
 * <p>Predicate implementation that applies the given <code>Predicate</code>
 * to the result of calling the given property getter.
 * </p>
 *
 * @version $Id: BeanPredicate.java 1454597 2013-03-08 21:58:12Z britter $
 */
public class BeanPredicate implements Predicate {

    /**
     * Name of the property whose value will be predicated
     */
    private String propertyName;
    /**
     * <code>Predicate</code> to be applied to the property value
     */
    private Predicate predicate;

    /**
     * Constructs a <code>BeanPredicate</code> that applies the given
     * <code>Predicate</code> to the named property value.
     *
     * @param propertyName the name of the property whose value is to be predicated,
     *                     not null
     * @param predicate    the <code>Predicate</code> to be applied,
     *                     not null
     */
    public BeanPredicate(String propertyName, Predicate predicate) {
        this.propertyName = propertyName;
        this.predicate = predicate;
    }

    /**
     * Evaluates the given object by applying the {@link #getPredicate()}
     * to a property value named by {@link #getPropertyName()}.
     *
     * @param object The object being evaluated
     * @return the result of the predicate evaluation
     * @throws IllegalArgumentException when the property cannot be evaluated
     */
    public boolean evaluate(Object object) {
        boolean evaluation = false;

        try {
            Object propValue = PropertyUtils.getProperty(object, propertyName);
            evaluation = predicate.evaluate(propValue);
        } catch (IllegalAccessException e) {
            // Chain the cause so the underlying stack trace is not lost
            // (previously only the message survived).
            final String errorMsg = "Unable to access the property provided.";
            throw new IllegalArgumentException(errorMsg, e);
        } catch (InvocationTargetException e) {
            final String errorMsg = "Exception occurred in property's getter";
            throw new IllegalArgumentException(errorMsg, e);
        } catch (NoSuchMethodException e) {
            final String errorMsg = "Property not found.";
            throw new IllegalArgumentException(errorMsg, e);
        }
        // IllegalArgumentException from the lookup propagates as-is; the old
        // catch-and-rethrow arm for it was a no-op and has been removed.

        return evaluation;
    }

    /**
     * Gets the name of the property whose value is to be predicated.
     * in the evaluation.
     *
     * @return the property name, not null
     */
    public String getPropertyName() {
        return propertyName;
    }

    /**
     * Sets the name of the property whose value is to be predicated.
     *
     * @param propertyName the name of the property whose value is to be predicated,
     *                     not null
     */
    public void setPropertyName(String propertyName) {
        this.propertyName = propertyName;
    }

    /**
     * Gets the <code>Predicate</code> to be applied to the value of the named property
     * during {@link #evaluate}.
     *
     * @return <code>Predicate</code>, not null
     */
    public Predicate getPredicate() {
        return predicate;
    }

    /**
     * Sets the <code>Predicate</code> to be applied to the value of the named property
     * during {@link #evaluate(Object)}.
     *
     * @param predicate <code>Predicate</code>, not null
     */
    public void setPredicate(Predicate predicate) {
        this.predicate = predicate;
    }
}
package com.tom.test.repositories;

import com.tom.test.domain.User;
import org.springframework.data.repository.CrudRepository;

/**
 * Created by tom on 6/8/2016.
 *
 * Spring Data repository for {@link User} entities keyed by an Integer id.
 * CRUD operations are inherited from {@link CrudRepository}; the finder
 * below is implemented by Spring from its method name.
 */
public interface UserRepository extends CrudRepository<User, Integer> {
    /**
     * Derived query: finds the user whose {@code userName} property equals
     * the given value.
     *
     * NOTE(review): presumably returns null when no match exists
     * (pre-Optional Spring Data API) — confirm with the Spring Data version
     * in use and the call sites.
     *
     * @param username the userName value to match
     * @return the matching user
     */
    User findByUserName(String username);
}
// Targeted by JavaCPP version 1.5.5: DO NOT EDIT THIS FILE package org.bytedeco.tensorflow; import org.bytedeco.tensorflow.Allocator; import java.nio.*; import org.bytedeco.javacpp.*; import org.bytedeco.javacpp.annotation.*; import static org.bytedeco.javacpp.presets.javacpp.*; import static org.bytedeco.tensorflow.global.tensorflow.*; @Namespace("tensorflow") @NoOffset @Properties(inherit = org.bytedeco.tensorflow.presets.tensorflow.class) public class TrackingAllocator extends Allocator { static { Loader.load(); } /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */ public TrackingAllocator(Pointer p) { super(p); } public native @StdString BytePointer Name(); public native Pointer AllocateRaw(@Cast("size_t") long alignment, @Cast("size_t") long num_bytes); public native Pointer AllocateRaw(@Cast("size_t") long alignment, @Cast("size_t") long num_bytes, @Const @ByRef AllocationAttributes allocation_attr); public native void DeallocateRaw(Pointer ptr); public native @Cast("bool") boolean TracksAllocationSizes(); public native @Cast("size_t") long RequestedSize(@Const Pointer ptr); public native @Cast("size_t") long AllocatedSize(@Const Pointer ptr); public native @Cast("tensorflow::int64") long AllocationId(@Const Pointer ptr); public native void ClearStats(); // If the underlying allocator tracks allocation sizes, this returns // a tuple where the first value is the total number of bytes // allocated through this wrapper, the second value is the high // watermark of bytes allocated through this wrapper and the third value is // the allocated bytes through this wrapper that are still alive. If the // underlying allocator does not track allocation sizes the first // value is the total number of bytes requested through this wrapper // and the second and the third are 0. 
// public native @ByVal @Cast("std::tuple<size_t,size_t,size_t>*") SizeTPointer GetSizes(); // After GetRecordsAndUnRef is called, the only further calls allowed // on this wrapper are calls to DeallocateRaw with pointers that // were allocated by this wrapper and have not yet been // deallocated. After this call completes and all allocated pointers // have been deallocated the wrapper will delete itself. public native @ByVal AllocRecordVector GetRecordsAndUnRef(); // Returns a copy of allocation records collected so far. public native @ByVal AllocRecordVector GetCurrentRecords(); }
/* * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ /* * * * * * * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at * http://creativecommons.org/publicdomain/zero/1.0/ */ package java.util.concurrent; import java.lang.Thread.UncaughtExceptionHandler; import java.lang.invoke.MethodHandles; import java.lang.invoke.VarHandle; import java.security.AccessController; import java.security.AccessControlContext; import java.security.Permission; import java.security.Permissions; import java.security.PrivilegedAction; import java.security.ProtectionDomain; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Predicate; import java.util.concurrent.locks.LockSupport; /** * An {@link ExecutorService} for running {@link ForkJoinTask}s. * A {@code ForkJoinPool} provides the entry point for submissions * from non-{@code ForkJoinTask} clients, as well as management and * monitoring operations. * * <p>A {@code ForkJoinPool} differs from other kinds of {@link * ExecutorService} mainly by virtue of employing * <em>work-stealing</em>: all threads in the pool attempt to find and * execute tasks submitted to the pool and/or created by other active * tasks (eventually blocking waiting for work if none exist). This * enables efficient processing when most tasks spawn other subtasks * (as do most {@code ForkJoinTask}s), as well as when many small * tasks are submitted to the pool from external clients. Especially * when setting <em>asyncMode</em> to true in constructors, {@code * ForkJoinPool}s may also be appropriate for use with event-style * tasks that are never joined. All worker threads are initialized * with {@link Thread#isDaemon} set {@code true}. * * <p>A static {@link #commonPool()} is available and appropriate for * most applications. 
The common pool is used by any ForkJoinTask that * is not explicitly submitted to a specified pool. Using the common * pool normally reduces resource usage (its threads are slowly * reclaimed during periods of non-use, and reinstated upon subsequent * use). * * <p>For applications that require separate or custom pools, a {@code * ForkJoinPool} may be constructed with a given target parallelism * level; by default, equal to the number of available processors. * The pool attempts to maintain enough active (or available) threads * by dynamically adding, suspending, or resuming internal worker * threads, even if some tasks are stalled waiting to join others. * However, no such adjustments are guaranteed in the face of blocked * I/O or other unmanaged synchronization. The nested {@link * ManagedBlocker} interface enables extension of the kinds of * synchronization accommodated. The default policies may be * overridden using a constructor with parameters corresponding to * those documented in class {@link ThreadPoolExecutor}. * * <p>In addition to execution and lifecycle control methods, this * class provides status check methods (for example * {@link #getStealCount}) that are intended to aid in developing, * tuning, and monitoring fork/join applications. Also, method * {@link #toString} returns indications of pool state in a * convenient form for informal monitoring. * * <p>As is the case with other ExecutorServices, there are three * main task execution methods summarized in the following table. * These are designed to be used primarily by clients not already * engaged in fork/join computations in the current pool. The main * forms of these methods accept instances of {@code ForkJoinTask}, * but overloaded forms also allow mixed execution of plain {@code * Runnable}- or {@code Callable}- based activities as well. 
However, * tasks that are already executing in a pool should normally instead * use the within-computation forms listed in the table unless using * async event-style tasks that are not usually joined, in which case * there is little difference among choice of methods. * * <table class="plain"> * <caption>Summary of task execution methods</caption> * <tr> * <td></td> * <th scope="col"> Call from non-fork/join clients</th> * <th scope="col"> Call from within fork/join computations</th> * </tr> * <tr> * <th scope="row" style="text-align:left"> Arrange async execution</th> * <td> {@link #execute(ForkJoinTask)}</td> * <td> {@link ForkJoinTask#fork}</td> * </tr> * <tr> * <th scope="row" style="text-align:left"> Await and obtain result</th> * <td> {@link #invoke(ForkJoinTask)}</td> * <td> {@link ForkJoinTask#invoke}</td> * </tr> * <tr> * <th scope="row" style="text-align:left"> Arrange exec and obtain Future</th> * <td> {@link #submit(ForkJoinTask)}</td> * <td> {@link ForkJoinTask#fork} (ForkJoinTasks <em>are</em> Futures)</td> * </tr> * </table> * * <p>The parameters used to construct the common pool may be controlled by * setting the following {@linkplain System#getProperty system properties}: * <ul> * <li>{@systemProperty java.util.concurrent.ForkJoinPool.common.parallelism} * - the parallelism level, a non-negative integer * <li>{@systemProperty java.util.concurrent.ForkJoinPool.common.threadFactory} * - the class name of a {@link ForkJoinWorkerThreadFactory}. * The {@linkplain ClassLoader#getSystemClassLoader() system class loader} * is used to load this class. * <li>{@systemProperty java.util.concurrent.ForkJoinPool.common.exceptionHandler} * - the class name of a {@link UncaughtExceptionHandler}. * The {@linkplain ClassLoader#getSystemClassLoader() system class loader} * is used to load this class. 
* <li>{@systemProperty java.util.concurrent.ForkJoinPool.common.maximumSpares} * - the maximum number of allowed extra threads to maintain target * parallelism (default 256). * </ul> * If no thread factory is supplied via a system property, then the * common pool uses a factory that uses the system class loader as the * {@linkplain Thread#getContextClassLoader() thread context class loader}. * In addition, if a {@link SecurityManager} is present, then * the common pool uses a factory supplying threads that have no * {@link Permissions} enabled. * * Upon any error in establishing these settings, default parameters * are used. It is possible to disable or limit the use of threads in * the common pool by setting the parallelism property to zero, and/or * using a factory that may return {@code null}. However doing so may * cause unjoined tasks to never be executed. * * <p><b>Implementation notes</b>: This implementation restricts the * maximum number of running threads to 32767. Attempts to create * pools with greater than the maximum number result in * {@code IllegalArgumentException}. * * <p>This implementation rejects submitted tasks (that is, by throwing * {@link RejectedExecutionException}) only when the pool is shut down * or internal resources have been exhausted. * * @since 1.7 * @author Doug Lea */ public class ForkJoinPool extends AbstractExecutorService { /* * Implementation Overview * * This class and its nested classes provide the main * functionality and control for a set of worker threads: * Submissions from non-FJ threads enter into submission queues. * Workers take these tasks and typically split them into subtasks * that may be stolen by other workers. Work-stealing based on * randomized scans generally leads to better throughput than * "work dealing" in which producers assign tasks to idle threads, * in part because threads that have finished other tasks before * the signalled thread wakes up (which can be a long time) can * take the task instead. 
Preference rules give first priority to * processing tasks from their own queues (LIFO or FIFO, depending * on mode), then to randomized FIFO steals of tasks in other * queues. This framework began as vehicle for supporting * tree-structured parallelism using work-stealing. Over time, * its scalability advantages led to extensions and changes to * better support more diverse usage contexts. Because most * internal methods and nested classes are interrelated, their * main rationale and descriptions are presented here; individual * methods and nested classes contain only brief comments about * details. * * WorkQueues * ========== * * Most operations occur within work-stealing queues (in nested * class WorkQueue). These are special forms of Deques that * support only three of the four possible end-operations -- push, * pop, and poll (aka steal), under the further constraints that * push and pop are called only from the owning thread (or, as * extended here, under a lock), while poll may be called from * other threads. (If you are unfamiliar with them, you probably * want to read Herlihy and Shavit's book "The Art of * Multiprocessor programming", chapter 16 describing these in * more detail before proceeding.) The main work-stealing queue * design is roughly similar to those in the papers "Dynamic * Circular Work-Stealing Deque" by Chase and Lev, SPAA 2005 * (http://research.sun.com/scalable/pubs/index.html) and * "Idempotent work stealing" by Michael, Saraswat, and Vechev, * PPoPP 2009 (http://portal.acm.org/citation.cfm?id=1504186). * The main differences ultimately stem from GC requirements that * we null out taken slots as soon as we can, to maintain as small * a footprint as possible even in programs generating huge * numbers of tasks. To accomplish this, we shift the CAS * arbitrating pop vs poll (steal) from being on the indices * ("base" and "top") to the slots themselves. 
* * Adding tasks then takes the form of a classic array push(task) * in a circular buffer: * q.array[q.top++ % length] = task; * * (The actual code needs to null-check and size-check the array, * uses masking, not mod, for indexing a power-of-two-sized array, * adds a release fence for publication, and possibly signals * waiting workers to start scanning -- see below.) Both a * successful pop and poll mainly entail a CAS of a slot from * non-null to null. * * The pop operation (always performed by owner) is: * if ((the task at top slot is not null) and * (CAS slot to null)) * decrement top and return task; * * And the poll operation (usually by a stealer) is * if ((the task at base slot is not null) and * (CAS slot to null)) * increment base and return task; * * There are several variants of each of these. Most uses occur * within operations that also interleave contention or emptiness * tracking or inspection of elements before extracting them, so * must interleave these with the above code. When performed by * owner, getAndSet is used instead of CAS (see for example method * nextLocalTask) which is usually more efficient, and possible * because the top index cannot independently change during the * operation. * * Memory ordering. See "Correct and Efficient Work-Stealing for * Weak Memory Models" by Le, Pop, Cohen, and Nardelli, PPoPP 2013 * (http://www.di.ens.fr/~zappa/readings/ppopp13.pdf) for an * analysis of memory ordering requirements in work-stealing * algorithms similar to (but different than) the one used here. * Extracting tasks in array slots via (fully fenced) CAS provides * primary synchronization. The base and top indices imprecisely * guide where to extract from. We do not usually require strict * orderings of array and index updates. Many index accesses use * plain mode, with ordering constrained by surrounding context * (usually with respect to element CASes or the two WorkQueue * volatile fields source and phase). 
When not otherwise already * constrained, reads of "base" by queue owners use acquire-mode, * and some externally callable methods preface accesses with * acquire fences. Additionally, to ensure that index update * writes are not coalesced or postponed in loops etc, "opaque" * mode is used in a few cases where timely writes are not * otherwise ensured. The "locked" versions of push- and pop- * based methods for shared queues differ from owned versions * because locking already forces some of the ordering. * * Because indices and slot contents cannot always be consistent, * a check that base == top indicates (momentary) emptiness, but * otherwise may err on the side of possibly making the queue * appear nonempty when a push, pop, or poll have not fully * committed, or making it appear empty when an update of top has * not yet been visibly written. (Method isEmpty() checks the * case of a partially completed removal of the last element.) * Because of this, the poll operation, considered individually, * is not wait-free. One thief cannot successfully continue until * another in-progress one (or, if previously empty, a push) * visibly completes. This can stall threads when required to * consume from a given queue (see method poll()). However, in * the aggregate, we ensure at least probabilistic * non-blockingness. If an attempted steal fails, a scanning * thief chooses a different random victim target to try next. So, * in order for one thief to progress, it suffices for any * in-progress poll or new push on any empty queue to complete. * * This approach also enables support of a user mode in which * local task processing is in FIFO, not LIFO order, simply by * using poll rather than pop. This can be useful in * message-passing frameworks in which tasks are never joined. * * WorkQueues are also used in a similar way for tasks submitted * to the pool. We cannot mix these tasks in the same queues used * by workers. 
Instead, we randomly associate submission queues * with submitting threads, using a form of hashing. The * ThreadLocalRandom probe value serves as a hash code for * choosing existing queues, and may be randomly repositioned upon * contention with other submitters. In essence, submitters act * like workers except that they are restricted to executing local * tasks that they submitted. Insertion of tasks in shared mode * requires a lock but we use only a simple spinlock (using field * phase), because submitters encountering a busy queue move to a * different position to use or create other queues -- they block * only when creating and registering new queues. Because it is * used only as a spinlock, unlocking requires only a "releasing" * store (using setRelease) unless otherwise signalling. * * Management * ========== * * The main throughput advantages of work-stealing stem from * decentralized control -- workers mostly take tasks from * themselves or each other, at rates that can exceed a billion * per second. The pool itself creates, activates (enables * scanning for and running tasks), deactivates, blocks, and * terminates threads, all with minimal central information. * There are only a few properties that we can globally track or * maintain, so we pack them into a small number of variables, * often maintaining atomicity without blocking or locking. * Nearly all essentially atomic control state is held in a few * volatile variables that are by far most often read (not * written) as status and consistency checks. We pack as much * information into them as we can. * * Field "ctl" contains 64 bits holding information needed to * atomically decide to add, enqueue (on an event queue), and * dequeue and release workers. To enable this packing, we * restrict maximum parallelism to (1<<15)-1 (which is far in * excess of normal operating range) to allow ids, counts, and * their negations (used for thresholding) to fit into 16bit * subfields. 
* * Field "mode" holds configuration parameters as well as lifetime * status, atomically and monotonically setting SHUTDOWN, STOP, * and finally TERMINATED bits. * * Field "workQueues" holds references to WorkQueues. It is * updated (only during worker creation and termination) under * lock (using field workerNamePrefix as lock), but is otherwise * concurrently readable, and accessed directly. We also ensure * that uses of the array reference itself never become too stale * in case of resizing, by arranging that (re-)reads are separated * by at least one acquiring read access. To simplify index-based * operations, the array size is always a power of two, and all * readers must tolerate null slots. Worker queues are at odd * indices. Shared (submission) queues are at even indices, up to * a maximum of 64 slots, to limit growth even if the array needs * to expand to add more workers. Grouping them together in this * way simplifies and speeds up task scanning. * * All worker thread creation is on-demand, triggered by task * submissions, replacement of terminated workers, and/or * compensation for blocked workers. However, all other support * code is set up to work with other policies. To ensure that we * do not hold on to worker references that would prevent GC, all * accesses to workQueues are via indices into the workQueues * array (which is one source of some of the messy code * constructions here). In essence, the workQueues array serves as * a weak reference mechanism. Thus for example the stack top * subfield of ctl stores indices, not references. * * Queuing Idle Workers. Unlike HPC work-stealing frameworks, we * cannot let workers spin indefinitely scanning for tasks when * none can be found immediately, and we cannot start/resume * workers unless there appear to be tasks available. On the * other hand, we must quickly prod them into action when new * tasks are submitted or generated. 
In many usages, ramp-up time * is the main limiting factor in overall performance, which is * compounded at program start-up by JIT compilation and * allocation. So we streamline this as much as possible. * * The "ctl" field atomically maintains total worker and * "released" worker counts, plus the head of the available worker * queue (actually stack, represented by the lower 32bit subfield * of ctl). Released workers are those known to be scanning for * and/or running tasks. Unreleased ("available") workers are * recorded in the ctl stack. These workers are made available for * signalling by enqueuing in ctl (see method runWorker). The * "queue" is a form of Treiber stack. This is ideal for * activating threads in most-recently used order, and improves * performance and locality, outweighing the disadvantages of * being prone to contention and inability to release a worker * unless it is topmost on stack. To avoid missed signal problems * inherent in any wait/signal design, available workers rescan * for (and if found run) tasks after enqueuing. Normally their * release status will be updated while doing so, but the released * worker ctl count may underestimate the number of active * threads. (However, it is still possible to determine quiescence * via a validation traversal -- see isQuiescent). After an * unsuccessful rescan, available workers are blocked until * signalled (see signalWork). The top stack state holds the * value of the "phase" field of the worker: its index and status, * plus a version counter that, in addition to the count subfields * (also serving as version stamps) provide protection against * Treiber stack ABA effects. * * Creating workers. To create a worker, we pre-increment counts * (serving as a reservation), and attempt to construct a * ForkJoinWorkerThread via its factory. 
Upon construction, the * new thread invokes registerWorker, where it constructs a * WorkQueue and is assigned an index in the workQueues array * (expanding the array if necessary). The thread is then started. * Upon any exception across these steps, or null return from * factory, deregisterWorker adjusts counts and records * accordingly. If a null return, the pool continues running with * fewer than the target number workers. If exceptional, the * exception is propagated, generally to some external caller. * Worker index assignment avoids the bias in scanning that would * occur if entries were sequentially packed starting at the front * of the workQueues array. We treat the array as a simple * power-of-two hash table, expanding as needed. The seedIndex * increment ensures no collisions until a resize is needed or a * worker is deregistered and replaced, and thereafter keeps * probability of collision low. We cannot use * ThreadLocalRandom.getProbe() for similar purposes here because * the thread has not started yet, but do so for creating * submission queues for existing external threads (see * externalPush). * * WorkQueue field "phase" is used by both workers and the pool to * manage and track whether a worker is UNSIGNALLED (possibly * blocked waiting for a signal). When a worker is enqueued its * phase field is set. Note that phase field updates lag queue CAS * releases so usage requires care -- seeing a negative phase does * not guarantee that the worker is available. When queued, the * lower 16 bits of scanState must hold its pool index. So we * place the index there upon initialization and otherwise keep it * there or restore it when necessary. * * The ctl field also serves as the basis for memory * synchronization surrounding activation. This uses a more * efficient version of a Dekker-like rule that task producers and * consumers sync with each other by both writing/CASing ctl (even * if to its current value). This would be extremely costly. 
So * we relax it in several ways: (1) Producers only signal when * their queue is possibly empty at some point during a push * operation. (2) Other workers propagate this signal * when they find tasks in a queue with size greater than one. (3) * Workers only enqueue after scanning (see below) and not finding * any tasks. (4) Rather than CASing ctl to its current value in * the common case where no action is required, we reduce write * contention by equivalently prefacing signalWork when called by * an external task producer using a memory access with * full-volatile semantics or a "fullFence". * * Almost always, too many signals are issued, in part because a * task producer cannot tell if some existing worker is in the * midst of finishing one task (or already scanning) and ready to * take another without being signalled. So the producer might * instead activate a different worker that does not find any * work, and then inactivates. This scarcely matters in * steady-state computations involving all workers, but can create * contention and bookkeeping bottlenecks during ramp-up, * ramp-down, and small computations involving only a few workers. * * Scanning. Method scan (from runWorker) performs top-level * scanning for tasks. (Similar scans appear in helpQuiesce and * pollScan.) Each scan traverses and tries to poll from each * queue starting at a random index. Scans are not performed in * ideal random permutation order, to reduce cacheline * contention. The pseudorandom generator need not have * high-quality statistical properties in the long term, but just * within computations; We use Marsaglia XorShifts (often via * ThreadLocalRandom.nextSecondarySeed), which are cheap and * suffice. Scanning also includes contention reduction: When * scanning workers fail to extract an apparently existing task, * they soon restart at a different pseudorandom index. 
This form
     * of backoff improves throughput when many threads are trying to
     * take tasks from few queues, which can be common in some usages.
     * Scans do not otherwise explicitly take into account core
     * affinities, loads, cache localities, etc. However, they do
     * exploit temporal locality (which usually approximates these) by
     * preferring to re-poll from the same queue after a successful
     * poll before trying others (see method topLevelExec). However
     * this preference is bounded (see TOP_BOUND_SHIFT) as a safeguard
     * against infinitely unfair looping under unbounded user task
     * recursion, and also to reduce long-term contention when many
     * threads poll few queues holding many small tasks. The bound is
     * high enough to avoid much impact on locality and scheduling
     * overhead.
     *
     * Trimming workers. To release resources after periods of lack of
     * use, a worker starting to wait when the pool is quiescent will
     * time out and terminate (see method runWorker) if the pool has
     * remained quiescent for the period given by field keepAlive.
     *
     * Shutdown and Termination. A call to shutdownNow invokes
     * tryTerminate to atomically set a runState bit. The calling
     * thread, as well as every other worker thereafter terminating,
     * helps terminate others by cancelling their unprocessed tasks,
     * and waking them up, doing so repeatedly until stable. Calls to
     * non-abrupt shutdown() preface this by checking whether
     * termination should commence by sweeping through queues (until
     * stable) to ensure lack of in-flight submissions and workers
     * about to process them before triggering the "STOP" phase of
     * termination.
     *
     * Joining Tasks
     * =============
     *
     * Any of several actions may be taken when one worker is waiting
     * to join a task stolen (or always held) by another. Because we
     * are multiplexing many tasks on to a pool of workers, we can't
     * always just let them block (as in Thread.join).
We also cannot * just reassign the joiner's run-time stack with another and * replace it later, which would be a form of "continuation", that * even if possible is not necessarily a good idea since we may * need both an unblocked task and its continuation to progress. * Instead we combine two tactics: * * Helping: Arranging for the joiner to execute some task that it * would be running if the steal had not occurred. * * Compensating: Unless there are already enough live threads, * method tryCompensate() may create or re-activate a spare * thread to compensate for blocked joiners until they unblock. * * A third form (implemented in tryRemoveAndExec) amounts to * helping a hypothetical compensator: If we can readily tell that * a possible action of a compensator is to steal and execute the * task being joined, the joining thread can do so directly, * without the need for a compensation thread. * * The ManagedBlocker extension API can't use helping so relies * only on compensation in method awaitBlocker. * * The algorithm in awaitJoin entails a form of "linear helping". * Each worker records (in field source) the id of the queue from * which it last stole a task. The scan in method awaitJoin uses * these markers to try to find a worker to help (i.e., steal back * a task from and execute it) that could hasten completion of the * actively joined task. Thus, the joiner executes a task that * would be on its own local deque if the to-be-joined task had * not been stolen. This is a conservative variant of the approach * described in Wagner & Calder "Leapfrogging: a portable * technique for implementing efficient futures" SIGPLAN Notices, * 1993 (http://portal.acm.org/citation.cfm?id=155354). It differs * mainly in that we only record queue ids, not full dependency * links. This requires a linear scan of the workQueues array to * locate stealers, but isolates cost to when it is needed, rather * than adding to per-task overhead. 
Searches can fail to locate
     * stealers when GC stalls and the like delay recording sources.
     * Further, even when accurately identified, stealers might not
     * ever produce a task that the joiner can in turn help with. So,
     * compensation is tried upon failure to find tasks to run.
     *
     * Compensation does not by default aim to keep exactly the target
     * parallelism number of unblocked threads running at any given
     * time. Some previous versions of this class employed immediate
     * compensations for any blocked join. However, in practice, the
     * vast majority of blockages are transient byproducts of GC and
     * other JVM or OS activities that are made worse by replacement
     * when they cause longer-term oversubscription. Rather than
     * impose arbitrary policies, we allow users to override the
     * default of only adding threads upon apparent starvation. The
     * compensation mechanism may also be bounded. Bounds for the
     * commonPool (see COMMON_MAX_SPARES) better enable JVMs to cope
     * with programming errors and abuse before running out of
     * resources to do so.
     *
     * Common Pool
     * ===========
     *
     * The static common pool always exists after static
     * initialization. Since it (or any other created pool) need
     * never be used, we minimize initial construction overhead and
     * footprint to the setup of about a dozen fields.
     *
     * When external threads submit to the common pool, they can
     * perform subtask processing (see externalHelpComplete and
     * related methods) upon joins. This caller-helps policy makes it
     * sensible to set common pool parallelism level to one (or more)
     * less than the total number of available cores, or even zero for
     * pure caller-runs. We do not need to record whether external
     * submissions are to the common pool -- if not, external help
     * methods return quickly.
These submitters would otherwise be * blocked waiting for completion, so the extra effort (with * liberally sprinkled task status checks) in inapplicable cases * amounts to an odd form of limited spin-wait before blocking in * ForkJoinTask.join. * * As a more appropriate default in managed environments, unless * overridden by system properties, we use workers of subclass * InnocuousForkJoinWorkerThread when there is a SecurityManager * present. These workers have no permissions set, do not belong * to any user-defined ThreadGroup, and erase all ThreadLocals * after executing any top-level task (see * WorkQueue.afterTopLevelExec). The associated mechanics (mainly * in ForkJoinWorkerThread) may be JVM-dependent and must access * particular Thread class fields to achieve this effect. * * Memory placement * ================ * * Performance can be very sensitive to placement of instances of * ForkJoinPool and WorkQueues and their queue arrays. To reduce * false-sharing impact, the @Contended annotation isolates * adjacent WorkQueue instances, as well as the ForkJoinPool.ctl * field. WorkQueue arrays are allocated (by their threads) with * larger initial sizes than most ever need, mostly to reduce * false sharing with current garbage collectors that use cardmark * tables. * * Style notes * =========== * * Memory ordering relies mainly on VarHandles. This can be * awkward and ugly, but also reflects the need to control * outcomes across the unusual cases that arise in very racy code * with very few invariants. All fields are read into locals * before use, and null-checked if they are references. Array * accesses using masked indices include checks (that are always * true) that the array length is non-zero to avoid compilers * inserting more expensive traps. This is usually done in a * "C"-like style of listing declarations at the heads of methods * or blocks, and using inline assignments on first encounter. 
     * Nearly all explicit checks lead to bypass/return, not exception
     * throws, because they may legitimately arise due to
     * cancellation/revocation during shutdown.
     *
     * There is a lot of representation-level coupling among classes
     * ForkJoinPool, ForkJoinWorkerThread, and ForkJoinTask. The
     * fields of WorkQueue maintain data structures managed by
     * ForkJoinPool, so are directly accessed. There is little point
     * trying to reduce this, since any associated future changes in
     * representations will need to be accompanied by algorithmic
     * changes anyway. Several methods intrinsically sprawl because
     * they must accumulate sets of consistent reads of fields held in
     * local variables. Some others are artificially broken up to
     * reduce producer/consumer imbalances due to dynamic compilation.
     * There are also other coding oddities (including several
     * unnecessary-looking hoisted null checks) that help some methods
     * perform reasonably even when interpreted (not compiled).
     *
     * The order of declarations in this file is (with a few exceptions):
     * (1) Static utility functions
     * (2) Nested (static) classes
     * (3) Static fields
     * (4) Fields, along with constants used when unpacking some of them
     * (5) Internal control methods
     * (6) Callbacks and other support for ForkJoinTask methods
     * (7) Exported methods
     * (8) Static block initializing statics in minimally dependent order
     */

    // Static utilities

    /**
     * If there is a security manager, makes sure caller has
     * permission to modify threads.
     *
     * @throws SecurityException if a security manager exists and
     *         denies {@code modifyThreadPermission} to the caller
     */
    private static void checkPermission() {
        SecurityManager security = System.getSecurityManager();
        if (security != null)
            security.checkPermission(modifyThreadPermission);
    }

    // Nested classes

    /**
     * Factory for creating new {@link ForkJoinWorkerThread}s.
     * A {@code ForkJoinWorkerThreadFactory} must be defined and used
     * for {@code ForkJoinWorkerThread} subclasses that extend base
     * functionality or initialize threads with different contexts.
     */
    public static interface ForkJoinWorkerThreadFactory {
        /**
         * Returns a new worker thread operating in the given pool.
         * Returning null or throwing an exception may result in tasks
         * never being executed. If this method throws an exception,
         * it is relayed to the caller of the method (for example
         * {@code execute}) causing attempted thread creation. If this
         * method returns null or throws an exception, it is not
         * retried until the next attempted creation (for example
         * another call to {@code execute}).
         *
         * @param pool the pool this thread works in
         * @return the new worker thread, or {@code null} if the request
         *         to create a thread is rejected
         * @throws NullPointerException if the pool is null
         */
        public ForkJoinWorkerThread newThread(ForkJoinPool pool);
    }

    /**
     * Returns an AccessControlContext granting only the given
     * permissions (via a single ProtectionDomain), for use in
     * doPrivileged calls that must run with restricted privileges.
     */
    static AccessControlContext contextWithPermissions(Permission ... perms) {
        Permissions permissions = new Permissions();
        for (Permission perm : perms)
            permissions.add(perm);
        return new AccessControlContext(
            new ProtectionDomain[] { new ProtectionDomain(null, permissions) });
    }

    /**
     * Default ForkJoinWorkerThreadFactory implementation; creates a
     * new ForkJoinWorkerThread using the system class loader as the
     * thread context class loader.
     */
    private static final class DefaultForkJoinWorkerThreadFactory
        implements ForkJoinWorkerThreadFactory {
        // Only the permissions needed by threads this factory creates
        private static final AccessControlContext ACC = contextWithPermissions(
            new RuntimePermission("getClassLoader"),
            new RuntimePermission("setContextClassLoader"));

        public final ForkJoinWorkerThread newThread(ForkJoinPool pool) {
            return AccessController.doPrivileged(
                new PrivilegedAction<>() {
                    public ForkJoinWorkerThread run() {
                        return new ForkJoinWorkerThread(
                            pool, ClassLoader.getSystemClassLoader()); }},
                ACC);
        }
    }

    // Constants shared across ForkJoinPool and WorkQueue

    // Bounds
    static final int SWIDTH       = 16;            // width of short
    static final int SMASK        = 0xffff;        // short bits == max index
    static final int MAX_CAP      = 0x7fff;        // max #workers - 1
    static final int SQMASK       = 0x007e;        // max 64 (even) slots

    // Masks and units for WorkQueue.phase and ctl sp subfield
    static final int UNSIGNALLED  = 1 << 31;       // must be negative
    static final int SS_SEQ       = 1 << 16;       // version count
    static final int QLOCK        = 1;             // must be 1

    // Mode bits and sentinels, some also used in WorkQueue id and source fields
    static final int OWNED        = 1;             // queue has owner thread
    static final int FIFO         = 1 << 16;       // fifo queue or access mode
    static final int SHUTDOWN     = 1 << 18;       // lifecycle bit (field mode)
    static final int TERMINATED   = 1 << 19;       // lifecycle bit; set last
    static final int STOP         = 1 << 31;       // must be negative
    static final int QUIET        = 1 << 30;       // not scanning or working
    static final int DORMANT      = QUIET | UNSIGNALLED;

    /**
     * Initial capacity of work-stealing queue array.
     * Must be a power of two, at least 2.
     */
    static final int INITIAL_QUEUE_CAPACITY = 1 << 13;

    /**
     * Maximum capacity for queue arrays. Must be a power of two less
     * than or equal to 1 << (31 - width of array entry) to ensure
     * lack of wraparound of index calculations, but defined to a
     * value a bit less than this to help users trap runaway programs
     * before saturating systems.
     */
    static final int MAXIMUM_QUEUE_CAPACITY = 1 << 26; // 64M

    /**
     * The maximum number of top-level polls per worker before
     * checking other queues, expressed as a bit shift. See above for
     * rationale.
     */
    static final int TOP_BOUND_SHIFT = 10;

    /**
     * Queues supporting work-stealing as well as external task
     * submission. See above for descriptions and algorithms.
     */
    @jdk.internal.vm.annotation.Contended
    static final class WorkQueue {
        volatile int source;       // source queue id, or sentinel
        int id;                    // pool index, mode, tag
        int base;                  // index of next slot for poll
        int top;                   // index of next slot for push
        volatile int phase;        // versioned, negative: queued, 1: locked
        int stackPred;             // pool stack (ctl) predecessor link
        int nsteals;               // number of steals
        ForkJoinTask<?>[] array;   // the queued tasks; power of 2 size
        final ForkJoinPool pool;   // the containing pool (may be null)
        final ForkJoinWorkerThread owner; // owning thread or null if shared

        WorkQueue(ForkJoinPool pool, ForkJoinWorkerThread owner) {
            this.pool = pool;
            this.owner = owner;
            // Place indices in the center of array (that is not yet allocated)
            base = top = INITIAL_QUEUE_CAPACITY >>> 1;
        }

        /**
         * Tries to lock shared queue by CASing phase field.
         *
         * @return true if the phase lock was acquired
         */
        final boolean tryLockPhase() {
            return PHASE.compareAndSet(this, 0, 1);
        }

        /**
         * Releases the phase lock acquired by tryLockPhase, using a
         * releasing store (see "spinlock" discussion in overview).
         */
        final void releasePhaseLock() {
            PHASE.setRelease(this, 0);
        }

        /**
         * Returns an exportable index (used by ForkJoinWorkerThread).
         */
        final int getPoolIndex() {
            return (id & 0xffff) >>> 1; // ignore odd/even tag bit
        }

        /**
         * Returns the approximate number of tasks in the queue.
         */
        final int queueSize() {
            int n = (int)BASE.getAcquire(this) - top;
            return (n >= 0) ? 0 : -n; // ignore transient negative
        }

        /**
         * Provides a more accurate estimate of whether this queue has
         * any tasks than does queueSize, by checking whether a
         * near-empty queue has at least one unclaimed task.
         */
        final boolean isEmpty() {
            ForkJoinTask<?>[] a; int n, cap, b;
            VarHandle.acquireFence(); // needed by external callers
            return ((n = (b = base) - top) >= 0 || // possibly one task
                    (n == -1 && ((a = array) == null ||
                                 (cap = a.length) == 0 ||
                                 a[(cap - 1) & b] == null)));
        }

        /**
         * Pushes a task. Call only by owner in unshared queues.
         *
         * @param task the task. Caller must ensure non-null.
         * @throws RejectedExecutionException if array cannot be resized
         */
        final void push(ForkJoinTask<?> task) {
            ForkJoinTask<?>[] a;
            int s = top, d = s - base, cap, m;
            ForkJoinPool p = pool;
            if ((a = array) != null && (cap = a.length) > 0) {
                QA.setRelease(a, (m = cap - 1) & s, task);
                top = s + 1;
                if (d == m)                // queue is full after this push
                    growArray(false);
                else if (QA.getAcquire(a, m & (s - 1)) == null && p != null) {
                    VarHandle.fullFence(); // was empty
                    p.signalWork(null);
                }
            }
        }

        /**
         * Version of push for shared queues. Call only with phase lock held.
         *
         * @return true if should signal work
         */
        final boolean lockedPush(ForkJoinTask<?> task) {
            ForkJoinTask<?>[] a;
            boolean signal = false;
            int s = top, d = s - base, cap, m;
            if ((a = array) != null && (cap = a.length) > 0) {
                a[(m = (cap - 1)) & s] = task;
                top = s + 1;
                if (d == m)                 // full: resize (releases lock)
                    growArray(true);
                else {
                    phase = 0; // full volatile unlock
                    if (((s - base) & ~1) == 0) // size 0 or 1
                        signal = true;
                }
            }
            return signal;
        }

        /**
         * Doubles the capacity of array. Call either by owner or with
         * lock held -- it is OK for base, but not top, to move while
         * resizings are in progress.
         */
        final void growArray(boolean locked) {
            ForkJoinTask<?>[] newA = null;
            try {
                ForkJoinTask<?>[] oldA; int oldSize, newSize;
                if ((oldA = array) != null && (oldSize = oldA.length) > 0 &&
                    (newSize = oldSize << 1) <= MAXIMUM_QUEUE_CAPACITY &&
                    newSize > 0) {
                    try {
                        newA = new ForkJoinTask<?>[newSize];
                    } catch (OutOfMemoryError ex) {
                        // newA stays null; reported below as
                        // RejectedExecutionException
                    }
                    if (newA != null) { // poll from old array, push to new
                        int oldMask = oldSize - 1, newMask = newSize - 1;
                        for (int s = top - 1, k = oldMask; k >= 0; --k) {
                            ForkJoinTask<?> x = (ForkJoinTask<?>)
                                QA.getAndSet(oldA, s & oldMask, null);
                            if (x != null)
                                newA[s-- & newMask] = x;
                            else
                                break;
                        }
                        array = newA;
                        VarHandle.releaseFence();
                    }
                }
            } finally {
                if (locked)
                    phase = 0; // release phase lock even on failure
            }
            if (newA == null)
                throw new RejectedExecutionException("Queue capacity exceeded");
        }

        /**
         * Takes next task, if one exists, in FIFO order.
         */
        final ForkJoinTask<?> poll() {
            int b, k, cap; ForkJoinTask<?>[] a;
            while ((a = array) != null && (cap = a.length) > 0 &&
                   top - (b = base) > 0) {
                ForkJoinTask<?> t = (ForkJoinTask<?>)
                    QA.getAcquire(a, k = (cap - 1) & b);
                if (base == b++) {
                    if (t == null)
                        Thread.yield(); // await index advance
                    else if (QA.compareAndSet(a, k, t, null)) {
                        BASE.setOpaque(this, b);
                        return t;
                    }
                }
            }
            return null;
        }

        /**
         * Takes next task, if one exists, in order specified by mode.
         */
        final ForkJoinTask<?> nextLocalTask() {
            ForkJoinTask<?> t = null;
            int md = id, b, s, d, cap; ForkJoinTask<?>[] a;
            if ((a = array) != null && (cap = a.length) > 0 &&
                (d = (s = top) - (b = base)) > 0) {
                if ((md & FIFO) == 0 || d == 1) {
                    if ((t = (ForkJoinTask<?>)
                         QA.getAndSet(a, (cap - 1) & --s, null)) != null)
                        TOP.setOpaque(this, s);
                }
                else if ((t = (ForkJoinTask<?>)
                          QA.getAndSet(a, (cap - 1) & b++, null)) != null) {
                    BASE.setOpaque(this, b);
                }
                else // on contention in FIFO mode, use regular poll
                    t = poll();
            }
            return t;
        }

        /**
         * Returns next task, if one exists, in order specified by mode.
         */
        final ForkJoinTask<?> peek() {
            int cap; ForkJoinTask<?>[] a;
            return ((a = array) != null && (cap = a.length) > 0) ?
                a[(cap - 1) & ((id & FIFO) != 0 ? base : top - 1)] : null;
        }

        /**
         * Pops the given task only if it is at the current top.
         */
        final boolean tryUnpush(ForkJoinTask<?> task) {
            boolean popped = false;
            int s, cap; ForkJoinTask<?>[] a;
            if ((a = array) != null && (cap = a.length) > 0 &&
                (s = top) != base &&
                (popped = QA.compareAndSet(a, (cap - 1) & --s, task, null)))
                TOP.setOpaque(this, s);
            return popped;
        }

        /**
         * Shared version of tryUnpush; acquires the phase lock and
         * revalidates top/array before attempting removal.
         */
        final boolean tryLockedUnpush(ForkJoinTask<?> task) {
            boolean popped = false;
            int s = top - 1, k, cap; ForkJoinTask<?>[] a;
            if ((a = array) != null && (cap = a.length) > 0 &&
                a[k = (cap - 1) & s] == task && tryLockPhase()) {
                if (top == s + 1 && array == a &&
                    (popped = QA.compareAndSet(a, k, task, null)))
                    top = s;
                releasePhaseLock();
            }
            return popped;
        }

        /**
         * Removes and cancels all known tasks, ignoring any exceptions.
         */
        final void cancelAll() {
            for (ForkJoinTask<?> t; (t = poll()) != null; )
                ForkJoinTask.cancelIgnoringExceptions(t);
        }

        // Specialized execution methods

        /**
         * Runs the given (stolen) task if nonnull, as well as
         * remaining local tasks and others available from the given
         * queue, up to bound n (to avoid infinite unfairness).
         */
        final void topLevelExec(ForkJoinTask<?> t, WorkQueue q, int n) {
            int nstolen = 1;
            for (int j = 0;;) {
                if (t != null)
                    t.doExec();
                if (j++ <= n)       // take local tasks until bound reached
                    t = nextLocalTask();
                else {
                    j = 0;
                    t = null;
                }
                if (t == null) {
                    if (q != null && (t = q.poll()) != null) {
                        ++nstolen;  // stole another task from q
                        j = 0;
                    }
                    else if (j != 0)
                        break;
                }
            }
            ForkJoinWorkerThread thread = owner;
            nsteals += nstolen;
            source = 0;
            if (thread != null)
                thread.afterTopLevelExec();
        }

        /**
         * If present, removes task from queue and executes it.
         */
        final void tryRemoveAndExec(ForkJoinTask<?> task) {
            ForkJoinTask<?>[] a; int s, cap;
            if ((a = array) != null && (cap = a.length) > 0 &&
                (s = top) - base > 0) { // traverse from top
                for (int m = cap - 1, ns = s - 1, i = ns; ; --i) {
                    int index = i & m;
                    ForkJoinTask<?> t = (ForkJoinTask<?>)QA.get(a, index);
                    if (t == null)
                        break;
                    else if (t == task) {
                        if (QA.compareAndSet(a, index, t, null)) {
                            top = ns;   // safely shift down
                            for (int j = i; j != ns; ++j) {
                                ForkJoinTask<?> f;
                                int pindex = (j + 1) & m;
                                f = (ForkJoinTask<?>)QA.get(a, pindex);
                                QA.setVolatile(a, pindex, null);
                                int jindex = j & m;
                                QA.setRelease(a, jindex, f);
                            }
                            VarHandle.releaseFence();
                            t.doExec();
                        }
                        break;
                    }
                }
            }
        }

        /**
         * Tries to pop and run tasks within the target's computation
         * until done, not found, or limit exceeded.
         *
         * @param task root of CountedCompleter computation
         * @param limit max runs, or zero for no limit
         * @param shared true if must lock to extract task
         * @return task status on exit
         */
        final int helpCC(CountedCompleter<?> task, int limit, boolean shared) {
            int status = 0;
            if (task != null && (status = task.status) >= 0) {
                int s, k, cap; ForkJoinTask<?>[] a;
                while ((a = array) != null && (cap = a.length) > 0 &&
                       (s = top) - base > 0) {
                    CountedCompleter<?> v = null;
                    ForkJoinTask<?> o = a[k = (cap - 1) & (s - 1)];
                    if (o instanceof CountedCompleter) {
                        CountedCompleter<?> t = (CountedCompleter<?>)o;
                        // walk completer chain to check whether the top
                        // task belongs to the given task's computation
                        for (CountedCompleter<?> f = t;;) {
                            if (f != task) {
                                if ((f = f.completer) == null)
                                    break;
                            }
                            else if (shared) {
                                if (tryLockPhase()) {
                                    if (top == s && array == a &&
                                        QA.compareAndSet(a, k, t, null)) {
                                        top = s - 1;
                                        v = t;
                                    }
                                    releasePhaseLock();
                                }
                                break;
                            }
                            else {
                                if (QA.compareAndSet(a, k, t, null)) {
                                    top = s - 1;
                                    v = t;
                                }
                                break;
                            }
                        }
                    }
                    if (v != null)
                        v.doExec();
                    if ((status = task.status) < 0 || v == null ||
                        (limit != 0 && --limit == 0))
                        break;
                }
            }
            return status;
        }

        /**
         * Tries to poll and run AsynchronousCompletionTasks until
         * none found or blocker is released.
         *
         * @param blocker the blocker
         */
        final void helpAsyncBlocker(ManagedBlocker blocker) {
            if (blocker != null) {
                int b, k, cap; ForkJoinTask<?>[] a; ForkJoinTask<?> t;
                while ((a = array) != null && (cap = a.length) > 0 &&
                       top - (b = base) > 0) {
                    t = (ForkJoinTask<?>)QA.getAcquire(a, k = (cap - 1) & b);
                    if (blocker.isReleasable())
                        break;
                    else if (base == b++ && t != null) {
                        if (!(t instanceof CompletableFuture.
                              AsynchronousCompletionTask))
                            break;
                        else if (QA.compareAndSet(a, k, t, null)) {
                            BASE.setOpaque(this, b);
                            t.doExec();
                        }
                    }
                }
            }
        }

        /**
         * Returns true if owned and not known to be blocked.
         */
        final boolean isApparentlyUnblocked() {
            Thread wt; Thread.State s;
            return ((wt = owner) != null &&
                    (s = wt.getState()) != Thread.State.BLOCKED &&
                    s != Thread.State.WAITING &&
                    s != Thread.State.TIMED_WAITING);
        }

        // VarHandle mechanics.
        static final VarHandle PHASE;
        static final VarHandle BASE;
        static final VarHandle TOP;
        static {
            try {
                MethodHandles.Lookup l = MethodHandles.lookup();
                PHASE = l.findVarHandle(WorkQueue.class, "phase", int.class);
                BASE = l.findVarHandle(WorkQueue.class, "base", int.class);
                TOP = l.findVarHandle(WorkQueue.class, "top", int.class);
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }
    }

    // static fields (initialized in static initializer below)

    /**
     * Creates a new ForkJoinWorkerThread. This factory is used unless
     * overridden in ForkJoinPool constructors.
     */
    public static final ForkJoinWorkerThreadFactory
        defaultForkJoinWorkerThreadFactory;

    /**
     * Permission required for callers of methods that may start or
     * kill threads.
     */
    static final RuntimePermission modifyThreadPermission;

    /**
     * Common (static) pool. Non-null for public use unless a static
     * construction exception, but internal usages null-check on use
     * to paranoically avoid potential initialization circularities
     * as well as to simplify generated code.
     */
    static final ForkJoinPool common;

    /**
     * Common pool parallelism.
     * To allow simpler use and management
     * when common pool threads are disabled, we allow the underlying
     * common.parallelism field to be zero, but in that case still report
     * parallelism as 1 to reflect resulting caller-runs mechanics.
     */
    static final int COMMON_PARALLELISM;

    /**
     * Limit on spare thread construction in tryCompensate.
     */
    private static final int COMMON_MAX_SPARES;

    /**
     * Sequence number for creating workerNamePrefix.
     */
    private static int poolNumberSequence;

    /**
     * Returns the next sequence number. We don't expect this to
     * ever contend, so use simple builtin sync.
     */
    private static final synchronized int nextPoolId() {
        return ++poolNumberSequence;
    }

    // static configuration constants

    /**
     * Default idle timeout value (in milliseconds) for the thread
     * triggering quiescence to park waiting for new work.
     */
    private static final long DEFAULT_KEEPALIVE = 60_000L;

    /**
     * Undershoot tolerance for idle timeouts.
     */
    private static final long TIMEOUT_SLOP = 20L;

    /**
     * The default value for COMMON_MAX_SPARES. Overridable using the
     * "java.util.concurrent.ForkJoinPool.common.maximumSpares" system
     * property. The default value is far in excess of normal
     * requirements, but also far short of MAX_CAP and typical OS
     * thread limits, so allows JVMs to catch misuse/abuse before
     * running out of resources needed to do so.
     */
    private static final int DEFAULT_COMMON_MAX_SPARES = 256;

    /**
     * Increment for seed generators. See class ThreadLocal for
     * explanation.
     */
    private static final int SEED_INCREMENT = 0x9e3779b9;

    /*
     * Bits and masks for field ctl, packed with 4 16 bit subfields:
     * RC: Number of released (unqueued) workers minus target parallelism
     * TC: Number of total workers minus target parallelism
     * SS: version count and status of top waiting thread
     * ID: poolIndex of top of Treiber stack of waiters
     *
     * When convenient, we can extract the lower 32 stack top bits
     * (including version bits) as sp=(int)ctl.
     * The offsets of counts
     * by the target parallelism and the positionings of fields makes
     * it possible to perform the most common checks via sign tests of
     * fields: When ac is negative, there are not enough unqueued
     * workers, when tc is negative, there are not enough total
     * workers. When sp is non-zero, there are waiting workers. To
     * deal with possibly negative fields, we use casts in and out of
     * "short" and/or signed shifts to maintain signedness.
     *
     * Because it occupies uppermost bits, we can add one release count
     * using getAndAddLong of RC_UNIT, rather than CAS, when returning
     * from a blocked join. Other updates entail multiple subfields
     * and masking, requiring CAS.
     *
     * The limits packed in field "bounds" are also offset by the
     * parallelism level to make them comparable to the ctl rc and tc
     * fields.
     */

    // Lower and upper word masks
    private static final long SP_MASK    = 0xffffffffL;
    private static final long UC_MASK    = ~SP_MASK;

    // Release counts
    private static final int  RC_SHIFT   = 48;
    private static final long RC_UNIT    = 0x0001L << RC_SHIFT;
    private static final long RC_MASK    = 0xffffL << RC_SHIFT;

    // Total counts
    private static final int  TC_SHIFT   = 32;
    private static final long TC_UNIT    = 0x0001L << TC_SHIFT;
    private static final long TC_MASK    = 0xffffL << TC_SHIFT;
    private static final long ADD_WORKER = 0x0001L << (TC_SHIFT + 15); // sign

    // Instance fields

    volatile long stealCount;            // collects worker nsteals
    final long keepAlive;                // milliseconds before dropping if idle
    int indexSeed;                       // next worker index
    final int bounds;                    // min, max threads packed as shorts
    volatile int mode;                   // parallelism, runstate, queue mode
    WorkQueue[] workQueues;              // main registry
    final String workerNamePrefix;       // for worker thread string; sync lock
    final ForkJoinWorkerThreadFactory factory;
    final UncaughtExceptionHandler ueh;  // per-worker UEH
    final Predicate<? super ForkJoinPool> saturate;

    @jdk.internal.vm.annotation.Contended("fjpctl") // segregate
    volatile long ctl;                   // main pool control

    // Creating, registering and deregistering workers

    /**
     * Tries to construct and start one worker. Assumes that total
     * count has already been incremented as a reservation. Invokes
     * deregisterWorker on any failure.
     *
     * @return true if successful
     */
    private boolean createWorker() {
        ForkJoinWorkerThreadFactory fac = factory;
        Throwable ex = null;
        ForkJoinWorkerThread wt = null;
        try {
            if (fac != null && (wt = fac.newThread(this)) != null) {
                wt.start();
                return true;
            }
        } catch (Throwable rex) {
            ex = rex; // recorded and handled by deregisterWorker below
        }
        deregisterWorker(wt, ex);
        return false;
    }

    /**
     * Tries to add one worker, incrementing ctl counts before doing
     * so, relying on createWorker to back out on failure.
     *
     * @param c incoming ctl value, with total count negative and no
     * idle workers.  On CAS failure, c is refreshed and retried if
     * this holds (otherwise, a new worker is not needed).
     */
    private void tryAddWorker(long c) {
        do {
            long nc = ((RC_MASK & (c + RC_UNIT)) |
                       (TC_MASK & (c + TC_UNIT)));
            if (ctl == c && CTL.compareAndSet(this, c, nc)) {
                createWorker();
                break;
            }
            // retry only while total count is still negative (ADD_WORKER
            // sign bit set) and there are no idle workers (sp == 0)
        } while (((c = ctl) & ADD_WORKER) != 0L && (int)c == 0);
    }

    /**
     * Callback from ForkJoinWorkerThread constructor to establish and
     * record its WorkQueue.
*
 * @param wt the worker thread
 * @return the worker's queue
 */
final WorkQueue registerWorker(ForkJoinWorkerThread wt) {
    UncaughtExceptionHandler handler;
    wt.setDaemon(true);                             // configure thread
    if ((handler = ueh) != null)
        wt.setUncaughtExceptionHandler(handler);
    int tid = 0;                                    // for thread name
    int idbits = mode & FIFO;
    String prefix = workerNamePrefix;
    WorkQueue w = new WorkQueue(this, wt);
    if (prefix != null) {
        // workerNamePrefix doubles as the lock protecting workQueues updates
        synchronized (prefix) {
            WorkQueue[] ws = workQueues; int n;
            int s = indexSeed += SEED_INCREMENT;
            idbits |= (s & ~(SMASK | FIFO | DORMANT));
            if (ws != null && (n = ws.length) > 1) {
                int m = n - 1;
                tid = m & ((s << 1) | 1);           // odd-numbered indices
                for (int probes = n >>> 1;;) {      // find empty slot
                    WorkQueue q;
                    if ((q = ws[tid]) == null || q.phase == QUIET)
                        break;
                    else if (--probes == 0) {
                        tid = n | 1;                // resize below
                        break;
                    }
                    else
                        tid = (tid + 2) & m;
                }
                w.phase = w.id = tid | idbits;      // now publishable

                if (tid < n)
                    ws[tid] = w;
                else {                              // expand array
                    int an = n << 1;
                    WorkQueue[] as = new WorkQueue[an];
                    as[tid] = w;
                    int am = an - 1;
                    for (int j = 0; j < n; ++j) {
                        WorkQueue v;                // copy external queue
                        if ((v = ws[j]) != null)    // position may change
                            as[v.id & am & SQMASK] = v;
                        if (++j >= n)
                            break;
                        as[j] = ws[j];              // copy worker
                    }
                    workQueues = as;
                }
            }
        }
        wt.setName(prefix.concat(Integer.toString(tid)));
    }
    return w;
}

/**
 * Final callback from terminating worker, as well as upon failure
 * to construct or start a worker. Removes record of worker from
 * array, and adjusts counts. If pool is shutting down, tries to
 * complete termination.
*
 * @param wt the worker thread, or null if construction failed
 * @param ex the exception causing failure, or null if none
 */
final void deregisterWorker(ForkJoinWorkerThread wt, Throwable ex) {
    WorkQueue w = null;
    int phase = 0;
    if (wt != null && (w = wt.workQueue) != null) {
        Object lock = workerNamePrefix;
        int wid = w.id;
        long ns = (long)w.nsteals & 0xffffffffL;
        if (lock != null) {
            synchronized (lock) {
                WorkQueue[] ws; int n, i;    // remove index from array
                if ((ws = workQueues) != null && (n = ws.length) > 0 &&
                    ws[i = wid & (n - 1)] == w)
                    ws[i] = null;
                // fold this worker's steal count into the pool total
                stealCount += ns;
            }
        }
        phase = w.phase;
    }
    if (phase != QUIET) {                    // else pre-adjusted
        long c;                              // decrement counts
        do {} while (!CTL.weakCompareAndSet
                     (this, c = ctl, ((RC_MASK & (c - RC_UNIT)) |
                                      (TC_MASK & (c - TC_UNIT)) |
                                      (SP_MASK & c))));
    }
    if (w != null)
        w.cancelAll();                       // cancel remaining tasks

    if (!tryTerminate(false, false) &&       // possibly replace worker
        w != null && w.array != null)        // avoid repeated failures
        signalWork(null);

    if (ex == null)                          // help clean on way out
        ForkJoinTask.helpExpungeStaleExceptions();
    else                                     // rethrow
        ForkJoinTask.rethrow(ex);
}

/**
 * Tries to create or release a worker if too few are running.
* @param q if non-null recheck if empty on CAS failure
 */
final void signalWork(WorkQueue q) {
    for (;;) {
        long c; int sp; WorkQueue[] ws; int i; WorkQueue v;
        if ((c = ctl) >= 0L)                      // enough workers
            break;
        else if ((sp = (int)c) == 0) {            // no idle workers
            if ((c & ADD_WORKER) != 0L)           // too few workers
                tryAddWorker(c);
            break;
        }
        else if ((ws = workQueues) == null)
            break;                                // unstarted/terminated
        else if (ws.length <= (i = sp & SMASK))
            break;                                // terminated
        else if ((v = ws[i]) == null)
            break;                                // terminating
        else {
            // Pop the top waiter off the Treiber stack encoded in ctl.
            int np = sp & ~UNSIGNALLED;
            int vp = v.phase;
            long nc = (v.stackPred & SP_MASK) | (UC_MASK & (c + RC_UNIT));
            Thread vt = v.owner;
            if (sp == vp && CTL.compareAndSet(this, c, nc)) {
                v.phase = np;
                if (vt != null && v.source < 0)
                    LockSupport.unpark(vt);
                break;
            }
            else if (q != null && q.isEmpty())    // no need to retry
                break;
        }
    }
}

/**
 * Tries to decrement counts (sometimes implicitly) and possibly
 * arrange for a compensating worker in preparation for blocking:
 * If not all core workers yet exist, creates one, else if any are
 * unreleased (possibly including caller) releases one, else if
 * fewer than the minimum allowed number of workers running,
 * checks to see that they are all active, and if so creates an
 * extra worker unless over maximum limit and policy is to
 * saturate. Most of these steps can fail due to interference, in
 * which case 0 is returned so caller will retry. A negative
 * return value indicates that the caller doesn't need to
 * re-adjust counts when later unblocked.
 *
 * @return 1: block then adjust, -1: block without adjust, 0 : retry
 */
private int tryCompensate(WorkQueue w) {
    int t, n, sp;
    long c = ctl;
    WorkQueue[] ws = workQueues;
    // t is the TC subfield: total workers minus target parallelism
    if ((t = (short)(c >>> TC_SHIFT)) >= 0) {
        if (ws == null || (n = ws.length) <= 0 || w == null)
            return 0;                             // disabled
        else if ((sp = (int)c) != 0) {            // replace or release
            WorkQueue v = ws[sp & (n - 1)];
            int wp = w.phase;
            long uc = UC_MASK & ((wp < 0) ? c + RC_UNIT : c);
            int np = sp & ~UNSIGNALLED;
            if (v != null) {
                int vp = v.phase;
                Thread vt = v.owner;
                long nc = ((long)v.stackPred & SP_MASK) | uc;
                if (vp == sp && CTL.compareAndSet(this, c, nc)) {
                    v.phase = np;
                    if (vt != null && v.source < 0)
                        LockSupport.unpark(vt);
                    return (wp < 0) ? -1 : 1;
                }
            }
            return 0;
        }
        else if ((int)(c >> RC_SHIFT) -           // reduce parallelism
                 (short)(bounds & SMASK) > 0) {
            long nc = ((RC_MASK & (c - RC_UNIT)) | (~RC_MASK & c));
            return CTL.compareAndSet(this, c, nc) ? 1 : 0;
        }
        else {                                    // validate
            int md = mode, pc = md & SMASK, tc = pc + t, bc = 0;
            boolean unstable = false;
            // Scan odd (worker-owned) indices only
            for (int i = 1; i < n; i += 2) {
                WorkQueue q; Thread wt; Thread.State ts;
                if ((q = ws[i]) != null) {
                    if (q.source == 0) {
                        unstable = true;
                        break;
                    }
                    else {
                        --tc;
                        if ((wt = q.owner) != null &&
                            ((ts = wt.getState()) == Thread.State.BLOCKED ||
                             ts == Thread.State.WAITING))
                            ++bc;                 // worker is blocking
                    }
                }
            }
            if (unstable || tc != 0 || ctl != c)
                return 0;                         // inconsistent
            else if (t + pc >= MAX_CAP || t >= (bounds >>> SWIDTH)) {
                Predicate<? super ForkJoinPool> sat;
                if ((sat = saturate) != null && sat.test(this))
                    return -1;
                else if (bc < pc) {               // lagging
                    Thread.yield();               // for retry spins
                    return 0;
                }
                else
                    throw new RejectedExecutionException(
                        "Thread limit exceeded replacing blocked worker");
            }
        }
    }

    long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK); // expand pool
    return CTL.compareAndSet(this, c, nc) && createWorker() ? 1 : 0;
}

/**
 * Top-level runloop for workers, called by ForkJoinWorkerThread.run.
 * See above for explanation.
 */
final void runWorker(WorkQueue w) {
    int r = (w.id ^ ThreadLocalRandom.nextSecondarySeed()) | FIFO; // rng
    w.array = new ForkJoinTask<?>[INITIAL_QUEUE_CAPACITY]; // initialize
    for (;;) {
        int phase;
        if (scan(w, r)) {                     // scan until apparently empty
            r ^= r << 13; r ^= r >>> 17; r ^= r << 5; // move (xorshift)
        }
        else if ((phase = w.phase) >= 0) {    // enqueue, then rescan
            long np = (w.phase = (phase + SS_SEQ) | UNSIGNALLED) & SP_MASK;
            long c, nc;
            do {
                // link to the previous stack top recorded in ctl
                w.stackPred = (int)(c = ctl);
                nc = ((c - RC_UNIT) & UC_MASK) | np;
            } while (!CTL.weakCompareAndSet(this, c, nc));
        }
        else {                                // already queued
            int pred = w.stackPred;
            Thread.interrupted();             // clear before park
            w.source = DORMANT;               // enable signal
            long c = ctl;
            int md = mode, rc = (md & SMASK) + (int)(c >> RC_SHIFT);
            if (md < 0)                       // terminating
                break;
            else if (rc <= 0 && (md & SHUTDOWN) != 0 &&
                     tryTerminate(false, false))
                break;                        // quiescent shutdown
            else if (w.phase < 0) {
                if (rc <= 0 && pred != 0 && phase == (int)c) {
                    long nc = (UC_MASK & (c - TC_UNIT)) | (SP_MASK & pred);
                    long d = keepAlive + System.currentTimeMillis();
                    LockSupport.parkUntil(this, d);
                    if (ctl == c &&           // drop on timeout if all idle
                        d - System.currentTimeMillis() <= TIMEOUT_SLOP &&
                        CTL.compareAndSet(this, c, nc)) {
                        w.phase = QUIET;
                        break;
                    }
                }
                else {
                    LockSupport.park(this);
                    if (w.phase < 0)          // one spurious wakeup check
                        LockSupport.park(this);
                }
            }
            w.source = 0;                     // disable signal
        }
    }
}

/**
 * Scans for and if found executes one or more top-level tasks from a queue.
 *
 * @return true if found an apparently non-empty queue, and
 * possibly ran task(s).
*/
private boolean scan(WorkQueue w, int r) {
    WorkQueue[] ws; int n;
    if ((ws = workQueues) != null && (n = ws.length) > 0 && w != null) {
        for (int m = n - 1, j = r & m;;) {
            WorkQueue q; int b;
            if ((q = ws[j]) != null && q.top != (b = q.base)) {
                int qid = q.id;
                ForkJoinTask<?>[] a; int cap, k; ForkJoinTask<?> t;
                if ((a = q.array) != null && (cap = a.length) > 0) {
                    t = (ForkJoinTask<?>)QA.getAcquire(a, k = (cap - 1) & b);
                    if (q.base == b++ && t != null &&
                        QA.compareAndSet(a, k, t, null)) {
                        q.base = b;
                        w.source = qid;
                        if (a[(cap - 1) & b] != null)
                            signalWork(q);    // help signal if more tasks
                        w.topLevelExec(t, q,  // random fairness bound
                                       (r | (1 << TOP_BOUND_SHIFT)) & SMASK);
                    }
                }
                return true;
            }
            else if (--n > 0)
                j = (j + 1) & m;
            else
                break;
        }
    }
    return false;
}

/**
 * Helps and/or blocks until the given task is done or timeout.
 * First tries locally helping, then scans other queues for a task
 * produced by one of w's stealers; compensating and blocking if
 * none are found (rescanning if tryCompensate fails).
 *
 * @param w caller
 * @param task the task
 * @param deadline for timed waits, if nonzero
 * @return task status on exit
 */
final int awaitJoin(WorkQueue w, ForkJoinTask<?> task, long deadline) {
    int s = 0;
    int seed = ThreadLocalRandom.nextSecondarySeed();
    if (w != null && task != null &&
        (!(task instanceof CountedCompleter) ||
         (s = w.helpCC((CountedCompleter<?>)task, 0, false)) >= 0)) {
        w.tryRemoveAndExec(task);
        int src = w.source, id = w.id;
        int r = (seed >>> 16) | 1, step = (seed & ~1) | 2;
        s = task.status;
        while (s >= 0) {
            WorkQueue[] ws;
            int n = (ws = workQueues) == null ? 0 : ws.length, m = n - 1;
            while (n > 0) {
                WorkQueue q; int b;
                // only help queues whose current source is this worker (a stealer)
                if ((q = ws[r & m]) != null && q.source == id &&
                    q.top != (b = q.base)) {
                    ForkJoinTask<?>[] a; int cap, k;
                    int qid = q.id;
                    if ((a = q.array) != null && (cap = a.length) > 0) {
                        ForkJoinTask<?> t = (ForkJoinTask<?>)
                            QA.getAcquire(a, k = (cap - 1) & b);
                        if (q.source == id && q.base == b++ && t != null &&
                            QA.compareAndSet(a, k, t, null)) {
                            q.base = b;
                            w.source = qid;
                            t.doExec();
                            w.source = src;
                        }
                    }
                    break;
                }
                else {
                    r += step;
                    --n;
                }
            }
            if ((s = task.status) < 0)
                break;
            else if (n == 0) {                // empty scan
                long ms, ns; int block;
                if (deadline == 0L)
                    ms = 0L;                  // untimed
                else if ((ns = deadline - System.nanoTime()) <= 0L)
                    break;                    // timeout
                else if ((ms = TimeUnit.NANOSECONDS.toMillis(ns)) <= 0L)
                    ms = 1L;                  // avoid 0 for timed wait
                if ((block = tryCompensate(w)) != 0) {
                    task.internalWait(ms);
                    // restore release count only if we adjusted it (block > 0)
                    CTL.getAndAdd(this, (block > 0) ? RC_UNIT : 0L);
                }
                s = task.status;
            }
        }
    }
    return s;
}

/**
 * Runs tasks until {@code isQuiescent()}. Rather than blocking
 * when tasks cannot be found, rescans until all others cannot
 * find tasks either.
 */
final void helpQuiescePool(WorkQueue w) {
    int prevSrc = w.source;
    int seed = ThreadLocalRandom.nextSecondarySeed();
    int r = seed >>> 16, step = r | 1;
    for (int source = prevSrc, released = -1;;) { // -1 until known
        ForkJoinTask<?> localTask; WorkQueue[] ws;
        while ((localTask = w.nextLocalTask()) != null)
            localTask.doExec();
        if (w.phase >= 0 && released == -1)
            released = 1;
        boolean quiet = true, empty = true;
        int n = (ws = workQueues) == null ? 0 : ws.length;
        for (int m = n - 1; n > 0; r += step, --n) {
            WorkQueue q; int b;
            if ((q = ws[r & m]) != null) {
                int qs = q.source;
                if (q.top != (b = q.base)) {
                    quiet = empty = false;
                    ForkJoinTask<?>[] a; int cap, k;
                    int qid = q.id;
                    if ((a = q.array) != null && (cap = a.length) > 0) {
                        if (released == 0) {    // increment
                            released = 1;
                            CTL.getAndAdd(this, RC_UNIT);
                        }
                        ForkJoinTask<?> t = (ForkJoinTask<?>)
                            QA.getAcquire(a, k = (cap - 1) & b);
                        if (q.base == b++ && t != null &&
                            QA.compareAndSet(a, k, t, null)) {
                            q.base = b;
                            w.source = qid;
                            t.doExec();
                            w.source = source = prevSrc;
                        }
                    }
                    break;
                }
                else if ((qs & QUIET) == 0)
                    quiet = false;
            }
        }
        if (quiet) {
            if (released == 0)
                CTL.getAndAdd(this, RC_UNIT);
            w.source = prevSrc;
            break;
        }
        else if (empty) {
            if (source != QUIET)
                w.source = source = QUIET;
            if (released == 1) {                // decrement
                released = 0;
                CTL.getAndAdd(this, RC_MASK & -RC_UNIT);
            }
        }
    }
}

/**
 * Scans for and returns a polled task, if available.
 * Used only for untracked polls.
 *
 * @param submissionsOnly if true, only scan submission queues
 */
private ForkJoinTask<?> pollScan(boolean submissionsOnly) {
    WorkQueue[] ws; int n;
    rescan: while ((mode & STOP) == 0 && (ws = workQueues) != null &&
                   (n = ws.length) > 0) {
        int m = n - 1;
        int r = ThreadLocalRandom.nextSecondarySeed();
        int h = r >>> 16;
        int origin, step;
        if (submissionsOnly) {
            origin = (r & ~1) & m;          // even indices and steps
            step = (h & ~1) | 2;
        }
        else {
            origin = r & m;
            step = h | 1;
        }
        boolean nonempty = false;
        for (int i = origin, oldSum = 0, checkSum = 0;;) {
            WorkQueue q;
            if ((q = ws[i]) != null) {
                int b; ForkJoinTask<?> t;
                if (q.top - (b = q.base) > 0) {
                    nonempty = true;
                    if ((t = q.poll()) != null)
                        return t;
                }
                else
                    checkSum += b + q.id;
            }
            if ((i = (i + step) & m) == origin) {
                // full pass complete; terminate only when two consecutive
                // passes produce the same checksum with nothing found
                if (!nonempty && oldSum == (oldSum = checkSum))
                    break rescan;
                checkSum = 0;
                nonempty = false;
            }
        }
    }
    return null;
}

/**
 * Gets and removes a local or stolen task for the given worker.
*
 * @return a task, if available
 */
final ForkJoinTask<?> nextTaskFor(WorkQueue w) {
    ForkJoinTask<?> t;
    if (w == null || (t = w.nextLocalTask()) == null)
        t = pollScan(false);
    return t;
}

// External operations

/**
 * Adds the given task to a submission queue at submitter's
 * current queue, creating one if null or contended.
 *
 * @param task the task. Caller must ensure non-null.
 */
final void externalPush(ForkJoinTask<?> task) {
    int r;                                // initialize caller's probe
    if ((r = ThreadLocalRandom.getProbe()) == 0) {
        ThreadLocalRandom.localInit();
        r = ThreadLocalRandom.getProbe();
    }
    for (;;) {
        WorkQueue q;
        int md = mode, n;
        WorkQueue[] ws = workQueues;
        if ((md & SHUTDOWN) != 0 || ws == null || (n = ws.length) <= 0)
            throw new RejectedExecutionException();
        else if ((q = ws[(n - 1) & r & SQMASK]) == null) { // add queue
            int qid = (r | QUIET) & ~(FIFO | OWNED);
            Object lock = workerNamePrefix;
            ForkJoinTask<?>[] qa =
                new ForkJoinTask<?>[INITIAL_QUEUE_CAPACITY];
            q = new WorkQueue(this, null);
            q.array = qa;
            q.id = qid;
            q.source = QUIET;
            if (lock != null) {     // unless disabled, lock pool to install
                synchronized (lock) {
                    WorkQueue[] vs; int i, vn;
                    if ((vs = workQueues) != null && (vn = vs.length) > 0 &&
                        vs[i = qid & (vn - 1) & SQMASK] == null)
                        vs[i] = q;  // else another thread already installed
                }
            }
        }
        else if (!q.tryLockPhase()) // move if busy
            r = ThreadLocalRandom.advanceProbe(r);
        else {
            if (q.lockedPush(task))
                signalWork(null);
            return;
        }
    }
}

/**
 * Pushes a possibly-external submission.
 */
private <T> ForkJoinTask<T> externalSubmit(ForkJoinTask<T> task) {
    Thread t; ForkJoinWorkerThread w; WorkQueue q;
    if (task == null)
        throw new NullPointerException();
    // workers of this pool push directly to their own queue; all
    // other callers go through the external submission path
    if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) &&
        (w = (ForkJoinWorkerThread)t).pool == this &&
        (q = w.workQueue) != null)
        q.push(task);
    else
        externalPush(task);
    return task;
}

/**
 * Returns common pool queue for an external thread.
*/
static WorkQueue commonSubmitterQueue() {
    ForkJoinPool p = common;
    int r = ThreadLocalRandom.getProbe();
    WorkQueue[] ws; int n;
    return (p != null && (ws = p.workQueues) != null &&
            (n = ws.length) > 0) ?
        ws[(n - 1) & r & SQMASK] : null;
}

/**
 * Performs tryUnpush for an external submitter.
 */
final boolean tryExternalUnpush(ForkJoinTask<?> task) {
    int r = ThreadLocalRandom.getProbe();
    WorkQueue[] ws; WorkQueue w; int n;
    return ((ws = workQueues) != null &&
            (n = ws.length) > 0 &&
            (w = ws[(n - 1) & r & SQMASK]) != null &&
            w.tryLockedUnpush(task));
}

/**
 * Performs helpComplete for an external submitter.
 */
final int externalHelpComplete(CountedCompleter<?> task, int maxTasks) {
    int r = ThreadLocalRandom.getProbe();
    WorkQueue[] ws; WorkQueue w; int n;
    return ((ws = workQueues) != null && (n = ws.length) > 0 &&
            (w = ws[(n - 1) & r & SQMASK]) != null) ?
        w.helpCC(task, maxTasks, true) : 0;
}

/**
 * Tries to steal and run tasks within the target's computation.
 * The maxTasks argument supports external usages; internal calls
 * use zero, allowing unbounded steps (external calls trap
 * non-positive values).
 *
 * @param w caller
 * @param maxTasks if non-zero, the maximum number of other tasks to run
 * @return task status on exit
 */
final int helpComplete(WorkQueue w, CountedCompleter<?> task,
                       int maxTasks) {
    return (w == null) ? 0 : w.helpCC(task, maxTasks, false);
}

/**
 * Returns a cheap heuristic guide for task partitioning when
 * programmers, frameworks, tools, or languages have little or no
 * idea about task granularity. In essence, by offering this
 * method, we ask users only about tradeoffs in overhead vs
 * expected throughput and its variance, rather than how finely to
 * partition tasks.
 *
 * In a steady state strict (tree-structured) computation, each
 * thread makes available for stealing enough tasks for other
 * threads to remain active. Inductively, if all threads play by
 * the same rules, each thread should make available only a
 * constant number of tasks.
*
 * The minimum useful constant is just 1. But using a value of 1
 * would require immediate replenishment upon each steal to
 * maintain enough tasks, which is infeasible. Further,
 * partitionings/granularities of offered tasks should minimize
 * steal rates, which in general means that threads nearer the top
 * of computation tree should generate more than those nearer the
 * bottom. In perfect steady state, each thread is at
 * approximately the same level of computation tree. However,
 * producing extra tasks amortizes the uncertainty of progress and
 * diffusion assumptions.
 *
 * So, users will want to use values larger (but not much larger)
 * than 1 to both smooth over transient shortages and hedge
 * against uneven progress; as traded off against the cost of
 * extra task overhead. We leave the user to pick a threshold
 * value to compare with the results of this call to guide
 * decisions, but recommend values such as 3.
 *
 * When all threads are active, it is on average OK to estimate
 * surplus strictly locally. In steady-state, if one thread is
 * maintaining say 2 surplus tasks, then so are others. So we can
 * just use estimated queue length. However, this strategy alone
 * leads to serious mis-estimates in some non-steady-state
 * conditions (ramp-up, ramp-down, other stalls). We can detect
 * many of these by further considering the number of "idle"
 * threads, that are known to have zero queued tasks, so
 * compensate by a factor of (#idle/#active) threads.
 */
static int getSurplusQueuedTaskCount() {
    Thread t; ForkJoinWorkerThread wt; ForkJoinPool pool; WorkQueue q;
    if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) &&
        (pool = (wt = (ForkJoinWorkerThread)t).pool) != null &&
        (q = wt.workQueue) != null) {
        int p = pool.mode & SMASK;
        int a = p + (int)(pool.ctl >> RC_SHIFT);
        int n = q.top - q.base;
        // scale allowed surplus by successively halving parallelism
        return n - (a > (p >>>= 1) ? 0 :
                    a > (p >>>= 1) ? 1 :
                    a > (p >>>= 1) ? 2 :
                    a > (p >>>= 1) ? 4 :
                    8);
    }
    return 0;
}

// Termination

/**
 * Possibly initiates and/or completes termination.
 *
 * @param now if true, unconditionally terminate, else only
 * if no work and no active workers
 * @param enable if true, terminate when next possible
 * @return true if terminating or terminated
 */
private boolean tryTerminate(boolean now, boolean enable) {
    int md; // 3 phases: try to set SHUTDOWN, then STOP, then TERMINATED

    while (((md = mode) & SHUTDOWN) == 0) {
        if (!enable || this == common)  // cannot shutdown
            return false;
        else
            MODE.compareAndSet(this, md, md | SHUTDOWN);
    }

    while (((md = mode) & STOP) == 0) { // try to initiate termination
        if (!now) {                     // check if quiescent & empty
            for (long oldSum = 0L;;) {  // repeat until stable
                boolean running = false;
                long checkSum = ctl;
                WorkQueue[] ws = workQueues;
                if ((md & SMASK) + (int)(checkSum >> RC_SHIFT) > 0)
                    running = true;
                else if (ws != null) {
                    WorkQueue w;
                    for (int i = 0; i < ws.length; ++i) {
                        if ((w = ws[i]) != null) {
                            int s = w.source, p = w.phase;
                            int d = w.id, b = w.base;
                            if (b != w.top ||
                                ((d & 1) == 1 && (s >= 0 || p >= 0))) {
                                running = true;
                                break;  // working, scanning, or have work
                            }
                            checkSum += (((long)s << 48) + ((long)p << 32) +
                                         ((long)b << 16) + (long)d);
                        }
                    }
                }
                if (((md = mode) & STOP) != 0)
                    break;              // already triggered
                else if (running)
                    return false;
                else if (workQueues == ws && oldSum == (oldSum = checkSum))
                    break;
            }
        }
        if ((md & STOP) == 0)
            MODE.compareAndSet(this, md, md | STOP);
    }

    while (((md = mode) & TERMINATED) == 0) { // help terminate others
        for (long oldSum = 0L;;) {            // repeat until stable
            WorkQueue[] ws; WorkQueue w;
            long checkSum = ctl;
            if ((ws = workQueues) != null) {
                for (int i = 0; i < ws.length; ++i) {
                    if ((w = ws[i]) != null) {
                        ForkJoinWorkerThread wt = w.owner;
                        w.cancelAll();        // clear queues
                        if (wt != null) {
                            try {             // unblock join or park
                                wt.interrupt();
                            } catch (Throwable ignore) {
                            }
                        }
                        checkSum += ((long)w.phase << 32) + w.base;
                    }
                }
            }
            if (((md = mode) & TERMINATED) != 0 ||
                (workQueues == ws && oldSum == (oldSum = checkSum)))
                break;
        }
        if ((md & TERMINATED) != 0)
            break;
        else if ((md & SMASK) + (short)(ctl >>> TC_SHIFT) > 0)
            break;
        else if (MODE.compareAndSet(this, md, md | TERMINATED)) {
            synchronized (this) {
                notifyAll();                  // for awaitTermination
            }
            break;
        }
    }
    return true;
}

// Exported methods

// Constructors

/**
 * Creates a {@code ForkJoinPool} with parallelism equal to {@link
 * java.lang.Runtime#availableProcessors}, using defaults for all
 * other parameters (see {@link #ForkJoinPool(int,
 * ForkJoinWorkerThreadFactory, UncaughtExceptionHandler, boolean,
 * int, int, int, Predicate, long, TimeUnit)}).
 *
 * @throws SecurityException if a security manager exists and
 *         the caller is not permitted to modify threads
 *         because it does not hold {@link
 *         java.lang.RuntimePermission}{@code ("modifyThread")}
 */
public ForkJoinPool() {
    this(Math.min(MAX_CAP, Runtime.getRuntime().availableProcessors()),
         defaultForkJoinWorkerThreadFactory, null, false,
         0, MAX_CAP, 1, null, DEFAULT_KEEPALIVE, TimeUnit.MILLISECONDS);
}

/**
 * Creates a {@code ForkJoinPool} with the indicated parallelism
 * level, using defaults for all other parameters (see {@link
 * #ForkJoinPool(int, ForkJoinWorkerThreadFactory,
 * UncaughtExceptionHandler, boolean, int, int, int, Predicate,
 * long, TimeUnit)}).
*
 * @param parallelism the parallelism level
 * @throws IllegalArgumentException if parallelism less than or
 *         equal to zero, or greater than implementation limit
 * @throws SecurityException if a security manager exists and
 *         the caller is not permitted to modify threads
 *         because it does not hold {@link
 *         java.lang.RuntimePermission}{@code ("modifyThread")}
 */
public ForkJoinPool(int parallelism) {
    this(parallelism, defaultForkJoinWorkerThreadFactory, null, false,
         0, MAX_CAP, 1, null, DEFAULT_KEEPALIVE, TimeUnit.MILLISECONDS);
}

/**
 * Creates a {@code ForkJoinPool} with the given parameters (using
 * defaults for others -- see {@link #ForkJoinPool(int,
 * ForkJoinWorkerThreadFactory, UncaughtExceptionHandler, boolean,
 * int, int, int, Predicate, long, TimeUnit)}).
 *
 * @param parallelism the parallelism level. For default value,
 * use {@link java.lang.Runtime#availableProcessors}.
 * @param factory the factory for creating new threads. For default value,
 * use {@link #defaultForkJoinWorkerThreadFactory}.
 * @param handler the handler for internal worker threads that
 * terminate due to unrecoverable errors encountered while executing
 * tasks. For default value, use {@code null}.
 * @param asyncMode if true,
 * establishes local first-in-first-out scheduling mode for forked
 * tasks that are never joined. This mode may be more appropriate
 * than default locally stack-based mode in applications in which
 * worker threads only process event-style asynchronous tasks.
 * For default value, use {@code false}.
 * @throws IllegalArgumentException if parallelism less than or
 *         equal to zero, or greater than implementation limit
 * @throws NullPointerException if the factory is null
 * @throws SecurityException if a security manager exists and
 *         the caller is not permitted to modify threads
 *         because it does not hold {@link
 *         java.lang.RuntimePermission}{@code ("modifyThread")}
 */
public ForkJoinPool(int parallelism,
                    ForkJoinWorkerThreadFactory factory,
                    UncaughtExceptionHandler handler,
                    boolean asyncMode) {
    this(parallelism, factory, handler, asyncMode,
         0, MAX_CAP, 1, null, DEFAULT_KEEPALIVE, TimeUnit.MILLISECONDS);
}

/**
 * Creates a {@code ForkJoinPool} with the given parameters.
 *
 * @param parallelism the parallelism level. For default value,
 * use {@link java.lang.Runtime#availableProcessors}.
 *
 * @param factory the factory for creating new threads. For
 * default value, use {@link #defaultForkJoinWorkerThreadFactory}.
 *
 * @param handler the handler for internal worker threads that
 * terminate due to unrecoverable errors encountered while
 * executing tasks. For default value, use {@code null}.
 *
 * @param asyncMode if true, establishes local first-in-first-out
 * scheduling mode for forked tasks that are never joined. This
 * mode may be more appropriate than default locally stack-based
 * mode in applications in which worker threads only process
 * event-style asynchronous tasks. For default value, use {@code
 * false}.
 *
 * @param corePoolSize the number of threads to keep in the pool
 * (unless timed out after an elapsed keep-alive). Normally (and
 * by default) this is the same value as the parallelism level,
 * but may be set to a larger value to reduce dynamic overhead if
 * tasks regularly block. Using a smaller value (for example
 * {@code 0}) has the same effect as the default.
 *
 * @param maximumPoolSize the maximum number of threads allowed.
 * When the maximum is reached, attempts to replace blocked
 * threads fail. (However, because creation and termination of
 * different threads may overlap, and may be managed by the given
 * thread factory, this value may be transiently exceeded.) To
 * arrange the same value as is used by default for the common
 * pool, use {@code 256} plus the {@code parallelism} level. (By
 * default, the common pool allows a maximum of 256 spare
 * threads.) Using a value (for example {@code
 * Integer.MAX_VALUE}) larger than the implementation's total
 * thread limit has the same effect as using this limit (which is
 * the default).
 *
 * @param minimumRunnable the minimum allowed number of core
 * threads not blocked by a join or {@link ManagedBlocker}. To
 * ensure progress, when too few unblocked threads exist and
 * unexecuted tasks may exist, new threads are constructed, up to
 * the given maximumPoolSize. For the default value, use {@code
 * 1}, that ensures liveness. A larger value might improve
 * throughput in the presence of blocked activities, but might
 * not, due to increased overhead. A value of zero may be
 * acceptable when submitted tasks cannot have dependencies
 * requiring additional threads.
 *
 * @param saturate if non-null, a predicate invoked upon attempts
 * to create more than the maximum total allowed threads. By
 * default, when a thread is about to block on a join or {@link
 * ManagedBlocker}, but cannot be replaced because the
 * maximumPoolSize would be exceeded, a {@link
 * RejectedExecutionException} is thrown. But if this predicate
 * returns {@code true}, then no exception is thrown, so the pool
 * continues to operate with fewer than the target number of
 * runnable threads, which might not ensure progress.
 *
 * @param keepAliveTime the elapsed time since last use before
 * a thread is terminated (and then later replaced if needed).
 * For the default value, use {@code 60, TimeUnit.SECONDS}.
 *
 * @param unit the time unit for the {@code keepAliveTime} argument
 *
 * @throws IllegalArgumentException if parallelism is less than or
 *         equal to zero, or is greater than implementation limit,
 *         or if maximumPoolSize is less than parallelism,
 *         of if the keepAliveTime is less than or equal to zero.
 * @throws NullPointerException if the factory is null
 * @throws SecurityException if a security manager exists and
 *         the caller is not permitted to modify threads
 *         because it does not hold {@link
 *         java.lang.RuntimePermission}{@code ("modifyThread")}
 * @since 9
 */
public ForkJoinPool(int parallelism,
                    ForkJoinWorkerThreadFactory factory,
                    UncaughtExceptionHandler handler,
                    boolean asyncMode,
                    int corePoolSize,
                    int maximumPoolSize,
                    int minimumRunnable,
                    Predicate<? super ForkJoinPool> saturate,
                    long keepAliveTime,
                    TimeUnit unit) {
    // check, encode, pack parameters
    if (parallelism <= 0 || parallelism > MAX_CAP ||
        maximumPoolSize < parallelism || keepAliveTime <= 0L)
        throw new IllegalArgumentException();
    if (factory == null)
        throw new NullPointerException();
    long ms = Math.max(unit.toMillis(keepAliveTime), TIMEOUT_SLOP);

    int corep = Math.min(Math.max(corePoolSize, parallelism), MAX_CAP);
    // negated counts, offset by targets, packed into ctl subfields
    long c = ((((long)(-corep)       << TC_SHIFT) & TC_MASK) |
              (((long)(-parallelism) << RC_SHIFT) & RC_MASK));
    int m = parallelism | (asyncMode ? FIFO : 0);
    int maxSpares = Math.min(maximumPoolSize, MAX_CAP) - parallelism;
    int minAvail = Math.min(Math.max(minimumRunnable, 0), MAX_CAP);
    int b = ((minAvail - parallelism) & SMASK) | (maxSpares << SWIDTH);
    int n = (parallelism > 1) ? parallelism - 1 : 1; // at least 2 slots
    n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
    n = (n + 1) << 1; // power of two, including space for submission queues

    this.workerNamePrefix = "ForkJoinPool-" + nextPoolId() + "-worker-";
    this.workQueues = new WorkQueue[n];
    this.factory = factory;
    this.ueh = handler;
    this.saturate = saturate;
    this.keepAlive = ms;
    this.bounds = b;
    this.mode = m;
    this.ctl = c;
    checkPermission();
}

private static Object newInstanceFromSystemProperty(String property)
    throws ReflectiveOperationException {
    String className = System.getProperty(property);
    return (className == null)
        ? null
        : ClassLoader.getSystemClassLoader().loadClass(className)
          .getConstructor().newInstance();
}

/**
 * Constructor for common pool using parameters possibly
 * overridden by system properties
 */
private ForkJoinPool(byte forCommonPoolOnly) {
    int parallelism = -1;
    ForkJoinWorkerThreadFactory fac = null;
    UncaughtExceptionHandler handler = null;
    try {  // ignore exceptions in accessing/parsing properties
        String pp = System.getProperty
            ("java.util.concurrent.ForkJoinPool.common.parallelism");
        if (pp != null)
            parallelism = Integer.parseInt(pp);
        fac = (ForkJoinWorkerThreadFactory) newInstanceFromSystemProperty(
            "java.util.concurrent.ForkJoinPool.common.threadFactory");
        handler = (UncaughtExceptionHandler) newInstanceFromSystemProperty(
            "java.util.concurrent.ForkJoinPool.common.exceptionHandler");
    } catch (Exception ignore) {
    }

    if (fac == null) {
        if (System.getSecurityManager() == null)
            fac = defaultForkJoinWorkerThreadFactory;
        else // use security-managed default
            fac = new InnocuousForkJoinWorkerThreadFactory();
    }
    if (parallelism < 0 && // default 1 less than #cores
        (parallelism = Runtime.getRuntime().availableProcessors() - 1) <= 0)
        parallelism = 1;
    if (parallelism > MAX_CAP)
        parallelism = MAX_CAP;

    long c = ((((long)(-parallelism) << TC_SHIFT) & TC_MASK) |
              (((long)(-parallelism) << RC_SHIFT) & RC_MASK));
    int b = ((1 - parallelism) & SMASK) | (COMMON_MAX_SPARES << SWIDTH);
    int n = (parallelism > 1) ? parallelism - 1 : 1;
    n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
    n = (n + 1) << 1;

    this.workerNamePrefix = "ForkJoinPool.commonPool-worker-";
    this.workQueues = new WorkQueue[n];
    this.factory = fac;
    this.ueh = handler;
    this.saturate = null;
    this.keepAlive = DEFAULT_KEEPALIVE;
    this.bounds = b;
    this.mode = parallelism;
    this.ctl = c;
}

/**
 * Returns the common pool instance. This pool is statically
 * constructed; its run state is unaffected by attempts to {@link
 * #shutdown} or {@link #shutdownNow}. However this pool and any
 * ongoing processing are automatically terminated upon program
 * {@link System#exit}. Any program that relies on asynchronous
 * task processing to complete before program termination should
 * invoke {@code commonPool().}{@link #awaitQuiescence awaitQuiescence},
 * before exit.
 *
 * @return the common pool instance
 * @since 1.8
 */
public static ForkJoinPool commonPool() {
    // assert common != null : "static init error";
    return common;
}

// Execution methods

/**
 * Performs the given task, returning its result upon completion.
 * If the computation encounters an unchecked Exception or Error,
 * it is rethrown as the outcome of this invocation. Rethrown
 * exceptions behave in the same way as regular exceptions, but,
 * when possible, contain stack traces (as displayed for example
 * using {@code ex.printStackTrace()}) of both the current thread
 * as well as the thread actually encountering the exception;
 * minimally only the latter.
 *
 * @param task the task
 * @param <T> the type of the task's result
 * @return the task's result
 * @throws NullPointerException if the task is null
 * @throws RejectedExecutionException if the task cannot be
 *         scheduled for execution
 */
public <T> T invoke(ForkJoinTask<T> task) {
    if (task == null)
        throw new NullPointerException();
    externalSubmit(task);
    return task.join();
}

/**
 * Arranges for (asynchronous) execution of the given task.
     *
     * @param task the task
     * @throws NullPointerException if the task is null
     * @throws RejectedExecutionException if the task cannot be
     *         scheduled for execution
     */
    public void execute(ForkJoinTask<?> task) {
        externalSubmit(task);
    }

    // AbstractExecutorService methods

    /**
     * @throws NullPointerException if the task is null
     * @throws RejectedExecutionException if the task cannot be
     *         scheduled for execution
     */
    public void execute(Runnable task) {
        if (task == null)
            throw new NullPointerException();
        ForkJoinTask<?> job;
        if (task instanceof ForkJoinTask<?>) // avoid re-wrap
            job = (ForkJoinTask<?>) task;
        else
            job = new ForkJoinTask.RunnableExecuteAction(task);
        externalSubmit(job);
    }

    /**
     * Submits a ForkJoinTask for execution.
     *
     * @param task the task to submit
     * @param <T> the type of the task's result
     * @return the task
     * @throws NullPointerException if the task is null
     * @throws RejectedExecutionException if the task cannot be
     *         scheduled for execution
     */
    public <T> ForkJoinTask<T> submit(ForkJoinTask<T> task) {
        return externalSubmit(task);
    }

    /**
     * @throws NullPointerException if the task is null
     * @throws RejectedExecutionException if the task cannot be
     *         scheduled for execution
     */
    public <T> ForkJoinTask<T> submit(Callable<T> task) {
        return externalSubmit(new ForkJoinTask.AdaptedCallable<T>(task));
    }

    /**
     * @throws NullPointerException if the task is null
     * @throws RejectedExecutionException if the task cannot be
     *         scheduled for execution
     */
    public <T> ForkJoinTask<T> submit(Runnable task, T result) {
        return externalSubmit(new ForkJoinTask.AdaptedRunnable<T>(task, result));
    }

    /**
     * @throws NullPointerException if the task is null
     * @throws RejectedExecutionException if the task cannot be
     *         scheduled for execution
     */
    @SuppressWarnings("unchecked")
    public ForkJoinTask<?> submit(Runnable task) {
        if (task == null)
            throw new NullPointerException();
        return externalSubmit((task instanceof ForkJoinTask<?>)
            ? (ForkJoinTask<Void>) task // avoid re-wrap
            : new ForkJoinTask.AdaptedRunnableAction(task));
    }

    /**
     * @throws NullPointerException       {@inheritDoc}
     * @throws RejectedExecutionException {@inheritDoc}
     */
    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) {
        // In previous versions of this class, this method constructed
        // a task to run ForkJoinTask.invokeAll, but now external
        // invocation of multiple tasks is at least as efficient.
        ArrayList<Future<T>> futures = new ArrayList<>(tasks.size());

        try {
            // submit everything first, then join; joins are "quiet" so a
            // task failure does not abort the joining of the others
            for (Callable<T> t : tasks) {
                ForkJoinTask<T> f = new ForkJoinTask.AdaptedCallable<T>(t);
                futures.add(f);
                externalSubmit(f);
            }
            for (int i = 0, size = futures.size(); i < size; i++)
                ((ForkJoinTask<?>)futures.get(i)).quietlyJoin();
            return futures;
        } catch (Throwable t) {
            // on any failure (e.g. rejected submission) cancel everything
            // submitted so far, then propagate
            for (int i = 0, size = futures.size(); i < size; i++)
                futures.get(i).cancel(false);
            throw t;
        }
    }

    /**
     * Returns the factory used for constructing new workers.
     *
     * @return the factory used for constructing new workers
     */
    public ForkJoinWorkerThreadFactory getFactory() {
        return factory;
    }

    /**
     * Returns the handler for internal worker threads that terminate
     * due to unrecoverable errors encountered while executing tasks.
     *
     * @return the handler, or {@code null} if none
     */
    public UncaughtExceptionHandler getUncaughtExceptionHandler() {
        return ueh;
    }

    /**
     * Returns the targeted parallelism level of this pool.
     *
     * @return the targeted parallelism level of this pool
     */
    public int getParallelism() {
        int par = mode & SMASK;
        return (par > 0) ? par : 1;
    }

    /**
     * Returns the targeted parallelism level of the common pool.
     *
     * @return the targeted parallelism level of the common pool
     * @since 1.8
     */
    public static int getCommonPoolParallelism() {
        return COMMON_PARALLELISM;
    }

    /**
     * Returns the number of worker threads that have started but not
     * yet terminated.
     * The result returned by this method may differ
     * from {@link #getParallelism} when threads are created to
     * maintain parallelism when others are cooperatively blocked.
     *
     * @return the number of worker threads
     */
    public int getPoolSize() {
        // parallelism (low bits of mode) plus the TC delta decoded from ctl
        return ((mode & SMASK) + (short)(ctl >>> TC_SHIFT));
    }

    /**
     * Returns {@code true} if this pool uses local first-in-first-out
     * scheduling mode for forked tasks that are never joined.
     *
     * @return {@code true} if this pool uses async mode
     */
    public boolean getAsyncMode() {
        return (mode & FIFO) != 0;
    }

    /**
     * Returns an estimate of the number of worker threads that are
     * not blocked waiting to join tasks or for other managed
     * synchronization. This method may overestimate the
     * number of running threads.
     *
     * @return the number of worker threads
     */
    public int getRunningThreadCount() {
        WorkQueue[] ws; WorkQueue w;
        VarHandle.acquireFence(); // ensure we read a current workQueues snapshot
        int rc = 0;
        if ((ws = workQueues) != null) {
            // odd indices hold worker-owned queues (cf. the even-index scans
            // used by the submission-count methods below)
            for (int i = 1; i < ws.length; i += 2) {
                if ((w = ws[i]) != null && w.isApparentlyUnblocked())
                    ++rc;
            }
        }
        return rc;
    }

    /**
     * Returns an estimate of the number of threads that are currently
     * stealing or executing tasks. This method may overestimate the
     * number of active threads.
     *
     * @return the number of active threads
     */
    public int getActiveThreadCount() {
        int r = (mode & SMASK) + (int)(ctl >> RC_SHIFT);
        return (r <= 0) ? 0 : r; // suppress momentarily negative values
    }

    /**
     * Returns {@code true} if all worker threads are currently idle.
     * An idle worker is one that cannot obtain a task to execute
     * because none are available to steal from other threads, and
     * there are no pending submissions to the pool. This method is
     * conservative; it might not return {@code true} immediately upon
     * idleness of all threads, but will eventually become true if
     * threads remain inactive.
     *
     * @return {@code true} if all threads are currently idle
     */
    public boolean isQuiescent() {
        for (;;) {
            long c = ctl;
            int md = mode, pc = md & SMASK;
            int tc = pc + (short)(c >>> TC_SHIFT); // total worker count
            int rc = pc + (int)(c >> RC_SHIFT);    // released (active) count
            if ((md & (STOP | TERMINATED)) != 0)
                return true;
            else if (rc > 0)
                return false;
            else {
                WorkQueue[] ws; WorkQueue v;
                if ((ws = workQueues) != null) {
                    for (int i = 1; i < ws.length; i += 2) {
                        if ((v = ws[i]) != null) {
                            if (v.source > 0) // queue still scanning/taking
                                return false;
                            --tc;
                        }
                    }
                }
                // quiescent only if every worker was accounted for and ctl
                // did not change while we scanned (retry otherwise)
                if (tc == 0 && ctl == c)
                    return true;
            }
        }
    }

    /**
     * Returns an estimate of the total number of completed tasks that
     * were executed by a thread other than their submitter. The
     * reported value underestimates the actual total number of steals
     * when the pool is not quiescent. This value may be useful for
     * monitoring and tuning fork/join programs: in general, steal
     * counts should be high enough to keep threads busy, but low
     * enough to avoid overhead and contention across threads.
     *
     * @return the number of steals
     */
    public long getStealCount() {
        long count = stealCount;
        WorkQueue[] ws; WorkQueue w;
        if ((ws = workQueues) != null) {
            for (int i = 1; i < ws.length; i += 2) {
                if ((w = ws[i]) != null)
                    // nsteals is an int; mask to accumulate it unsigned
                    count += (long)w.nsteals & 0xffffffffL;
            }
        }
        return count;
    }

    /**
     * Returns an estimate of the total number of tasks currently held
     * in queues by worker threads (but not including tasks submitted
     * to the pool that have not begun executing). This value is only
     * an approximation, obtained by iterating across all threads in
     * the pool. This method may be useful for tuning task
     * granularities.
     *
     * @return the number of queued tasks
     */
    public long getQueuedTaskCount() {
        WorkQueue[] ws; WorkQueue w;
        VarHandle.acquireFence();
        int count = 0;
        if ((ws = workQueues) != null) {
            for (int i = 1; i < ws.length; i += 2) {
                if ((w = ws[i]) != null)
                    count += w.queueSize();
            }
        }
        return count;
    }

    /**
     * Returns an estimate of the number of tasks submitted to this
     * pool that have not yet begun executing.
     * This method may take
     * time proportional to the number of submissions.
     *
     * @return the number of queued submissions
     */
    public int getQueuedSubmissionCount() {
        WorkQueue[] ws; WorkQueue w;
        VarHandle.acquireFence();
        int count = 0;
        if ((ws = workQueues) != null) {
            // even indices hold external submission queues
            for (int i = 0; i < ws.length; i += 2) {
                if ((w = ws[i]) != null)
                    count += w.queueSize();
            }
        }
        return count;
    }

    /**
     * Returns {@code true} if there are any tasks submitted to this
     * pool that have not yet begun executing.
     *
     * @return {@code true} if there are any queued submissions
     */
    public boolean hasQueuedSubmissions() {
        WorkQueue[] ws; WorkQueue w;
        VarHandle.acquireFence();
        if ((ws = workQueues) != null) {
            for (int i = 0; i < ws.length; i += 2) {
                if ((w = ws[i]) != null && !w.isEmpty())
                    return true;
            }
        }
        return false;
    }

    /**
     * Removes and returns the next unexecuted submission if one is
     * available.  This method may be useful in extensions to this
     * class that re-assign work in systems with multiple pools.
     *
     * @return the next submission, or {@code null} if none
     */
    protected ForkJoinTask<?> pollSubmission() {
        return pollScan(true);
    }

    /**
     * Removes all available unexecuted submitted and forked tasks
     * from scheduling queues and adds them to the given collection,
     * without altering their execution status. These may include
     * artificially generated or wrapped tasks. This method is
     * designed to be invoked only when the pool is known to be
     * quiescent. Invocations at other times may not remove all
     * tasks. A failure encountered while attempting to add elements
     * to collection {@code c} may result in elements being in
     * neither, either or both collections when the associated
     * exception is thrown.  The behavior of this operation is
     * undefined if the specified collection is modified while the
     * operation is in progress.
     *
     * @param c the collection to transfer elements into
     * @return the number of elements transferred
     */
    protected int drainTasksTo(Collection<? super ForkJoinTask<?>> c) {
        WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
        VarHandle.acquireFence();
        int count = 0;
        if ((ws = workQueues) != null) {
            // scan every slot (both submission and worker queues)
            for (int i = 0; i < ws.length; ++i) {
                if ((w = ws[i]) != null) {
                    while ((t = w.poll()) != null) {
                        c.add(t);
                        ++count;
                    }
                }
            }
        }
        return count;
    }

    /**
     * Returns a string identifying this pool, as well as its state,
     * including indications of run state, parallelism level, and
     * worker and task counts.
     *
     * @return a string identifying this pool, as well as its state
     */
    public String toString() {
        // Use a single pass through workQueues to collect counts
        int md = mode; // read volatile fields first
        long c = ctl;
        long st = stealCount;
        long qt = 0L, qs = 0L; int rc = 0;
        WorkQueue[] ws; WorkQueue w;
        if ((ws = workQueues) != null) {
            for (int i = 0; i < ws.length; ++i) {
                if ((w = ws[i]) != null) {
                    int size = w.queueSize();
                    if ((i & 1) == 0)
                        qs += size;        // even slot: pending submission
                    else {
                        qt += size;        // odd slot: queued worker tasks
                        st += (long)w.nsteals & 0xffffffffL;
                        if (w.isApparentlyUnblocked())
                            ++rc;
                    }
                }
            }
        }

        int pc = (md & SMASK);
        int tc = pc + (short)(c >>> TC_SHIFT);
        int ac = pc + (int)(c >> RC_SHIFT);
        if (ac < 0) // ignore transient negative
            ac = 0;
        String level = ((md & TERMINATED) != 0 ? "Terminated" :
                        (md & STOP)       != 0 ? "Terminating" :
                        (md & SHUTDOWN)   != 0 ? "Shutting down" :
                        "Running");
        return super.toString() +
            "[" + level +
            ", parallelism = " + pc +
            ", size = " + tc +
            ", active = " + ac +
            ", running = " + rc +
            ", steals = " + st +
            ", tasks = " + qt +
            ", submissions = " + qs +
            "]";
    }

    /**
     * Possibly initiates an orderly shutdown in which previously
     * submitted tasks are executed, but no new tasks will be
     * accepted. Invocation has no effect on execution state if this
     * is the {@link #commonPool()}, and no additional effect if
     * already shut down.  Tasks that are in the process of being
     * submitted concurrently during the course of this method may or
     * may not be rejected.
     *
     * @throws SecurityException if a security manager exists and
     *         the caller is not permitted to modify threads
     *         because it does not hold {@link
     *         java.lang.RuntimePermission}{@code ("modifyThread")}
     */
    public void shutdown() {
        checkPermission();
        tryTerminate(false, true); // do not interrupt running tasks
    }

    /**
     * Possibly attempts to cancel and/or stop all tasks, and reject
     * all subsequently submitted tasks.  Invocation has no effect on
     * execution state if this is the {@link #commonPool()}, and no
     * additional effect if already shut down. Otherwise, tasks that
     * are in the process of being submitted or executed concurrently
     * during the course of this method may or may not be
     * rejected. This method cancels both existing and unexecuted
     * tasks, in order to permit termination in the presence of task
     * dependencies. So the method always returns an empty list
     * (unlike the case for some other Executors).
     *
     * @return an empty list
     * @throws SecurityException if a security manager exists and
     *         the caller is not permitted to modify threads
     *         because it does not hold {@link
     *         java.lang.RuntimePermission}{@code ("modifyThread")}
     */
    public List<Runnable> shutdownNow() {
        checkPermission();
        tryTerminate(true, true); // "now" == also stop/cancel active tasks
        return Collections.emptyList();
    }

    /**
     * Returns {@code true} if all tasks have completed following shut down.
     *
     * @return {@code true} if all tasks have completed following shut down
     */
    public boolean isTerminated() {
        return (mode & TERMINATED) != 0;
    }

    /**
     * Returns {@code true} if the process of termination has
     * commenced but not yet completed.  This method may be useful for
     * debugging. A return of {@code true} reported a sufficient
     * period after shutdown may indicate that submitted tasks have
     * ignored or suppressed interruption, or are waiting for I/O,
     * causing this executor not to properly terminate. (See the
     * advisory notes for class {@link ForkJoinTask} stating that
     * tasks should not normally entail blocking operations.  But if
     * they do, they must abort them on interrupt.)
     *
     * @return {@code true} if terminating but not yet terminated
     */
    public boolean isTerminating() {
        int md = mode;
        return (md & STOP) != 0 && (md & TERMINATED) == 0;
    }

    /**
     * Returns {@code true} if this pool has been shut down.
     *
     * @return {@code true} if this pool has been shut down
     */
    public boolean isShutdown() {
        return (mode & SHUTDOWN) != 0;
    }

    /**
     * Blocks until all tasks have completed execution after a
     * shutdown request, or the timeout occurs, or the current thread
     * is interrupted, whichever happens first. Because the {@link
     * #commonPool()} never terminates until program shutdown, when
     * applied to the common pool, this method is equivalent to {@link
     * #awaitQuiescence(long, TimeUnit)} but always returns {@code false}.
     *
     * @param timeout the maximum time to wait
     * @param unit the time unit of the timeout argument
     * @return {@code true} if this executor terminated and
     *         {@code false} if the timeout elapsed before termination
     * @throws InterruptedException if interrupted while waiting
     */
    public boolean awaitTermination(long timeout, TimeUnit unit)
        throws InterruptedException {
        if (Thread.interrupted())
            throw new InterruptedException();
        if (this == common) {
            // the common pool never terminates; wait for quiescence instead
            awaitQuiescence(timeout, unit);
            return false;
        }
        long nanos = unit.toNanos(timeout);
        if (isTerminated())
            return true;
        if (nanos <= 0L)
            return false;
        long deadline = System.nanoTime() + nanos;
        synchronized (this) {
            for (;;) {
                if (isTerminated())
                    return true;
                if (nanos <= 0L)
                    return false;
                long millis = TimeUnit.NANOSECONDS.toMillis(nanos);
                // wait at least 1ms so sub-millisecond remainders don't
                // degenerate into wait(0) (== wait forever)
                wait(millis > 0L ? millis : 1L);
                nanos = deadline - System.nanoTime();
            }
        }
    }

    /**
     * If called by a ForkJoinTask operating in this pool, equivalent
     * in effect to {@link ForkJoinTask#helpQuiesce}. Otherwise,
     * waits and/or attempts to assist performing tasks until this
     * pool {@link #isQuiescent} or the indicated timeout elapses.
     *
     * @param timeout the maximum time to wait
     * @param unit the time unit of the timeout argument
     * @return {@code true} if quiescent; {@code false} if the
     *         timeout elapsed.
     */
    public boolean awaitQuiescence(long timeout, TimeUnit unit) {
        long nanos = unit.toNanos(timeout);
        ForkJoinWorkerThread wt;
        Thread thread = Thread.currentThread();
        if ((thread instanceof ForkJoinWorkerThread) &&
            (wt = (ForkJoinWorkerThread)thread).pool == this) {
            // a worker of this pool helps directly rather than waiting
            helpQuiescePool(wt.workQueue);
            return true;
        }
        else {
            for (long startTime = System.nanoTime();;) {
                ForkJoinTask<?> t;
                if ((t = pollScan(false)) != null)
                    t.doExec(); // assist by running a pending task
                else if (isQuiescent())
                    return true;
                else if ((System.nanoTime() - startTime) > nanos)
                    return false;
                else
                    Thread.yield(); // cannot block
            }
        }
    }

    /**
     * Waits and/or attempts to assist performing tasks indefinitely
     * until the {@link #commonPool()} {@link #isQuiescent}.
     */
    static void quiesceCommonPool() {
        common.awaitQuiescence(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    }

    /**
     * Interface for extending managed parallelism for tasks running
     * in {@link ForkJoinPool}s.
     *
     * <p>A {@code ManagedBlocker} provides two methods.  Method
     * {@link #isReleasable} must return {@code true} if blocking is
     * not necessary. Method {@link #block} blocks the current thread
     * if necessary (perhaps internally invoking {@code isReleasable}
     * before actually blocking). These actions are performed by any
     * thread invoking {@link ForkJoinPool#managedBlock(ManagedBlocker)}.
     * The unusual methods in this API accommodate synchronizers that
     * may, but don't usually, block for long periods. Similarly, they
     * allow more efficient internal handling of cases in which
     * additional workers may be, but usually are not, needed to
     * ensure sufficient parallelism.  Toward this end,
     * implementations of method {@code isReleasable} must be amenable
     * to repeated invocation.
     *
     * <p>For example, here is a ManagedBlocker based on a
     * ReentrantLock:
     * <pre> {@code
     * class ManagedLocker implements ManagedBlocker {
     *   final ReentrantLock lock;
     *   boolean hasLock = false;
     *   ManagedLocker(ReentrantLock lock) { this.lock = lock; }
     *   public boolean block() {
     *     if (!hasLock)
     *       lock.lock();
     *     return true;
     *   }
     *   public boolean isReleasable() {
     *     return hasLock || (hasLock = lock.tryLock());
     *   }
     * }}</pre>
     *
     * <p>Here is a class that possibly blocks waiting for an
     * item on a given queue:
     * <pre> {@code
     * class QueueTaker<E> implements ManagedBlocker {
     *   final BlockingQueue<E> queue;
     *   volatile E item = null;
     *   QueueTaker(BlockingQueue<E> q) { this.queue = q; }
     *   public boolean block() throws InterruptedException {
     *     if (item == null)
     *       item = queue.take();
     *     return true;
     *   }
     *   public boolean isReleasable() {
     *     return item != null || (item = queue.poll()) != null;
     *   }
     *   public E getItem() { // call after pool.managedBlock completes
     *     return item;
     *   }
     * }}</pre>
     */
    public static interface ManagedBlocker {
        /**
         * Possibly blocks the current thread, for example waiting for
         * a lock or condition.
         *
         * @return {@code true} if no additional blocking is necessary
         * (i.e., if isReleasable would return true)
         * @throws InterruptedException if interrupted while waiting
         * (the method is not required to do so, but is allowed to)
         */
        boolean block() throws InterruptedException;

        /**
         * Returns {@code true} if blocking is unnecessary.
         * @return {@code true} if blocking is unnecessary
         */
        boolean isReleasable();
    }

    /**
     * Runs the given possibly blocking task.  When {@linkplain
     * ForkJoinTask#inForkJoinPool() running in a ForkJoinPool}, this
     * method possibly arranges for a spare thread to be activated if
     * necessary to ensure sufficient parallelism while the current
     * thread is blocked in {@link ManagedBlocker#block blocker.block()}.
     *
     * <p>This method repeatedly calls {@code blocker.isReleasable()} and
     * {@code blocker.block()} until either method returns {@code true}.
     * Every call to {@code blocker.block()} is preceded by a call to
     * {@code blocker.isReleasable()} that returned {@code false}.
     *
     * <p>If not running in a ForkJoinPool, this method is
     * behaviorally equivalent to
     * <pre> {@code
     * while (!blocker.isReleasable())
     *   if (blocker.block())
     *     break;}</pre>
     *
     * If running in a ForkJoinPool, the pool may first be expanded to
     * ensure sufficient parallelism available during the call to
     * {@code blocker.block()}.
     *
     * @param blocker the blocker task
     * @throws InterruptedException if {@code blocker.block()} did so
     */
    public static void managedBlock(ManagedBlocker blocker)
        throws InterruptedException {
        if (blocker == null) throw new NullPointerException();
        ForkJoinPool p;
        ForkJoinWorkerThread wt;
        WorkQueue w;
        Thread t = Thread.currentThread();
        if ((t instanceof ForkJoinWorkerThread) &&
            (p = (wt = (ForkJoinWorkerThread)t).pool) != null &&
            (w = wt.workQueue) != null) {
            int block;
            while (!blocker.isReleasable()) {
                // try to arrange compensation (a spare/released worker)
                // before blocking; nonzero return means we may proceed
                if ((block = p.tryCompensate(w)) != 0) {
                    try {
                        do {} while (!blocker.isReleasable() &&
                                     !blocker.block());
                    } finally {
                        // undo the compensation credit taken above
                        // (positive block => an RC unit was released)
                        CTL.getAndAdd(p, (block > 0) ? RC_UNIT : 0L);
                    }
                    break;
                }
            }
        }
        else {
            // not a pool worker: plain releasable/block loop
            do {} while (!blocker.isReleasable() &&
                         !blocker.block());
        }
    }

    /**
     * If the given executor is a ForkJoinPool, poll and execute
     * AsynchronousCompletionTasks from worker's queue until none are
     * available or blocker is released.
     */
    static void helpAsyncBlocker(Executor e, ManagedBlocker blocker) {
        if (e instanceof ForkJoinPool) {
            WorkQueue w; ForkJoinWorkerThread wt; WorkQueue[] ws; int r, n;
            ForkJoinPool p = (ForkJoinPool)e;
            Thread thread = Thread.currentThread();
            if (thread instanceof ForkJoinWorkerThread &&
                (wt = (ForkJoinWorkerThread)thread).pool == p)
                w = wt.workQueue; // worker: help from its own queue
            else if ((r = ThreadLocalRandom.getProbe()) != 0 &&
                     (ws = p.workQueues) != null && (n = ws.length) > 0)
                // external thread: pick its probe-mapped submission slot
                w = ws[(n - 1) & r & SQMASK];
            else
                w = null;
            if (w != null)
                w.helpAsyncBlocker(blocker);
        }
    }

    // AbstractExecutorService overrides.
    // These rely on undocumented
    // fact that ForkJoinTask.adapt returns ForkJoinTasks that also
    // implement RunnableFuture.

    protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
        return new ForkJoinTask.AdaptedRunnable<T>(runnable, value);
    }

    protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
        return new ForkJoinTask.AdaptedCallable<T>(callable);
    }

    // VarHandle mechanics
    private static final VarHandle CTL;   // atomic access to the ctl word
    private static final VarHandle MODE;  // atomic access to the mode word
    static final VarHandle QA;            // array-element access for task slots

    static {
        try {
            MethodHandles.Lookup l = MethodHandles.lookup();
            CTL = l.findVarHandle(ForkJoinPool.class, "ctl", long.class);
            MODE = l.findVarHandle(ForkJoinPool.class, "mode", int.class);
            QA = MethodHandles.arrayElementVarHandle(ForkJoinTask[].class);
        } catch (ReflectiveOperationException e) {
            throw new ExceptionInInitializerError(e);
        }

        // Reduce the risk of rare disastrous classloading in first call to
        // LockSupport.park: https://bugs.openjdk.java.net/browse/JDK-8074773
        Class<?> ensureLoaded = LockSupport.class;

        // Spare-thread cap for the common pool, overridable by property;
        // parse failures silently keep the compiled-in default
        int commonMaxSpares = DEFAULT_COMMON_MAX_SPARES;
        try {
            String p = System.getProperty
                ("java.util.concurrent.ForkJoinPool.common.maximumSpares");
            if (p != null)
                commonMaxSpares = Integer.parseInt(p);
        } catch (Exception ignore) {}
        COMMON_MAX_SPARES = commonMaxSpares;

        defaultForkJoinWorkerThreadFactory =
            new DefaultForkJoinWorkerThreadFactory();
        modifyThreadPermission = new RuntimePermission("modifyThread");

        // Construct the common pool inside doPrivileged so it is created
        // regardless of the caller's permissions
        common = AccessController.doPrivileged(new PrivilegedAction<>() {
            public ForkJoinPool run() {
                return new ForkJoinPool((byte)0); }});

        COMMON_PARALLELISM = Math.max(common.mode & SMASK, 1);
    }

    /**
     * Factory for innocuous worker threads.
     */
    private static final class InnocuousForkJoinWorkerThreadFactory
        implements ForkJoinWorkerThreadFactory {

        /**
         * An ACC to restrict permissions for the factory itself.
         * The constructed workers have no permissions set.
         */
        private static final AccessControlContext ACC = contextWithPermissions(
            modifyThreadPermission,
            new RuntimePermission("enableContextClassLoaderOverride"),
            new RuntimePermission("modifyThreadGroup"),
            new RuntimePermission("getClassLoader"),
            new RuntimePermission("setContextClassLoader"));

        public final ForkJoinWorkerThread newThread(ForkJoinPool pool) {
            return AccessController.doPrivileged(
                new PrivilegedAction<>() {
                    public ForkJoinWorkerThread run() {
                        return new ForkJoinWorkerThread.
                            InnocuousForkJoinWorkerThread(pool);
                    }},
                ACC);
        }
    }
}
package pl.agh.wd.model;

import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.hibernate.annotations.NotFound;
import org.hibernate.annotations.NotFoundAction;

import javax.persistence.*;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * JPA entity for a university course ("course" table): hours of lecture and
 * laboratory time, ECTS points, exam flag, name, and semester, plus its
 * relationships to enrolled students, lecturers, and a field of study.
 * Getters/setters and the no-arg constructor are generated by Lombok.
 */
@Entity
@Table(name = "course")
@Getter
@Setter
@NoArgsConstructor
public class Course {

    // NOTE(review): @OrderColumn normally applies to ordered collection
    // mappings, not to an @Id field — looks unintentional here; verify
    // against the schema before removing.
    @Id
    @GeneratedValue
    @OrderColumn
    private long id;

    // NOTE(review): nullable = true on a primitive int column — a NULL in
    // the database cannot be represented by the field; confirm whether
    // Integer was intended.
    @Column(name="lecture_time", nullable = true)
    private int lecture_time;

    @Column(name="laboratory_time", nullable = true)
    private int laboratory_time;

    @Column(name="ects")
    private int ects;

    // whether the course ends with an exam
    @Column(name="exam")
    private boolean exam;

    @Column(name="name")
    private String name;

    @Column(name="semester")
    private int semester;

    // Enrollments for this course; rows whose target is missing are skipped
    // rather than raising an error (NotFoundAction.IGNORE). Remains null
    // (not empty) when the entity is built via the 5-arg constructor.
    @OneToMany(fetch = FetchType.EAGER, mappedBy = "course")
    @NotFound(action = NotFoundAction.IGNORE)
    private Set<CourseStudent> courseStudents;

    // Lecturers teaching this course, via the course_lecturer join table.
    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(name = "course_lecturer",
            joinColumns = @JoinColumn(name = "course_id", referencedColumnName = "id"),
            inverseJoinColumns = @JoinColumn(name = "lecturer_id", referencedColumnName = "user_id"))
    private Set<Lecturer> courseLecturers;

    @ManyToOne
    @JoinColumn(name="fieldofstudy_id", nullable=true)
    private FieldOfStudy fieldOfStudy;

    /**
     * Creates a course with its basic scalar attributes; relationship
     * fields are left unset.
     */
    public Course(String name, int lecture_time, int laboratory_time, int ects, boolean exam) {
        this.name = name;
        this.lecture_time = lecture_time;
        this.laboratory_time = laboratory_time;
        this.ects = ects;
        this.exam = exam;
    }

    /**
     * Creates a course and attaches the given enrollments, wiring each
     * CourseStudent's back-reference to this course (owning side of the
     * mappedBy relationship).
     */
    public Course(String name, int lecture_time, int laboratory_time, int ects, boolean exam,
                  CourseStudent... courseStudents) {
        this(name, lecture_time, laboratory_time, ects, exam);
        for(CourseStudent courseStudent : courseStudents)
            courseStudent.setCourse(this);
        this.courseStudents = Stream.of(courseStudents).collect(Collectors.toSet());
    }
}
package org.checkerframework.dataflow.expression; import com.sun.source.tree.ArrayAccessTree; import com.sun.source.tree.ExpressionTree; import com.sun.source.tree.IdentifierTree; import com.sun.source.tree.LiteralTree; import com.sun.source.tree.MemberSelectTree; import com.sun.source.tree.MethodInvocationTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.NewArrayTree; import com.sun.source.tree.UnaryTree; import com.sun.source.tree.VariableTree; import com.sun.source.util.TreePath; import java.util.ArrayList; import java.util.List; import javax.lang.model.element.Element; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.TypeMirror; import org.checkerframework.checker.nullness.qual.Nullable; import org.checkerframework.dataflow.cfg.node.ArrayAccessNode; import org.checkerframework.dataflow.cfg.node.ArrayCreationNode; import org.checkerframework.dataflow.cfg.node.BinaryOperationNode; import org.checkerframework.dataflow.cfg.node.ClassNameNode; import org.checkerframework.dataflow.cfg.node.ExplicitThisLiteralNode; import org.checkerframework.dataflow.cfg.node.FieldAccessNode; import org.checkerframework.dataflow.cfg.node.LocalVariableNode; import org.checkerframework.dataflow.cfg.node.MethodInvocationNode; import org.checkerframework.dataflow.cfg.node.NarrowingConversionNode; import org.checkerframework.dataflow.cfg.node.Node; import org.checkerframework.dataflow.cfg.node.StringConversionNode; import org.checkerframework.dataflow.cfg.node.SuperNode; import org.checkerframework.dataflow.cfg.node.ThisLiteralNode; import org.checkerframework.dataflow.cfg.node.ValueLiteralNode; import org.checkerframework.dataflow.cfg.node.WideningConversionNode; import org.checkerframework.dataflow.util.PurityUtils; import org.checkerframework.javacutil.AnnotationProvider; import org.checkerframework.javacutil.BugInCF; import 
org.checkerframework.javacutil.ElementUtils;
import org.checkerframework.javacutil.TreeUtils;

/**
 * Collection of classes and helper functions to represent Java expressions about which the
 * org.checkerframework.dataflow analysis can possibly infer facts. Expressions include:
 *
 * <ul>
 *   <li>Field accesses (e.g., <em>o.f</em>)
 *   <li>Local variables (e.g., <em>l</em>)
 *   <li>This reference (e.g., <em>this</em>)
 *   <li>Pure method calls (e.g., <em>o.m()</em>)
 *   <li>Unknown other expressions to mark that something else was present.
 * </ul>
 */
public class FlowExpressions {

    /**
     * Returns the internal representation (as {@link FieldAccess}) of a {@link FieldAccessNode}.
     * Can contain {@link Unknown} as receiver.
     *
     * @param provider used to look up purity annotations for nested receiver expressions
     * @param node the field access to convert
     * @return the internal representation (as {@link FieldAccess}) of a {@link FieldAccessNode}.
     *     Can contain {@link Unknown} as receiver.
     */
    public static FieldAccess internalReprOfFieldAccess(
            AnnotationProvider provider, FieldAccessNode node) {
        Receiver receiver;
        Node receiverNode = node.getReceiver();
        if (node.isStatic()) {
            // For a static field the "receiver" is the declaring class itself.
            receiver = new ClassName(receiverNode.getType());
        } else {
            receiver = internalReprOf(provider, receiverNode);
        }
        return new FieldAccess(receiver, node);
    }

    /**
     * Returns the internal representation (as {@link ArrayAccess}) of an {@link ArrayAccessNode}.
     * The array and index sub-expressions can contain {@link Unknown}.
     *
     * @param provider used to look up purity annotations for sub-expressions
     * @param node the array access to convert
     * @return the internal representation (as {@link ArrayAccess}) of an {@link ArrayAccessNode}
     */
    public static ArrayAccess internalReprOfArrayAccess(
            AnnotationProvider provider, ArrayAccessNode node) {
        Receiver receiver = internalReprOf(provider, node.getArray());
        Receiver index = internalReprOf(provider, node.getIndex());
        return new ArrayAccess(node.getType(), receiver, index);
    }

    /**
     * We ignore operations such as widening and narrowing when computing the internal
     * representation.
     *
     * @return the internal representation (as {@link Receiver}) of any {@link Node}. Might contain
     *     {@link Unknown}.
     */
    public static Receiver internalReprOf(AnnotationProvider provider, Node receiverNode) {
        // Delegates with allowNonDeterministic = false: calls to non-deterministic
        // methods become Unknown rather than MethodCall.
        return internalReprOf(provider, receiverNode, false);
    }

    /**
     * We ignore operations such as widening and narrowing when computing the internal
     * representation.
     *
     * @param provider used to decide whether invoked methods are deterministic
     * @param receiverNode the dataflow node to convert
     * @param allowNonDeterministic if true, also represent calls to non-deterministic methods
     *     as {@link MethodCall}; otherwise such calls yield {@link Unknown}
     * @return the internal representation (as {@link Receiver}) of any {@link Node}. Might contain
     *     {@link Unknown}.
     */
    public static Receiver internalReprOf(
            AnnotationProvider provider, Node receiverNode, boolean allowNonDeterministic) {
        Receiver receiver = null;
        if (receiverNode instanceof FieldAccessNode) {
            FieldAccessNode fan = (FieldAccessNode) receiverNode;
            if (fan.getFieldName().equals("this")) {
                // For some reason, "className.this" is considered a field access.
                // We right this wrong here.
                receiver = new ThisReference(fan.getReceiver().getType());
            } else if (fan.getFieldName().equals("class")) {
                // "className.class" is considered a field access. This makes sense,
                // since .class is similar to a field access which is the equivalent
                // of a call to getClass(). However for the purposes of dataflow
                // analysis, and value stores, this is the equivalent of a ClassNameNode.
                receiver = new ClassName(fan.getReceiver().getType());
            } else {
                receiver = internalReprOfFieldAccess(provider, fan);
            }
        } else if (receiverNode instanceof ExplicitThisLiteralNode) {
            receiver = new ThisReference(receiverNode.getType());
        } else if (receiverNode instanceof ThisLiteralNode) {
            receiver = new ThisReference(receiverNode.getType());
        } else if (receiverNode instanceof SuperNode) {
            // "super" is also modeled as a ThisReference (same object, different static type).
            receiver = new ThisReference(receiverNode.getType());
        } else if (receiverNode instanceof LocalVariableNode) {
            LocalVariableNode lv = (LocalVariableNode) receiverNode;
            receiver = new LocalVariable(lv);
        } else if (receiverNode instanceof ArrayAccessNode) {
            ArrayAccessNode a = (ArrayAccessNode) receiverNode;
            receiver = internalReprOfArrayAccess(provider, a);
        } else if (receiverNode instanceof StringConversionNode) {
            // ignore string conversion
            return internalReprOf(provider, ((StringConversionNode) receiverNode).getOperand());
        } else if (receiverNode instanceof WideningConversionNode) {
            // ignore widening
            return internalReprOf(provider, ((WideningConversionNode) receiverNode).getOperand());
        } else if (receiverNode instanceof NarrowingConversionNode) {
            // ignore narrowing
            return internalReprOf(provider, ((NarrowingConversionNode) receiverNode).getOperand());
        } else if (receiverNode instanceof BinaryOperationNode) {
            BinaryOperationNode bopn = (BinaryOperationNode) receiverNode;
            return new BinaryOperation(
                    bopn,
                    internalReprOf(provider, bopn.getLeftOperand(), allowNonDeterministic),
                    internalReprOf(provider, bopn.getRightOperand(), allowNonDeterministic));
        } else if (receiverNode instanceof ClassNameNode) {
            ClassNameNode cn = (ClassNameNode) receiverNode;
            receiver = new ClassName(cn.getType());
        } else if (receiverNode instanceof ValueLiteralNode) {
            ValueLiteralNode vn = (ValueLiteralNode) receiverNode;
            receiver = new ValueLiteral(vn.getType(), vn);
        } else if (receiverNode instanceof ArrayCreationNode) {
            ArrayCreationNode an = (ArrayCreationNode) receiverNode;
            List<Receiver> dimensions = new ArrayList<>();
            for (Node dimension : an.getDimensions()) {
                dimensions.add(internalReprOf(provider, dimension, allowNonDeterministic));
            }
            List<Receiver> initializers = new ArrayList<>();
            for (Node initializer : an.getInitializers()) {
                initializers.add(internalReprOf(provider, initializer, allowNonDeterministic));
            }
            receiver = new ArrayCreation(an.getType(), dimensions, initializers);
        } else if (receiverNode instanceof MethodInvocationNode) {
            MethodInvocationNode mn = (MethodInvocationNode) receiverNode;
            MethodInvocationTree t = mn.getTree();
            if (t == null) {
                throw new BugInCF("Unexpected null tree for node: " + mn);
            }
            assert TreeUtils.isUseOfElement(t) : "@AssumeAssertion(nullness): tree kind";
            ExecutableElement invokedMethod = TreeUtils.elementFromUse(t);
            // Only deterministic calls (or any call, if the caller opted in) can be
            // represented as a MethodCall; otherwise receiver stays null and the
            // fall-through below produces Unknown.
            if (allowNonDeterministic || PurityUtils.isDeterministic(provider, invokedMethod)) {
                List<Receiver> parameters = new ArrayList<>();
                for (Node p : mn.getArguments()) {
                    parameters.add(internalReprOf(provider, p));
                }
                Receiver methodReceiver;
                if (ElementUtils.isStatic(invokedMethod)) {
                    methodReceiver = new ClassName(mn.getTarget().getReceiver().getType());
                } else {
                    methodReceiver = internalReprOf(provider, mn.getTarget().getReceiver());
                }
                receiver = new MethodCall(mn.getType(), invokedMethod, methodReceiver, parameters);
            }
        }

        // Fallback for every node kind not handled (or rejected) above.
        if (receiver == null) {
            receiver = new Unknown(receiverNode.getType());
        }
        return receiver;
    }

    /**
     * Returns the internal representation (as {@link Receiver}) of any {@link ExpressionTree}.
     * Might contain {@link Unknown}.
     *
     * @return the internal representation (as {@link Receiver}) of any {@link ExpressionTree}.
     *     Might contain {@link Unknown}.
     */
    public static Receiver internalReprOf(
            AnnotationProvider provider, ExpressionTree receiverTree) {
        // Note: unlike the Node overload, the tree overload defaults to
        // allowNonDeterministic = true.
        return internalReprOf(provider, receiverTree, true);
    }

    /**
     * We ignore operations such as widening and narrowing when computing the internal
     * representation.
     *
     * @param provider used to decide whether invoked methods are deterministic
     * @param receiverTree the expression tree to convert
     * @param allowNonDeterministic if true, also represent calls to non-deterministic methods
     *     as {@link MethodCall}
     * @return the internal representation (as {@link Receiver}) of any {@link ExpressionTree}.
     *     Might contain {@link Unknown}.
     */
    public static Receiver internalReprOf(
            AnnotationProvider provider, ExpressionTree receiverTree, boolean allowNonDeterministic) {
        Receiver receiver;
        switch (receiverTree.getKind()) {
            case ARRAY_ACCESS:
                ArrayAccessTree a = (ArrayAccessTree) receiverTree;
                Receiver arrayAccessExpression = internalReprOf(provider, a.getExpression());
                Receiver index = internalReprOf(provider, a.getIndex());
                receiver = new ArrayAccess(TreeUtils.typeOf(a), arrayAccessExpression, index);
                break;
            case BOOLEAN_LITERAL:
            case CHAR_LITERAL:
            case DOUBLE_LITERAL:
            case FLOAT_LITERAL:
            case INT_LITERAL:
            case LONG_LITERAL:
            case NULL_LITERAL:
            case STRING_LITERAL:
                LiteralTree vn = (LiteralTree) receiverTree;
                receiver = new ValueLiteral(TreeUtils.typeOf(receiverTree), vn.getValue());
                break;
            case NEW_ARRAY:
                NewArrayTree newArrayTree = (NewArrayTree) receiverTree;
                List<Receiver> dimensions = new ArrayList<>();
                // Dimensions are null for initializer syntax (new int[]{...}).
                if (newArrayTree.getDimensions() != null) {
                    for (ExpressionTree dimension : newArrayTree.getDimensions()) {
                        dimensions.add(internalReprOf(provider, dimension, allowNonDeterministic));
                    }
                }
                List<Receiver> initializers = new ArrayList<>();
                if (newArrayTree.getInitializers() != null) {
                    for (ExpressionTree initializer : newArrayTree.getInitializers()) {
                        initializers.add(
                                internalReprOf(provider, initializer, allowNonDeterministic));
                    }
                }
                receiver = new ArrayCreation(TreeUtils.typeOf(receiverTree), dimensions, initializers);
                break;
            case METHOD_INVOCATION:
                MethodInvocationTree mn = (MethodInvocationTree) receiverTree;
                assert TreeUtils.isUseOfElement(mn) : "@AssumeAssertion(nullness): tree kind";
                ExecutableElement invokedMethod = TreeUtils.elementFromUse(mn);
                if (PurityUtils.isDeterministic(provider, invokedMethod) || allowNonDeterministic) {
                    List<Receiver> parameters = new ArrayList<>();
                    for (ExpressionTree p : mn.getArguments()) {
                        parameters.add(internalReprOf(provider, p));
                    }
                    Receiver methodReceiver;
                    if (ElementUtils.isStatic(invokedMethod)) {
                        methodReceiver = new ClassName(TreeUtils.typeOf(mn.getMethodSelect()));
                    } else {
                        ExpressionTree methodReceiverTree = TreeUtils.getReceiverTree(mn);
                        if (methodReceiverTree != null) {
                            methodReceiver = internalReprOf(provider, methodReceiverTree);
                        } else {
                            // No explicit receiver: reconstruct the implicit this/class receiver.
                            methodReceiver = internalReprOfImplicitReceiver(invokedMethod);
                        }
                    }
                    TypeMirror type = TreeUtils.typeOf(mn);
                    receiver = new MethodCall(type, invokedMethod, methodReceiver, parameters);
                } else {
                    receiver = null;
                }
                break;
            case MEMBER_SELECT:
                receiver = internalReprOfMemberSelect(provider, (MemberSelectTree) receiverTree);
                break;
            case IDENTIFIER:
                IdentifierTree identifierTree = (IdentifierTree) receiverTree;
                TypeMirror typeOfId = TreeUtils.typeOf(identifierTree);
                if (identifierTree.getName().contentEquals("this")
                        || identifierTree.getName().contentEquals("super")) {
                    receiver = new ThisReference(typeOfId);
                    break;
                }
                assert TreeUtils.isUseOfElement(identifierTree)
                        : "@AssumeAssertion(nullness): tree kind";
                Element ele = TreeUtils.elementFromUse(identifierTree);
                if (ElementUtils.isClassElement(ele)) {
                    receiver = new ClassName(ele.asType());
                    break;
                }
                switch (ele.getKind()) {
                    case LOCAL_VARIABLE:
                    case RESOURCE_VARIABLE:
                    case EXCEPTION_PARAMETER:
                    case PARAMETER:
                        receiver = new LocalVariable(ele);
                        break;
                    case FIELD:
                        // Implicit access expression, such as "this" or a class name
                        Receiver fieldAccessExpression;
                        @SuppressWarnings(
                                "nullness:dereference.of.nullable") // a field has enclosing class
                        TypeMirror enclosingType = ElementUtils.enclosingClass(ele).asType();
                        if (ElementUtils.isStatic(ele)) {
                            fieldAccessExpression = new ClassName(enclosingType);
                        } else {
                            fieldAccessExpression = new ThisReference(enclosingType);
                        }
                        receiver =
                                new FieldAccess(
                                        fieldAccessExpression, typeOfId, (VariableElement) ele);
                        break;
                    default:
                        receiver = null;
                }
                break;
            case UNARY_PLUS:
                // Unary plus is a no-op; represent the operand directly.
                return internalReprOf(
                        provider, ((UnaryTree) receiverTree).getExpression(), allowNonDeterministic);
            default:
                receiver = null;
        }

        // Any unhandled (or rejected) tree kind becomes Unknown.
        if (receiver == null) {
            receiver = new Unknown(TreeUtils.typeOf(receiverTree));
        }
        return receiver;
    }

    /**
     * Returns the implicit receiver of ele.
     *
     * <p>Returns either a new ClassName or a new ThisReference depending on whether ele is static
     * or not. The passed element must be a field, method, or class.
     *
     * @param ele field, method, or class
     * @return either a new ClassName or a new ThisReference depending on whether ele is static or
     *     not
     */
    public static Receiver internalReprOfImplicitReceiver(Element ele) {
        TypeElement enclosingClass = ElementUtils.enclosingClass(ele);
        if (enclosingClass == null) {
            throw new BugInCF(
                    "internalReprOfImplicitReceiver's arg has no enclosing class: " + ele);
        }
        TypeMirror enclosingType = enclosingClass.asType();
        if (ElementUtils.isStatic(ele)) {
            return new ClassName(enclosingType);
        } else {
            return new ThisReference(enclosingType);
        }
    }

    /**
     * Returns either a new ClassName or ThisReference Receiver object for the enclosingType.
     *
     * <p>The Tree should be an expression or a statement that does not have a receiver or an
     * implicit receiver. For example, a local variable declaration.
     *
     * @param path TreePath to tree
     * @param enclosingType type of the enclosing type
     * @return a new ClassName or ThisReference that is a Receiver object for the enclosingType
     */
    public static Receiver internalReprOfPseudoReceiver(TreePath path, TypeMirror enclosingType) {
        if (TreeUtils.isTreeInStaticScope(path)) {
            return new ClassName(enclosingType);
        } else {
            return new ThisReference(enclosingType);
        }
    }

    /**
     * Converts a member select such as {@code expr.field}, {@code Type.CONSTANT} or
     * {@code expr.method} into the matching Receiver (class literal, nested class name,
     * the receiver expression itself, or a field access).
     *
     * @throws BugInCF if the selected element is not a class, method, constructor, enum
     *     constant, or field
     */
    private static Receiver internalReprOfMemberSelect(
            AnnotationProvider provider, MemberSelectTree memberSelectTree) {
        TypeMirror expressionType = TreeUtils.typeOf(memberSelectTree.getExpression());
        if (TreeUtils.isClassLiteral(memberSelectTree)) {
            return new ClassName(expressionType);
        }
        assert TreeUtils.isUseOfElement(memberSelectTree)
                : "@AssumeAssertion(nullness): tree kind";
        Element ele = TreeUtils.elementFromUse(memberSelectTree);
        if (ElementUtils.isClassElement(ele)) {
            // o instanceof MyClass.InnerClass
            // o instanceof MyClass.InnerInterface
            TypeMirror selectType = TreeUtils.typeOf(memberSelectTree);
            return new ClassName(selectType);
        }
        switch (ele.getKind()) {
            case METHOD:
            case CONSTRUCTOR:
                // For a method/constructor select, the interesting part is the receiver expression.
                return internalReprOf(provider, memberSelectTree.getExpression());
            case ENUM_CONSTANT:
            case FIELD:
                TypeMirror fieldType = TreeUtils.typeOf(memberSelectTree);
                Receiver r = internalReprOf(provider, memberSelectTree.getExpression());
                return new FieldAccess(r, fieldType, (VariableElement) ele);
            default:
                throw new BugInCF("Unexpected element kind: %s element: %s", ele.getKind(), ele);
        }
    }

    /**
     * Returns Receiver objects for the formal parameters of the method in which path is enclosed.
     *
     * @param annotationProvider annotationProvider
     * @param path TreePath that is enclosed by the method
     * @return list of Receiver objects for the formal parameters of the method in which path is
     *     enclosed, {@code null} otherwise
     */
    public static @Nullable List<Receiver> getParametersOfEnclosingMethod(
            AnnotationProvider annotationProvider, TreePath path) {
        MethodTree methodTree = TreeUtils.enclosingMethod(path);
        if (methodTree == null) {
            return null;
        }
        List<Receiver> internalArguments = new ArrayList<>();
        for (VariableTree arg : methodTree.getParameters()) {
            internalArguments.add(internalReprOf(annotationProvider, new LocalVariableNode(arg)));
        }
        return internalArguments;
    }
}
package com.example.floralboutique.ui.admin.promotionlist;

import android.arch.lifecycle.LiveData;
import android.arch.lifecycle.ViewModel;

import com.example.floralboutique.data.FloralBoutiqueRepository;
import com.example.floralboutique.data.entity.Promotion;

import java.util.List;

/**
 * ViewModel backing the admin promotion-list screen.
 *
 * <p>Holds a reference to the app repository and exposes the promotion list as
 * {@link LiveData} so the UI survives configuration changes without re-querying.
 */
public class AdminPromotionListViewModel extends ViewModel {

    // Single data source for promotion queries; assigned once in the constructor.
    // (Renamed from the non-idiomatic trailing-underscore name "repository_".)
    private final FloralBoutiqueRepository repository;

    /**
     * @param repository repository used to load promotions; expected non-null
     */
    public AdminPromotionListViewModel(FloralBoutiqueRepository repository) {
        this.repository = repository;
    }

    /**
     * Returns all promotions as observable {@link LiveData}.
     *
     * @return live list of every {@link Promotion} known to the repository
     */
    public LiveData<List<Promotion>> getAllPromotions() {
        return repository.getAllPromotions();
    }
}
package com.araditc.aradsocket;

import java.io.IOException;
import java.nio.channels.ByteChannel;

/**
 * Base type for exceptions that steer control flow rather than report errors.
 *
 * <p>Because instances are thrown on hot paths and never indicate a failure, they
 * deliberately carry no stack trace — capturing one would be pure overhead.
 *
 * <p>Extending {@link IOException} is a compromise: it lets {@link AradSocket} throw
 * these exceptions while still implementing the {@link ByteChannel} interface.
 */
public abstract class AradSocketFlowControlException extends IOException {

    private static final long serialVersionUID = -2394919487958591959L;

    /** Creates the exception with no message; the implicit super call suffices. */
    public AradSocketFlowControlException() {
    }

    /**
     * Skips stack-trace capture entirely, for efficiency.
     *
     * @return this exception, unchanged
     */
    @Override
    public Throwable fillInStackTrace() {
        return this;
    }
}
package com.ygnn.gulimall.member.entity;

import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;

import java.math.BigDecimal;
import java.io.Serializable;
import java.util.Date;

import lombok.Data;

/**
 * Member level (MyBatis-Plus entity for table {@code ums_member_level}).
 *
 * @author Limpid
 * @email liufangkun1008@163.com
 * @date 2021-11-04 08:35:51
 */
@Data
@TableName("ums_member_level")
public class MemberLevelEntity implements Serializable {
	private static final long serialVersionUID = 1L;

	/**
	 * id (primary key)
	 */
	@TableId
	private Long id;
	/**
	 * Level name
	 */
	private String name;
	/**
	 * Growth points required to reach this level
	 */
	private Integer growthPoint;
	/**
	 * Whether this is the default level [0 -> no; 1 -> yes]
	 */
	private Integer defaultStatus;
	/**
	 * Free-shipping threshold (order amount above which shipping is free)
	 */
	private BigDecimal freeFreightPoint;
	/**
	 * Growth points earned per product review
	 */
	private Integer commentGrowthPoint;
	/**
	 * Whether the free-shipping privilege applies
	 */
	private Integer priviledgeFreeFreight;
	/**
	 * Whether the member-price privilege applies
	 */
	private Integer priviledgeMemberPrice;
	/**
	 * Whether the birthday privilege applies
	 */
	private Integer priviledgeBirthday;
	/**
	 * Remark
	 */
	private String note;

}
package org.springframework.cloud.gateway.config;

import java.util.Collections;
import java.util.Map;

import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.Ordered;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;

import static org.springframework.cloud.bootstrap.BootstrapApplicationListener.BOOTSTRAP_PROPERTY_SOURCE_NAME;
import static org.springframework.cloud.bootstrap.BootstrapApplicationListener.DEFAULT_ORDER;

/**
 * Reenables reactive http server after bootstrap.
 * TODO: remove when boot 2.0 handles the NONE web case
 * @author Spencer Gibb
 */
public class PostBootstrapApplicationListener
		implements ApplicationListener<ApplicationEnvironmentPreparedEvent>, Ordered {

	@Override
	public void onApplicationEvent(ApplicationEnvironmentPreparedEvent event) {
		ConfigurableEnvironment environment = event.getEnvironment();
		// don't listen to events in a bootstrap context
		if (environment.getPropertySources().contains(BOOTSTRAP_PROPERTY_SOURCE_NAME)) {
			return;
		}
		// Re-enable the reactive web server by inserting spring.reactive.enabled=true
		// with higher precedence than the "bootstrap-web-disabled" source that
		// disabled it during bootstrap.
		final Map<String, Object> map = Collections.singletonMap("spring.reactive.enabled", "true");
		environment.getPropertySources().addBefore("bootstrap-web-disabled",
				new MapPropertySource("bootstrap-web-reenable", map));
	}

	@Override
	public int getOrder() {
		// Run immediately after BootstrapApplicationListener so the disabled-web
		// property source it adds already exists when we insert ours before it.
		return DEFAULT_ORDER+1;
	}
}
package com.yunlongstudio.design.headfirst.command.party; public class LightOffCommand implements Command { Light light; public LightOffCommand(Light light) { this.light = light; } public void execute() { light.off(); } public void undo() { light.on(); } }
package org.synyx.urlaubsverwaltung.workingtime; import org.springframework.data.jpa.domain.AbstractPersistable; import org.synyx.urlaubsverwaltung.period.DayLength; import org.synyx.urlaubsverwaltung.period.WeekDay; import org.synyx.urlaubsverwaltung.person.Person; import org.synyx.urlaubsverwaltung.settings.FederalState; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.OneToOne; import java.time.DayOfWeek; import java.time.LocalDate; import java.util.List; import java.util.Optional; /** * Entity representing the working time of a person. */ @Entity public class WorkingTime extends AbstractPersistable<Integer> { @OneToOne private Person person; @Enumerated(EnumType.STRING) private DayLength monday = DayLength.ZERO; @Enumerated(EnumType.STRING) private DayLength tuesday = DayLength.ZERO; @Enumerated(EnumType.STRING) private DayLength wednesday = DayLength.ZERO; @Enumerated(EnumType.STRING) private DayLength thursday = DayLength.ZERO; @Enumerated(EnumType.STRING) private DayLength friday = DayLength.ZERO; @Enumerated(EnumType.STRING) private DayLength saturday = DayLength.ZERO; @Enumerated(EnumType.STRING) private DayLength sunday = DayLength.ZERO; private LocalDate validFrom; /** * If set, override the system-wide FederalState setting for this person. TODO: Maybe we should embed the whole * WorkingTimeSettings to allow overriding all of them? 
*/ @Enumerated(EnumType.STRING) private FederalState federalStateOverride; public void setWorkingDays(List<Integer> workingDays, DayLength dayLength) { setAllDayLengthsToZero(); if (!workingDays.isEmpty()) { for (Integer dayOfWeek : workingDays) { setDayLengthForWeekDay(dayOfWeek, dayLength); } } } public boolean hasWorkingDays(List<Integer> workingDays) { for (WeekDay day : WeekDay.values()) { int dayOfWeek = day.getDayOfWeek(); DayLength dayLength = getDayLengthForWeekDay(dayOfWeek); if (dayLength == DayLength.FULL) { // has to be in the given list if (!workingDays.contains(dayOfWeek)) { return false; } } else { // must not be in the given list if (workingDays.contains(dayOfWeek)) { return false; } } } return true; } private void setAllDayLengthsToZero() { this.monday = DayLength.ZERO; this.tuesday = DayLength.ZERO; this.wednesday = DayLength.ZERO; this.thursday = DayLength.ZERO; this.friday = DayLength.ZERO; this.saturday = DayLength.ZERO; this.sunday = DayLength.ZERO; } public Person getPerson() { return person; } public void setPerson(Person person) { this.person = person; } public DayLength getDayLengthForWeekDay(int weekDay) { switch (DayOfWeek.of(weekDay)) { case MONDAY: return this.monday; case TUESDAY: return this.tuesday; case WEDNESDAY: return this.wednesday; case THURSDAY: return this.thursday; case FRIDAY: return this.friday; case SATURDAY: return this.saturday; case SUNDAY: return this.sunday; default: return null; } } public void setDayLengthForWeekDay(int weekDay, DayLength dayLength) { switch (DayOfWeek.of(weekDay)) { case MONDAY: this.monday = dayLength; break; case TUESDAY: this.tuesday = dayLength; break; case WEDNESDAY: this.wednesday = dayLength; break; case THURSDAY: this.thursday = dayLength; break; case FRIDAY: this.friday = dayLength; break; case SATURDAY: this.saturday = dayLength; break; case SUNDAY: this.sunday = dayLength; break; default: break; } } public LocalDate getValidFrom() { if (this.validFrom == null) { return null; } return 
this.validFrom; } public void setValidFrom(LocalDate validFrom) { if (validFrom == null) { this.validFrom = null; } else { this.validFrom = validFrom; } } public DayLength getMonday() { return monday; } public DayLength getTuesday() { return tuesday; } public DayLength getWednesday() { return wednesday; } public DayLength getThursday() { return thursday; } public DayLength getFriday() { return friday; } public DayLength getSaturday() { return saturday; } public DayLength getSunday() { return sunday; } public Optional<FederalState> getFederalStateOverride() { return Optional.ofNullable(federalStateOverride); } public void setFederalStateOverride(FederalState federalState) { this.federalStateOverride = federalState; } }
package org.scotsbots.robot.recyclerush;

import edu.wpi.first.wpilibj.DoubleSolenoid.Value;
import edu.wpi.first.wpilibj.Timer;

/**
 * High-level lift/arm operations for the practice robot (FRC 2015 Recycle Rush).
 * Inherits hardware objects (liftEncoder, liftMotor, armMotors, armSolenoid, limit
 * switches) from {@link RobotHardwarePracticebot}; everything here is static.
 *
 * NOTE(review): setLiftPosition busy-waits on the encoder and never issues
 * moveLift(0) after reaching the target — presumably the caller stops the motor,
 * or the speed controller holds; confirm on hardware.
 */
public class RobotOperationPracticebot extends RobotHardwarePracticebot {

	//TODO Create and modify arm position constants
	// Lift setpoints in raw encoder ticks, lowest (0) to highest.
	public static final int POSITION_0 = 0;
	public static final int POSITION_1 = 787;
	public static final int POSITION_2 = 2026;
	public static final int POSITION_3 = 3044;
	public static final int POSITION_4 = 3936;
	// Absolute travel limit in encoder ticks.
	public static final int MAX_HEIGHT = 4250;

	// Last setpoint commanded via setLiftPosition.
	public static int currentSetPosition = POSITION_0;

	/**
	 * Resets robot to pre-defined state.
	 */
	public static void reset()
	{
		//setLiftPosition(POSITION_0);
		/*while((!liftBottomLimit.get() || !backupLiftBottomLimit.get()))// && (0 < liftEncoder.get()))
		{
			moveLift(0.75);
		}*/
		//liftEncoder.reset();
		//setLiftPosition(POSITION_0);
		// NOTE(review): most of the reset sequence is commented out — only the arms
		// are closed here; confirm whether the lift homing code should be restored.
		closeArms();
	}

	/**
	 * Assuming lift is at position 1, the robot is not moving, and is above the tote, close arms and retract arms, move to 0 position, lift arms to position 2.
	 */
	public static void pickupTote()
	{
		//if arms extended, retract
		//if arms open, close arms
		//lower arms to position 1.
		setLiftPosition(POSITION_0);
		closeArms();
		setLiftPosition(POSITION_1);
	}

	/**
	 * Gets current position of arm and raises one position.
	 */
	public static void raiseLiftPosition()
	{
		// Checks from highest to lowest: the first band containing the current
		// encoder reading determines the next setpoint up.
		if(liftEncoder.get() > POSITION_4)
		{
			; // already at or above the top setpoint — nothing to do
		}
		else if(liftEncoder.get() > POSITION_3)
		{
			setLiftPosition(POSITION_4);
			//currentSetPosition = POSITION_4;
		}
		else if(liftEncoder.get() > POSITION_2)
		{
			setLiftPosition(POSITION_3);
			//currentSetPosition = POSITION_3;
		}
		else if(liftEncoder.get() > POSITION_1)
		{
			setLiftPosition(POSITION_2);
			//currentSetPosition = POSITION_2;
		}
		else if(liftEncoder.get() > POSITION_0)
		{
			setLiftPosition(POSITION_1);
			//currentSetPosition = POSITION_1;
		}
	}

	/**
	 * Gets current position of arm and lowers one position.
	 */
	public static void lowerLiftPosition()
	{
		// Mirror of raiseLiftPosition: first band below the reading wins.
		if(liftEncoder.get() < POSITION_0)
		{
			; // already at or below the bottom setpoint — nothing to do
		}
		else if(liftEncoder.get() < POSITION_1)
		{
			setLiftPosition(POSITION_0);
			//currentSetPosition = POSITION_0;
		}
		else if(liftEncoder.get() < POSITION_2)
		{
			setLiftPosition(POSITION_1);
			//currentSetPosition = POSITION_1;
		}
		else if(liftEncoder.get() < POSITION_3)
		{
			setLiftPosition(POSITION_2);
			//currentSetPosition = POSITION_2;
		}
		else if(liftEncoder.get() < POSITION_4)
		{
			setLiftPosition(POSITION_3);
			//currentSetPosition = POSITION_3;
		}
	}

	/**
	 * Drives the lift until the encoder reaches encoderVal, then records it as
	 * the current setpoint.
	 *
	 * NOTE(review): this blocks the calling thread in a busy-wait and exits
	 * without commanding moveLift(0); negative speed appears to raise the lift
	 * (encoder increasing) and positive to lower it — confirm motor polarity.
	 */
	public static void setLiftPosition(int encoderVal)
	{
		while(encoderVal > liftEncoder.get())
		{
			moveLift(-0.75);
		}
		while(encoderVal < liftEncoder.get())
		{
			moveLift(0.75);
		}
		currentSetPosition = encoderVal;
	}

	/**
	 * Sets tote down at position 0.
	 */
	public static void setToteDown()
	{
		// NOTE(review): javadoc says position 0 but the code moves to POSITION_1 —
		// confirm which is intended.
		setLiftPosition(POSITION_1);
		Timer.delay(2); // give the lift time to settle before releasing
		openArms();
	}

	// Pneumatics: kForward opens the arms.
	public static void openArms()
	{
		armSolenoid.set(Value.kForward);
	}

	// Pneumatics: kReverse closes the arms.
	public static void closeArms()
	{
		armSolenoid.set(Value.kReverse);
	}

	/**
	 * Moves lift at speed.
	 * @param speed between 1 and -1
	 */
	public static void moveLift(double speed)
	{
		liftMotor.set(speed);
	}

	// Runs the arm motors open-loop at the given speed (-1..1).
	public static void moveArms(double speed)
	{
		armMotors.set(speed);
	}
}
package com.appsmith.server.solutions; import com.appsmith.external.helpers.AppsmithBeanUtils; import com.appsmith.external.models.ActionConfiguration; import com.appsmith.external.models.DBAuth; import com.appsmith.external.models.Datasource; import com.appsmith.external.models.DatasourceConfiguration; import com.appsmith.external.models.InvisibleActionFields; import com.appsmith.external.models.Policy; import com.appsmith.external.models.Property; import com.appsmith.server.constants.FieldName; import com.appsmith.server.constants.SerialiseApplicationObjective; import com.appsmith.server.domains.ActionCollection; import com.appsmith.server.domains.Application; import com.appsmith.server.domains.ApplicationMode; import com.appsmith.server.domains.ApplicationPage; import com.appsmith.server.domains.GitApplicationMetadata; import com.appsmith.server.domains.Layout; import com.appsmith.server.domains.NewAction; import com.appsmith.server.domains.NewPage; import com.appsmith.server.domains.Plugin; import com.appsmith.server.domains.PluginType; import com.appsmith.server.domains.Theme; import com.appsmith.server.domains.Workspace; import com.appsmith.server.dtos.ActionCollectionDTO; import com.appsmith.server.dtos.ActionDTO; import com.appsmith.server.dtos.ApplicationAccessDTO; import com.appsmith.server.dtos.ApplicationImportDTO; import com.appsmith.server.dtos.ApplicationJson; import com.appsmith.server.dtos.ApplicationPagesDTO; import com.appsmith.server.dtos.PageDTO; import com.appsmith.server.dtos.PageNameIdDTO; import com.appsmith.server.exceptions.AppsmithError; import com.appsmith.server.exceptions.AppsmithException; import com.appsmith.server.helpers.MockPluginExecutor; import com.appsmith.server.helpers.PluginExecutorHelper; import com.appsmith.server.migrations.ApplicationVersion; import com.appsmith.server.migrations.JsonSchemaMigration; import com.appsmith.server.migrations.JsonSchemaVersions; import com.appsmith.server.repositories.ApplicationRepository; 
import com.appsmith.server.repositories.PluginRepository; import com.appsmith.server.repositories.ThemeRepository; import com.appsmith.server.services.ActionCollectionService; import com.appsmith.server.services.ApplicationPageService; import com.appsmith.server.services.ApplicationService; import com.appsmith.server.services.DatasourceService; import com.appsmith.server.services.LayoutActionService; import com.appsmith.server.services.LayoutCollectionService; import com.appsmith.server.services.NewActionService; import com.appsmith.server.services.NewPageService; import com.appsmith.server.services.WorkspaceService; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import net.minidev.json.JSONArray; import net.minidev.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.junit.Assert; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.MethodSorters; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.buffer.DataBuffer; import org.springframework.core.io.buffer.DataBufferUtils; import org.springframework.core.io.buffer.DefaultDataBufferFactory; import org.springframework.http.HttpMethod; import org.springframework.http.MediaType; import org.springframework.http.codec.multipart.FilePart; import org.springframework.security.test.context.support.WithUserDetails; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.util.LinkedMultiValueMap; import 
reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import reactor.util.function.Tuple3;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static com.appsmith.server.acl.AclPermission.MANAGE_ACTIONS;
import static com.appsmith.server.acl.AclPermission.MANAGE_APPLICATIONS;
import static com.appsmith.server.acl.AclPermission.MANAGE_DATASOURCES;
import static com.appsmith.server.acl.AclPermission.MANAGE_PAGES;
import static com.appsmith.server.acl.AclPermission.READ_ACTIONS;
import static com.appsmith.server.acl.AclPermission.READ_APPLICATIONS;
import static com.appsmith.server.acl.AclPermission.READ_PAGES;
import static com.appsmith.server.constants.FieldName.DEFAULT_PAGE_LAYOUT;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for application export/import (ImportExportApplicationService).
 * Runs against a full Spring context; the plugin executor is mocked so no real
 * datasource connections are made. Tests are ordered by name (FixMethodOrder)
 * and share static fixtures created once in setup().
 */
@Slf4j
@RunWith(SpringRunner.class)
@SpringBootTest
@DirtiesContext
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class ImportExportApplicationServiceTests {

    @Autowired
    ImportExportApplicationService importExportApplicationService;

    @Autowired
    ApplicationPageService applicationPageService;

    @Autowired
    PluginRepository pluginRepository;

    @Autowired
    ApplicationRepository applicationRepository;

    @Autowired
    DatasourceService datasourceService;

    @Autowired
    NewPageService newPageService;

    @Autowired
    NewActionService newActionService;

    @Autowired
    WorkspaceService workspaceService;

    @Autowired
    LayoutActionService layoutActionService;

    @Autowired
    LayoutCollectionService layoutCollectionService;

    @Autowired
    ActionCollectionService actionCollectionService;

    // Mocked so getPluginExecutor() never touches a real plugin (see setup()).
    @MockBean
    PluginExecutorHelper pluginExecutorHelper;

    @Autowired
    ThemeRepository themeRepository;

    @Autowired
    ApplicationService applicationService;

    private static final String INVALID_JSON_FILE = "invalid json file";
    private static Plugin installedPlugin;
    private static String workspaceId;
    private static String testAppId;
    private static Datasource jsDatasource;
    // Shared datasources keyed "DS1"/"DS2"; populated once in setup().
    private static final Map<String, Datasource> datasourceMap = new HashMap<>();
    private static Plugin installedJsPlugin;
    // Guards the one-time fixture creation across test methods (static, so it
    // survives per-test instances created by the runner).
    private static Boolean isSetupDone = false;
    private static String exportWithConfigurationAppId;

    /**
     * Stubs the plugin executor for every test, then creates the shared
     * workspace, application and datasources exactly once.
     */
    @Before
    public void setup() {
        // Re-stub on every test run: @MockBean mocks are reset between tests.
        Mockito
                .when(pluginExecutorHelper.getPluginExecutor(Mockito.any()))
                .thenReturn(Mono.just(new MockPluginExecutor()));
        if (Boolean.TRUE.equals(isSetupDone)) {
            return;
        }
        installedPlugin = pluginRepository.findByPackageName("installed-plugin").block();
        Workspace workspace = new Workspace();
        workspace.setName("Import-Export-Test-Workspace");
        Workspace savedWorkspace = workspaceService.create(workspace).block();
        workspaceId = savedWorkspace.getId();

        // Application with internal-only fields populated (modifiedBy, git
        // metadata, timestamps) — export tests assert these are stripped.
        Application testApplication = new Application();
        testApplication.setName("Export-Application-Test-Application");
        testApplication.setWorkspaceId(workspaceId);
        testApplication.setUpdatedAt(Instant.now());
        testApplication.setLastDeployedAt(Instant.now());
        testApplication.setModifiedBy("some-user");
        testApplication.setGitApplicationMetadata(new GitApplicationMetadata());
        Application savedApplication = applicationPageService.createApplication(testApplication, workspaceId).block();
        testAppId = savedApplication.getId();

        // DS1: API-style datasource with a URL and a header.
        Datasource ds1 = new Datasource();
        ds1.setName("DS1");
        ds1.setWorkspaceId(workspaceId);
        ds1.setPluginId(installedPlugin.getId());
        final DatasourceConfiguration datasourceConfiguration = new DatasourceConfiguration();
        datasourceConfiguration.setUrl("http://httpbin.org/get");
        datasourceConfiguration.setHeaders(List.of(
                new Property("X-Answer", "42")
        ));
        ds1.setDatasourceConfiguration(datasourceConfiguration);

        // DS2: datasource with password auth (export must not leak it).
        Datasource ds2 = new Datasource();
        ds2.setName("DS2");
        ds2.setPluginId(installedPlugin.getId());
        ds2.setDatasourceConfiguration(new DatasourceConfiguration());
        ds2.setWorkspaceId(workspaceId);
        DBAuth auth = new DBAuth();
        auth.setPassword("awesome-password");
        ds2.getDatasourceConfiguration().setAuthentication(auth);

        // JS datasource for action-collection tests.
        jsDatasource = new Datasource();
        jsDatasource.setName("Default JS datasource");
        jsDatasource.setWorkspaceId(workspaceId);
        installedJsPlugin = pluginRepository.findByPackageName("installed-js-plugin").block();
        assert installedJsPlugin != null;
        jsDatasource.setPluginId(installedJsPlugin.getId());

        ds1 = datasourceService.create(ds1).block();
        ds2 = datasourceService.create(ds2).block();
        datasourceMap.put("DS1", ds1);
        datasourceMap.put("DS2", ds2);
        isSetupDone = true;
    }

    /**
     * Returns the unpublished actions of every unpublished page of the given
     * application, as a flat Flux.
     */
    private Flux<ActionDTO> getActionsInApplication(Application application) {
        return newPageService
                // fetch the unpublished pages
                .findByApplicationId(application.getId(), READ_PAGES, false)
                .flatMap(page -> newActionService.getUnpublishedActions(new LinkedMultiValueMap<>(
                        Map.of(FieldName.PAGE_ID, Collections.singletonList(page.getId()))), ""));
    }

    /**
     * Builds a mock multipart FilePart backed by a classpath resource, with a
     * JSON content type. Deep stubs are needed so headers().getContentType()
     * can be stubbed in one call.
     */
    private FilePart createFilePart(String filePath) {
        FilePart filepart = Mockito.mock(FilePart.class, Mockito.RETURNS_DEEP_STUBS);
        Flux<DataBuffer> dataBufferFlux = DataBufferUtils
                .read(
                        new ClassPathResource(filePath),
                        new DefaultDataBufferFactory(),
                        4096)
                .cache(); // cache so the content Flux can be consumed more than once
        Mockito.when(filepart.content()).thenReturn(dataBufferFlux);
        Mockito.when(filepart.headers().getContentType()).thenReturn(MediaType.APPLICATION_JSON);
        return filepart;
    }

    /**
     * Reads a classpath JSON file into an ApplicationJson, migrated to the
     * latest schema version.
     */
    private Mono<ApplicationJson> createAppJson(String filePath) {
        FilePart filePart = createFilePart(filePath);
        Mono<String> stringifiedFile = DataBufferUtils.join(filePart.content())
                .map(dataBuffer -> {
                    byte[] data = new byte[dataBuffer.readableByteCount()];
                    dataBuffer.read(data);
                    DataBufferUtils.release(dataBuffer);
                    // NOTE(review): uses the platform default charset — presumably
                    // the test assets are ASCII/UTF-8; confirm if assets change.
                    return new String(data);
                });
        return stringifiedFile
                .map(data -> {
                    Gson gson = new Gson();
                    return gson.fromJson(data, ApplicationJson.class);
                })
                .map(JsonSchemaMigration::migrateApplicationToLatestSchema);
    }

    /** Creates and persists a throwaway workspace named "Template Workspace". */
    private Workspace createTemplateWorkspace() {
        Workspace
newWorkspace = new Workspace();
        newWorkspace.setName("Template Workspace");
        return workspaceService.create(newWorkspace).block();
    }

    /** Export with a null application id must fail with INVALID_PARAMETER. */
    @Test
    @WithUserDetails(value = "api_user")
    public void exportApplicationWithNullApplicationIdTest() {
        Mono<ApplicationJson> resultMono = importExportApplicationService.exportApplicationById(null, "");

        StepVerifier
                .create(resultMono)
                .expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
                        throwable.getMessage().equals(AppsmithError.INVALID_PARAMETER.getMessage(FieldName.APPLICATION_ID)))
                .verify();
    }

    /** Export with an unknown application id must fail with NO_RESOURCE_FOUND. */
    @Test
    @WithUserDetails(value = "api_user")
    public void exportApplication_withInvalidApplicationId_throwNoResourceFoundException() {
        Mono<ApplicationJson> resultMono = importExportApplicationService.exportApplicationById("invalidAppId", "");

        StepVerifier
                .create(resultMono)
                .expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
                        throwable.getMessage().equals(AppsmithError.NO_RESOURCE_FOUND.getMessage(FieldName.APPLICATION_ID, "invalidAppId")))
                .verify();
    }

    /**
     * Internal bookkeeping fields (modifiedBy, timestamps, git metadata,
     * theme ids) must be stripped from the exported application.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void exportApplicationById_WhenContainsInternalFields_InternalFieldsNotExported() {
        Mono<ApplicationJson> resultMono = importExportApplicationService.exportApplicationById(testAppId, "");

        StepVerifier
                .create(resultMono)
                .assertNext(applicationJson -> {
                    Application exportedApplication = applicationJson.getExportedApplication();
                    assertThat(exportedApplication.getModifiedBy()).isNull();
                    assertThat(exportedApplication.getLastUpdateTime()).isNull();
                    assertThat(exportedApplication.getLastEditedAt()).isNull();
                    assertThat(exportedApplication.getLastDeployedAt()).isNull();
                    assertThat(exportedApplication.getGitApplicationMetadata()).isNull();
                    assertThat(exportedApplication.getEditModeThemeId()).isNull();
                    assertThat(exportedApplication.getPublishedModeThemeId()).isNull();
                })
                .verifyComplete();
    }

    /**
     * An app with no actions exports with its one default page and empty
     * action/datasource lists — workspace datasources that are unused by any
     * action are not exported.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void createExportAppJsonWithDatasourceButWithoutActionsTest() {
        Application testApplication = new Application();
        testApplication.setName("Another Export Application");

        final Mono<ApplicationJson> resultMono = workspaceService.getById(workspaceId)
                .flatMap(workspace -> {
                    final Datasource ds1 = datasourceMap.get("DS1");
                    ds1.setWorkspaceId(workspace.getId());

                    final Datasource ds2 = datasourceMap.get("DS2");
                    ds2.setWorkspaceId(workspace.getId());

                    return applicationPageService.createApplication(testApplication, workspaceId);
                })
                .flatMap(application -> importExportApplicationService.exportApplicationById(application.getId(), ""));

        StepVerifier.create(resultMono)
                .assertNext(applicationJson -> {
                    assertThat(applicationJson.getPageList()).hasSize(1);
                    assertThat(applicationJson.getActionList()).isEmpty();
                    assertThat(applicationJson.getDatasourceList()).isEmpty();
                })
                .verifyComplete();
    }

    /**
     * End-to-end export of an app containing actions, a JS action collection,
     * on-load bindings and a mongo-escaped table widget. Verifies that exported
     * ids are page-name-scoped (not DB ids), policies/workspace ids are
     * stripped, and default themes are attached.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void createExportAppJsonWithActionAndActionCollectionTest() {
        // NOTE(review): newWorkspace is created but never persisted/used below.
        Workspace newWorkspace = new Workspace();
        newWorkspace.setName("template-org-with-ds");

        Application testApplication = new Application();
        testApplication.setName("ApplicationWithActionCollectionAndDatasource");
        testApplication = applicationPageService.createApplication(testApplication, workspaceId).block();

        assert testApplication != null;
        final String appName = testApplication.getName();
        final Mono<ApplicationJson> resultMono = Mono.zip(
                        Mono.just(testApplication),
                        newPageService.findPageById(testApplication.getPages().get(0).getId(), READ_PAGES, false)
                )
                .flatMap(tuple -> {
                    Application testApp = tuple.getT1();
                    PageDTO testPage = tuple.getT2();

                    Layout layout = testPage.getLayouts().get(0);
                    ObjectMapper objectMapper = new ObjectMapper();
                    JSONObject dsl = new JSONObject();
                    try {
                        // Start from the stock empty-page DSL and add widgets to it.
                        dsl = new JSONObject(objectMapper.readValue(DEFAULT_PAGE_LAYOUT, new TypeReference<HashMap<String, Object>>() {
                        }));
                    } catch (JsonProcessingException e) {
                        e.printStackTrace();
                    }

                    ArrayList children = (ArrayList) dsl.get("children");

                    // Widget bound to validAction — makes it an on-load action.
                    JSONObject testWidget = new JSONObject();
                    testWidget.put("widgetName", "firstWidget");
                    JSONArray temp = new JSONArray();
                    temp.addAll(List.of(new JSONObject(Map.of("key", "testField"))));
                    testWidget.put("dynamicBindingPathList", temp);
                    testWidget.put("testField", "{{ validAction.data }}");
                    children.add(testWidget);

                    // Table widget with an "_id" binding key — the dot-containing
                    // "primaryColumns._id" path forces mongo-escaping of "Table1".
                    JSONObject tableWidget = new JSONObject();
                    tableWidget.put("widgetName", "Table1");
                    tableWidget.put("type", "TABLE_WIDGET");
                    Map<String, Object> primaryColumns = new HashMap<>();
                    JSONObject jsonObject = new JSONObject(Map.of("key", "value"));
                    primaryColumns.put("_id", "{{ PageAction.data }}");
                    primaryColumns.put("_class", jsonObject);
                    tableWidget.put("primaryColumns", primaryColumns);
                    final ArrayList<Object> objects = new ArrayList<>(); // NOTE(review): unused
                    JSONArray temp2 = new JSONArray();
                    temp2.addAll(List.of(new JSONObject(Map.of("key", "primaryColumns._id"))));
                    tableWidget.put("dynamicBindingPathList", temp2);
                    children.add(tableWidget);

                    layout.setDsl(dsl);
                    layout.setPublishedDsl(dsl);

                    // Two API actions on the page, both on-load; action2 has
                    // userSetOnLoad so export must preserve that flag.
                    ActionDTO action = new ActionDTO();
                    action.setName("validAction");
                    action.setPageId(testPage.getId());
                    action.setExecuteOnLoad(true);
                    ActionConfiguration actionConfiguration = new ActionConfiguration();
                    actionConfiguration.setHttpMethod(HttpMethod.GET);
                    action.setActionConfiguration(actionConfiguration);
                    action.setDatasource(datasourceMap.get("DS2"));

                    ActionDTO action2 = new ActionDTO();
                    action2.setName("validAction2");
                    action2.setPageId(testPage.getId());
                    action2.setExecuteOnLoad(true);
                    action2.setUserSetOnLoad(true);
                    ActionConfiguration actionConfiguration2 = new ActionConfiguration();
                    actionConfiguration2.setHttpMethod(HttpMethod.GET);
                    action2.setActionConfiguration(actionConfiguration2);
                    action2.setDatasource(datasourceMap.get("DS2"));

                    // One JS collection with a single action inside it.
                    ActionCollectionDTO actionCollectionDTO1 = new ActionCollectionDTO();
                    actionCollectionDTO1.setName("testCollection1");
                    actionCollectionDTO1.setPageId(testPage.getId());
                    actionCollectionDTO1.setApplicationId(testApp.getId());
                    actionCollectionDTO1.setWorkspaceId(testApp.getWorkspaceId());
                    actionCollectionDTO1.setPluginId(jsDatasource.getPluginId());
                    ActionDTO action1 = new ActionDTO();
                    action1.setName("testAction1");
                    action1.setActionConfiguration(new ActionConfiguration());
                    action1.getActionConfiguration().setBody("mockBody");
                    actionCollectionDTO1.setActions(List.of(action1));
                    actionCollectionDTO1.setPluginType(PluginType.JS);

                    return layoutCollectionService.createCollection(actionCollectionDTO1)
                            .then(layoutActionService.createSingleAction(action))
                            .then(layoutActionService.createSingleAction(action2))
                            .then(layoutActionService.updateLayout(testPage.getId(), layout.getId(), layout))
                            .then(importExportApplicationService.exportApplicationById(testApp.getId(), ""));
                })
                .cache(); // cache so the export runs once across the zipped subscribers below

        Mono<List<NewAction>> actionListMono = resultMono
                .then(newActionService
                        .findAllByApplicationIdAndViewMode(testApplication.getId(), false, READ_ACTIONS, null).collectList());

        Mono<List<ActionCollection>> collectionListMono = resultMono.then(
                actionCollectionService
                        .findAllByApplicationIdAndViewMode(testApplication.getId(), false, READ_ACTIONS, null).collectList());

        Mono<List<NewPage>> pageListMono = resultMono.then(
                newPageService
                        .findNewPagesByApplicationId(testApplication.getId(), READ_PAGES).collectList());

        StepVerifier
                .create(Mono.zip(resultMono, actionListMono, collectionListMono, pageListMono))
                .assertNext(tuple -> {
                    ApplicationJson applicationJson = tuple.getT1();
                    List<NewAction> DBActions = tuple.getT2();
                    List<ActionCollection> DBCollections = tuple.getT3();
                    List<NewPage> DBPages = tuple.getT4();

                    Application exportedApp = applicationJson.getExportedApplication();
                    List<NewPage> pageList = applicationJson.getPageList();
                    List<NewAction> actionList = applicationJson.getActionList();
                    List<ActionCollection> actionCollectionList = applicationJson.getActionCollectionList();
                    List<Datasource> datasourceList = applicationJson.getDatasourceList();

                    List<String> exportedCollectionIds = actionCollectionList.stream().map(ActionCollection::getId).collect(Collectors.toList());
List<String> exportedActionIds = actionList.stream().map(NewAction::getId).collect(Collectors.toList());
                    List<String> DBCollectionIds = DBCollections.stream().map(ActionCollection::getId).collect(Collectors.toList());
                    List<String> DBActionIds = DBActions.stream().map(NewAction::getId).collect(Collectors.toList());
                    List<String> DBOnLayoutLoadActionIds = new ArrayList<>();
                    List<String> exportedOnLayoutLoadActionIds = new ArrayList<>();

                    assertThat(DBPages).hasSize(1);
                    // Collect on-load action ids from the DB pages...
                    DBPages.forEach(newPage ->
                            newPage.getUnpublishedPage().getLayouts().forEach(layout -> {
                                if (layout.getLayoutOnLoadActions() != null) {
                                    layout.getLayoutOnLoadActions().forEach(dslActionDTOSet -> {
                                        dslActionDTOSet.forEach(actionDTO -> DBOnLayoutLoadActionIds.add(actionDTO.getId()));
                                    });
                                }
                            })
                    );
                    // ...and from the exported pages, to compare id formats below.
                    pageList.forEach(newPage ->
                            newPage.getUnpublishedPage().getLayouts().forEach(layout -> {
                                if (layout.getLayoutOnLoadActions() != null) {
                                    layout.getLayoutOnLoadActions().forEach(dslActionDTOSet -> {
                                        dslActionDTOSet.forEach(actionDTO -> exportedOnLayoutLoadActionIds.add(actionDTO.getId()));
                                    });
                                }
                            })
                    );

                    NewPage defaultPage = pageList.get(0);

                    // Check if the mongo escaped widget names are carried to exported file from DB
                    Layout pageLayout = DBPages.get(0).getUnpublishedPage().getLayouts().get(0);
                    Set<String> mongoEscapedWidgets = pageLayout.getMongoEscapedWidgetNames();
                    Set<String> expectedMongoEscapedWidgets = Set.of("Table1");
                    assertThat(mongoEscapedWidgets).isEqualTo(expectedMongoEscapedWidgets);

                    pageLayout = pageList.get(0).getUnpublishedPage().getLayouts().get(0);
                    Set<String> exportedMongoEscapedWidgets = pageLayout.getMongoEscapedWidgetNames();
                    assertThat(exportedMongoEscapedWidgets).isEqualTo(expectedMongoEscapedWidgets);

                    // Exported app: name kept, internals (workspace, policies) stripped,
                    // page references rewritten to page names.
                    assertThat(exportedApp.getName()).isEqualTo(appName);
                    assertThat(exportedApp.getWorkspaceId()).isNull();
                    assertThat(exportedApp.getPages()).hasSize(1);
                    assertThat(exportedApp.getPages().get(0).getId()).isEqualTo(defaultPage.getUnpublishedPage().getName());
                    assertThat(exportedApp.getPolicies()).isNull();

                    // Exported page is detached from DB identity.
                    assertThat(pageList).hasSize(1);
                    assertThat(defaultPage.getApplicationId()).isNull();
                    assertThat(defaultPage.getUnpublishedPage().getLayouts().get(0).getDsl()).isNotNull();
                    assertThat(defaultPage.getId()).isNull();
                    assertThat(defaultPage.getPolicies()).isNull();

                    // Actions: ids rewritten as "<pageName>_<actionName>", plugin id
                    // replaced by the plugin package name.
                    assertThat(actionList.isEmpty()).isFalse();
                    assertThat(actionList).hasSize(3);
                    NewAction validAction = actionList.stream().filter(action -> action.getId().equals("Page1_validAction")).findFirst().get();
                    assertThat(validAction.getApplicationId()).isNull();
                    assertThat(validAction.getPluginId()).isEqualTo(installedPlugin.getPackageName());
                    assertThat(validAction.getPluginType()).isEqualTo(PluginType.API);
                    assertThat(validAction.getWorkspaceId()).isNull();
                    assertThat(validAction.getPolicies()).isNull();
                    assertThat(validAction.getId()).isNotNull();
                    ActionDTO unpublishedAction = validAction.getUnpublishedAction();
                    assertThat(unpublishedAction.getPageId()).isEqualTo(defaultPage.getUnpublishedPage().getName());
                    assertThat(unpublishedAction.getDatasource().getPluginId()).isEqualTo(installedPlugin.getPackageName());

                    // Collection member actions get "<pageName>_<collection>.<action>" ids.
                    NewAction testAction1 = actionList.stream().filter(action -> action.getUnpublishedAction().getName().equals("testAction1")).findFirst().get();
                    assertThat(testAction1.getId()).isEqualTo("Page1_testCollection1.testAction1");

                    // The JS collection is exported detached from DB identity too.
                    assertThat(actionCollectionList.isEmpty()).isFalse();
                    assertThat(actionCollectionList).hasSize(1);
                    final ActionCollection actionCollection = actionCollectionList.get(0);
                    assertThat(actionCollection.getApplicationId()).isNull();
                    assertThat(actionCollection.getWorkspaceId()).isNull();
                    assertThat(actionCollection.getPolicies()).isNull();
                    assertThat(actionCollection.getId()).isNotNull();
                    assertThat(actionCollection.getUnpublishedCollection().getPluginType()).isEqualTo(PluginType.JS);
                    assertThat(actionCollection.getUnpublishedCollection().getPageId())
                            .isEqualTo(defaultPage.getUnpublishedPage().getName());
                    assertThat(actionCollection.getUnpublishedCollection().getPluginId()).isEqualTo(installedJsPlugin.getPackageName());

                    // Only the datasource actually used by an action is exported,
                    // with id and configuration (incl. credentials) removed.
                    assertThat(datasourceList).hasSize(1);
                    Datasource datasource = datasourceList.get(0);
                    assertThat(datasource.getWorkspaceId()).isNull();
                    assertThat(datasource.getId()).isNull();
                    assertThat(datasource.getPluginId()).isEqualTo(installedPlugin.getPackageName());
                    assertThat(datasource.getDatasourceConfiguration()).isNull();

                    assertThat(applicationJson.getInvisibleActionFields()).isNull();
                    NewAction validAction2 = actionList.stream().filter(action -> action.getId().equals("Page1_validAction2")).findFirst().get();
                    Assert.assertEquals(true, validAction2.getUnpublishedAction().getUserSetOnLoad());

                    assertThat(applicationJson.getUnpublishedLayoutmongoEscapedWidgets()).isNull();
                    assertThat(applicationJson.getPublishedLayoutmongoEscapedWidgets()).isNull();

                    // Default system themes are embedded in the export.
                    assertThat(applicationJson.getEditModeTheme()).isNotNull();
                    assertThat(applicationJson.getEditModeTheme().isSystemTheme()).isTrue();
                    assertThat(applicationJson.getEditModeTheme().getName()).isEqualToIgnoringCase(Theme.DEFAULT_THEME_NAME);
                    assertThat(applicationJson.getPublishedTheme()).isNotNull();
                    assertThat(applicationJson.getPublishedTheme().isSystemTheme()).isTrue();
                    assertThat(applicationJson.getPublishedTheme().getName()).isEqualToIgnoringCase(Theme.DEFAULT_THEME_NAME);

                    // Exported ids must not be the raw DB ids.
                    assertThat(exportedCollectionIds).isNotEmpty();
                    assertThat(exportedCollectionIds).doesNotContain(String.valueOf(DBCollectionIds));
                    assertThat(exportedActionIds).isNotEmpty();
                    assertThat(exportedActionIds).doesNotContain(String.valueOf(DBActionIds));
                    assertThat(exportedOnLayoutLoadActionIds).isNotEmpty();
                    assertThat(exportedOnLayoutLoadActionIds).doesNotContain(String.valueOf(DBOnLayoutLoadActionIds));
                })
                .verifyComplete();
    }

    /**
     * Export for version control (git): schema versions must be stamped and
     * user permissions stripped in addition to the usual sanitisation.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void createExportAppJsonForGitTest() {
        StringBuilder pageName = new StringBuilder();
        final Mono<ApplicationJson> resultMono = applicationRepository.findById(testAppId)
.flatMap(testApp -> {
                    final String pageId = testApp.getPages().get(0).getId();
                    return Mono.zip(
                            Mono.just(testApp),
                            newPageService.findPageById(pageId, READ_PAGES, false)
                    );
                })
                .flatMap(tuple -> {
                    Datasource ds1 = datasourceMap.get("DS1");
                    Application testApp = tuple.getT1();
                    PageDTO testPage = tuple.getT2();

                    pageName.append(testPage.getName()); // remember the page name for the assertions

                    Layout layout = testPage.getLayouts().get(0);
                    JSONObject dsl = new JSONObject(Map.of("text", "{{ query1.data }}"));

                    layout.setDsl(dsl);
                    layout.setPublishedDsl(dsl);

                    // Single on-load API action bound to DS1.
                    ActionDTO action = new ActionDTO();
                    action.setName("validAction");
                    action.setPageId(testPage.getId());
                    action.setExecuteOnLoad(true);
                    ActionConfiguration actionConfiguration = new ActionConfiguration();
                    actionConfiguration.setHttpMethod(HttpMethod.GET);
                    action.setActionConfiguration(actionConfiguration);
                    action.setDatasource(ds1);

                    return layoutActionService.createAction(action)
                            .then(importExportApplicationService.exportApplicationById(testApp.getId(), SerialiseApplicationObjective.VERSION_CONTROL));
                });

        StepVerifier
                .create(resultMono)
                .assertNext(applicationJson -> {
                    Application exportedApp = applicationJson.getExportedApplication();
                    List<NewPage> pageList = applicationJson.getPageList();
                    List<NewAction> actionList = applicationJson.getActionList();
                    List<Datasource> datasourceList = applicationJson.getDatasourceList();

                    NewPage newPage = pageList.get(0);

                    // Git exports carry the current schema versions.
                    assertThat(applicationJson.getServerSchemaVersion()).isEqualTo(JsonSchemaVersions.serverVersion);
                    assertThat(applicationJson.getClientSchemaVersion()).isEqualTo(JsonSchemaVersions.clientVersion);

                    assertThat(exportedApp.getName()).isNotNull();
                    assertThat(exportedApp.getWorkspaceId()).isNull();
                    assertThat(exportedApp.getPages()).hasSize(1);
                    assertThat(exportedApp.getPages().get(0).getId()).isEqualTo(pageName.toString());
                    assertThat(exportedApp.getGitApplicationMetadata()).isNull();
                    assertThat(exportedApp.getPolicies()).isNull();
                    assertThat(exportedApp.getUserPermissions()).isNull();

                    assertThat(pageList).hasSize(1);
                    assertThat(newPage.getApplicationId()).isNull();
                    assertThat(newPage.getUnpublishedPage().getLayouts().get(0).getDsl()).isNotNull();
                    assertThat(newPage.getId()).isNull();
                    assertThat(newPage.getPolicies()).isNull();

                    assertThat(actionList.isEmpty()).isFalse();
                    NewAction validAction = actionList.get(0);
                    assertThat(validAction.getApplicationId()).isNull();
                    assertThat(validAction.getPluginId()).isEqualTo(installedPlugin.getPackageName());
                    assertThat(validAction.getPluginType()).isEqualTo(PluginType.API);
                    assertThat(validAction.getWorkspaceId()).isNull();
                    assertThat(validAction.getPolicies()).isNull();
                    assertThat(validAction.getId()).isNotNull();
                    assertThat(validAction.getUnpublishedAction().getPageId())
                            .isEqualTo(newPage.getUnpublishedPage().getName());

                    assertThat(datasourceList).hasSize(1);
                    Datasource datasource = datasourceList.get(0);
                    assertThat(datasource.getWorkspaceId()).isNull();
                    assertThat(datasource.getId()).isNull();
                    assertThat(datasource.getPluginId()).isEqualTo(installedPlugin.getPackageName());
                    assertThat(datasource.getDatasourceConfiguration()).isNull();

                    assertThat(applicationJson.getUnpublishedLayoutmongoEscapedWidgets()).isNull();
                    assertThat(applicationJson.getPublishedLayoutmongoEscapedWidgets()).isNull();
                })
                .verifyComplete();
    }

    /** Importing a non-JSON file (a PNG) must fail with an AppsmithException. */
    @Test
    @WithUserDetails(value = "api_user")
    public void importApplicationFromInvalidFileTest() {
        FilePart filepart = Mockito.mock(FilePart.class, Mockito.RETURNS_DEEP_STUBS);
        Flux<DataBuffer> dataBufferFlux = DataBufferUtils
                .read(new ClassPathResource("test_assets/WorkspaceServiceTest/my_workspace_logo.png"), new DefaultDataBufferFactory(), 4096)
                .cache();

        Mockito.when(filepart.content()).thenReturn(dataBufferFlux);
        Mockito.when(filepart.headers().getContentType()).thenReturn(MediaType.IMAGE_PNG);

        Mono<ApplicationImportDTO> resultMono = importExportApplicationService.extractFileAndSaveApplication(workspaceId, filepart);

        StepVerifier
                .create(resultMono)
                .expectErrorMatches(error -> error instanceof AppsmithException)
                .verify();
    }

    /** Import with a null workspace id must fail with INVALID_PARAMETER. */
    @Test
    @WithUserDetails(value = "api_user")
    public void importApplicationWithNullWorkspaceIdTest() {
        FilePart filepart = Mockito.mock(FilePart.class, Mockito.RETURNS_DEEP_STUBS);

        Mono<ApplicationImportDTO> resultMono = importExportApplicationService
                .extractFileAndSaveApplication(null, filepart);

        StepVerifier
                .create(resultMono)
                .expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
                        throwable.getMessage().equals(AppsmithError.INVALID_PARAMETER.getMessage(FieldName.WORKSPACE_ID)))
                .verify();
    }

    /** Import JSON lacking the pages list must fail with NO_RESOURCE_FOUND(pages). */
    @Test
    @WithUserDetails(value = "api_user")
    public void importApplicationFromInvalidJsonFileWithoutPagesTest() {
        FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/invalid-json-without-pages.json");
        Mono<ApplicationImportDTO> resultMono = importExportApplicationService.extractFileAndSaveApplication(workspaceId,filePart);

        StepVerifier
                .create(resultMono)
                .expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
                        throwable.getMessage().equals(AppsmithError.NO_RESOURCE_FOUND.getMessage(FieldName.PAGES, INVALID_JSON_FILE)))
                .verify();
    }

    /** Import JSON lacking the application object must fail with NO_RESOURCE_FOUND(application). */
    @Test
    @WithUserDetails(value = "api_user")
    public void importApplicationFromInvalidJsonFileWithoutApplicationTest() {
        FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/invalid-json-without-app.json");
        Mono<ApplicationImportDTO> resultMono = importExportApplicationService.extractFileAndSaveApplication(workspaceId,filePart);

        StepVerifier
                .create(resultMono)
                .expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
                        throwable.getMessage().equals(AppsmithError.NO_RESOURCE_FOUND.getMessage(FieldName.APPLICATION, INVALID_JSON_FILE)))
                .verify();
    }

    /**
     * Happy-path import of a valid application JSON into a fresh workspace:
     * verifies the application, its pages, actions, collections and
     * datasources are all recreated and rewired to the new workspace.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void importApplicationFromValidJsonFileTest() {
        FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/valid-application.json");

        Workspace newWorkspace = new Workspace();
        newWorkspace.setName("Template Workspace");

        // Expected policies for the importing user on the new application.
        Policy manageAppPolicy = Policy.builder().permission(MANAGE_APPLICATIONS.getValue())
                .users(Set.of("api_user"))
                .build();
        Policy readAppPolicy = Policy.builder().permission(READ_APPLICATIONS.getValue())
                .users(Set.of("api_user"))
                .build();

        final Mono<ApplicationImportDTO> resultMono = workspaceService
                .create(newWorkspace)
                .flatMap(workspace -> importExportApplicationService
                        .extractFileAndSaveApplication(workspace.getId(), filePart)
                );

        StepVerifier
                .create(resultMono
                        .flatMap(applicationImportDTO -> {
                            Application application = applicationImportDTO.getApplication();
                            return Mono.zip(
                                    Mono.just(applicationImportDTO),
                                    datasourceService.findAllByWorkspaceId(application.getWorkspaceId(), MANAGE_DATASOURCES).collectList(),
                                    newActionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList(),
                                    newPageService.findByApplicationId(application.getId(), MANAGE_PAGES, false).collectList(),
                                    actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, MANAGE_ACTIONS, null).collectList()
                            );
                        }))
                .assertNext(tuple -> {
                    final Application application = tuple.getT1().getApplication();
                    final List<Datasource> unConfiguredDatasourceList = tuple.getT1().getUnConfiguredDatasourceList();
                    final boolean isPartialImport = tuple.getT1().getIsPartialImport();
                    final List<Datasource> datasourceList = tuple.getT2();
                    final List<NewAction> actionList = tuple.getT3();
                    final List<PageDTO> pageList = tuple.getT4();
                    final List<ActionCollection> actionCollectionList = tuple.getT5();

                    assertThat(application.getName()).isEqualTo("valid_application");
                    assertThat(application.getWorkspaceId()).isNotNull();
                    assertThat(application.getPages()).hasSize(2);
                    assertThat(application.getPolicies()).containsAll(Set.of(manageAppPolicy, readAppPolicy));
                    assertThat(application.getPublishedPages()).hasSize(1);
                    assertThat(application.getModifiedBy()).isEqualTo("api_user");
                    assertThat(application.getUpdatedAt()).isNotNull();
assertThat(application.getEditModeThemeId()).isNotNull();
                    assertThat(application.getPublishedModeThemeId()).isNotNull();
                    // Datasources need credentials after import, so this is a partial import.
                    assertThat(isPartialImport).isEqualTo(Boolean.TRUE);
                    assertThat(unConfiguredDatasourceList).isNotNull();

                    assertThat(datasourceList).isNotEmpty();
                    datasourceList.forEach(datasource -> {
                        assertThat(datasource.getWorkspaceId()).isEqualTo(application.getWorkspaceId());
                        assertThat(datasource.getDatasourceConfiguration()).isNotNull();
                    });

                    List<String> collectionIdInAction = new ArrayList<>();
                    assertThat(actionList).isNotEmpty();
                    actionList.forEach(newAction -> {
                        ActionDTO actionDTO = newAction.getUnpublishedAction();
                        // Page ids must be the new DB ids, not the exported page names.
                        assertThat(actionDTO.getPageId()).isNotEqualTo(pageList.get(0).getName());

                        if (StringUtils.equals(actionDTO.getName(), "api_wo_auth")) {
                            ActionDTO publishedAction = newAction.getPublishedAction();
                            assertThat(publishedAction).isNotNull();
                            assertThat(publishedAction.getActionConfiguration()).isNotNull();
                            // Test the fallback page ID from the unpublishedAction is copied to published version when
                            // published version does not have pageId
                            assertThat(actionDTO.getPageId()).isEqualTo(publishedAction.getPageId());
                        }

                        if (!StringUtils.isEmpty(actionDTO.getCollectionId())) {
                            collectionIdInAction.add(actionDTO.getCollectionId());
                        }
                    });

                    assertThat(actionCollectionList).isNotEmpty();
                    actionCollectionList.forEach(actionCollection -> {
                        assertThat(actionCollection.getUnpublishedCollection().getPageId()).isNotEqualTo(pageList.get(0).getName());
                        if (StringUtils.equals(actionCollection.getUnpublishedCollection().getName(), "JSObject2")) {
                            // Check if this action collection is not attached to any action
                            assertThat(collectionIdInAction).doesNotContain(actionCollection.getId());
                        } else {
                            assertThat(collectionIdInAction).contains(actionCollection.getId());
                        }
                    });

                    assertThat(pageList).hasSize(2);

                    ApplicationPage defaultAppPage = application.getPages()
                            .stream()
                            .filter(ApplicationPage::getIsDefault)
                            .findFirst()
                            .orElse(null);
                    assertThat(defaultAppPage).isNotNull();

                    PageDTO defaultPageDTO = pageList.stream()
                            .filter(pageDTO -> pageDTO.getId().equals(defaultAppPage.getId())).findFirst().orElse(null);

                    assertThat(defaultPageDTO).isNotNull();
                    assertThat(defaultPageDTO.getLayouts().get(0).getLayoutOnLoadActions()).isNotEmpty();
                })
                .verifyComplete();
    }

    /**
     * A subscriber cancellation (simulated with a 10ms timeout) must not abort
     * the import — the application should still be fully persisted.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void importFromValidJson_cancelledMidway_importSuccess() {
        FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/valid-application.json");

        Workspace newWorkspace = new Workspace();
        newWorkspace.setName("Midway cancel import app workspace");
        newWorkspace = workspaceService.create(newWorkspace).block();

        // Kick off the import and cancel the subscription almost immediately.
        importExportApplicationService
                .extractFileAndSaveApplication(newWorkspace.getId(), filePart)
                .timeout(Duration.ofMillis(10))
                .subscribe();

        // Wait for import to complete
        Mono<Application> importedAppFromDbMono = Mono.just(newWorkspace)
                .flatMap(workspace -> {
                    try {
                        // Before fetching the imported application, sleep for 5 seconds to ensure that the import completes
                        Thread.sleep(5000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    return applicationRepository.findByWorkspaceId(workspace.getId(), READ_APPLICATIONS)
                            .next();
                });

        StepVerifier.create(importedAppFromDbMono)
                .assertNext(application -> {
                    assertThat(application.getId()).isNotEmpty();
                })
                .verifyComplete();
    }

    /**
     * Importing an app that carries custom (non-system) themes must create new
     * theme documents rather than reuse system themes.
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void importApplicationInWorkspace_WhenCustomizedThemes_ThemesCreated() {
        FilePart filePart = createFilePart(
                "test_assets/ImportExportServiceTest/valid-application-with-custom-themes.json"
        );
        Workspace newWorkspace = new Workspace();
        newWorkspace.setName("Import theme test org");

        final Mono<ApplicationImportDTO> resultMono = workspaceService
                .create(newWorkspace)
                .flatMap(workspace -> importExportApplicationService
                        .extractFileAndSaveApplication(workspace.getId(), filePart)
                );

        StepVerifier
                .create(resultMono
                        .flatMap(applicationImportDTO -> Mono.zip(
                            Mono.just(applicationImportDTO),
                            themeRepository.findById(applicationImportDTO.getApplication().getEditModeThemeId()),
                            themeRepository.findById(applicationImportDTO.getApplication().getPublishedModeThemeId())
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1().getApplication();
                Theme editTheme = tuple.getT2();
                Theme publishedTheme = tuple.getT3();
                // Imported custom themes must not be system themes and must not be bound to a
                // workspace or application directly.
                assertThat(editTheme.isSystemTheme()).isFalse();
                assertThat(editTheme.getDisplayName()).isEqualTo("Custom edit theme");
                assertThat(editTheme.getWorkspaceId()).isNull();
                assertThat(editTheme.getApplicationId()).isNull();

                assertThat(publishedTheme.isSystemTheme()).isFalse();
                assertThat(publishedTheme.getDisplayName()).isEqualTo("Custom published theme");
                assertThat(publishedTheme.getWorkspaceId()).isNullOrEmpty();
                assertThat(publishedTheme.getApplicationId()).isNullOrEmpty();
            })
            .verifyComplete();
}

// Verifies that a JSON export containing no action collections imports cleanly:
// datasources/actions/pages are created and the collection list stays empty.
@Test
@WithUserDetails(value = "api_user")
public void importApplication_withoutActionCollection_succeedsWithoutError() {

    FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/valid-application-without-action-collection.json");

    Workspace newWorkspace = new Workspace();
    newWorkspace.setName("Template Workspace");

    Policy manageAppPolicy = Policy.builder().permission(MANAGE_APPLICATIONS.getValue())
            .users(Set.of("api_user"))
            .build();
    Policy readAppPolicy = Policy.builder().permission(READ_APPLICATIONS.getValue())
            .users(Set.of("api_user"))
            .build();

    final Mono<ApplicationImportDTO> resultMono = workspaceService
            .create(newWorkspace)
            .flatMap(workspace -> importExportApplicationService
                    .extractFileAndSaveApplication(workspace.getId(), filePart)
            );

    StepVerifier
            .create(resultMono
                    .flatMap(applicationImportDTO -> Mono.zip(
                            Mono.just(applicationImportDTO),
                            datasourceService.findAllByWorkspaceId(applicationImportDTO.getApplication().getWorkspaceId(), MANAGE_DATASOURCES).collectList(),
                            getActionsInApplication(applicationImportDTO.getApplication()).collectList(),
                            newPageService.findByApplicationId(applicationImportDTO.getApplication().getId(), MANAGE_PAGES, false).collectList(),
                            actionCollectionService.findAllByApplicationIdAndViewMode(applicationImportDTO.getApplication().getId(), false, MANAGE_ACTIONS, null).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1().getApplication();
                final List<Datasource> datasourceList = tuple.getT2();
                final List<ActionDTO> actionDTOS = tuple.getT3();
                final List<PageDTO> pageList = tuple.getT4();
                final List<ActionCollection> actionCollectionList = tuple.getT5();

                assertThat(application.getName()).isEqualTo("valid_application");
                assertThat(application.getWorkspaceId()).isNotNull();
                assertThat(application.getPages()).hasSize(2);
                assertThat(application.getPolicies()).containsAll(Set.of(manageAppPolicy, readAppPolicy));
                assertThat(application.getPublishedPages()).hasSize(1);
                assertThat(application.getModifiedBy()).isEqualTo("api_user");
                assertThat(application.getUpdatedAt()).isNotNull();

                assertThat(datasourceList).isNotEmpty();
                datasourceList.forEach(datasource -> {
                    assertThat(datasource.getWorkspaceId()).isEqualTo(application.getWorkspaceId());
                    assertThat(datasource.getDatasourceConfiguration()).isNotNull();
                });

                assertThat(actionDTOS).isNotEmpty();
                actionDTOS.forEach(actionDTO -> {
                    assertThat(actionDTO.getPageId()).isNotEqualTo(pageList.get(0).getName());
                });

                // No action collections in the source JSON, so none should be created.
                assertThat(actionCollectionList).isEmpty();

                assertThat(pageList).hasSize(2);
                ApplicationPage defaultAppPage = application.getPages()
                        .stream()
                        .filter(ApplicationPage::getIsDefault)
                        .findFirst()
                        .orElse(null);
                assertThat(defaultAppPage).isNotNull();

                PageDTO defaultPageDTO = pageList.stream()
                        .filter(pageDTO -> pageDTO.getId().equals(defaultAppPage.getId())).findFirst().orElse(null);
                assertThat(defaultPageDTO).isNotNull();
                assertThat(defaultPageDTO.getLayouts().get(0).getLayoutOnLoadActions()).isNotEmpty();
            })
            .verifyComplete();
}

// Verifies that importing a JSON without any theme information falls back to (legacy) default
// themes: both mode theme ids must still be populated on the application.
@Test
@WithUserDetails(value = "api_user")
public void importApplication_WithoutThemes_LegacyThemesAssigned() {
    FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/valid-application-without-theme.json");

    Workspace newWorkspace = new Workspace();
    newWorkspace.setName("Template Workspace");

    final Mono<ApplicationImportDTO> resultMono = workspaceService.create(newWorkspace)
            .flatMap(workspace -> importExportApplicationService
                    .extractFileAndSaveApplication(workspace.getId(), filePart)
            );

    StepVerifier
            .create(resultMono)
            .assertNext(applicationImportDTO -> {
                assertThat(applicationImportDTO.getApplication().getEditModeThemeId()).isNotEmpty();
                assertThat(applicationImportDTO.getApplication().getPublishedModeThemeId()).isNotEmpty();
            })
            .verifyComplete();
}

// Verifies that an (invalid) JSON whose action collection lacks a pageId still imports; the
// collection is dropped (empty list) while the single action survives.
@Test
@WithUserDetails(value = "api_user")
public void importApplication_withoutPageIdInActionCollection_succeeds() {

    FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/invalid-application-without-pageId-action-collection.json");

    Workspace newWorkspace = new Workspace();
    newWorkspace.setName("Template Workspace");

    final Mono<ApplicationImportDTO> resultMono = workspaceService
            .create(newWorkspace)
            .flatMap(workspace -> importExportApplicationService
                    .extractFileAndSaveApplication(workspace.getId(), filePart)
            );

    StepVerifier
            .create(resultMono
                    .flatMap(applicationImportDTO -> Mono.zip(
                            Mono.just(applicationImportDTO),
                            datasourceService.findAllByWorkspaceId(applicationImportDTO.getApplication().getWorkspaceId(), MANAGE_DATASOURCES).collectList(),
                            getActionsInApplication(applicationImportDTO.getApplication()).collectList(),
                            newPageService.findByApplicationId(applicationImportDTO.getApplication().getId(), MANAGE_PAGES, false).collectList(),
                            actionCollectionService
                                    .findAllByApplicationIdAndViewMode(applicationImportDTO.getApplication().getId(), false, MANAGE_ACTIONS, null).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1().getApplication();
                final List<Datasource> datasourceList = tuple.getT2();
                final List<ActionDTO> actionDTOS = tuple.getT3();
                final List<PageDTO> pageList = tuple.getT4();
                final List<ActionCollection> actionCollectionList = tuple.getT5();

                assertThat(datasourceList).isNotEmpty();

                assertThat(actionDTOS).hasSize(1);
                actionDTOS.forEach(actionDTO -> {
                    assertThat(actionDTO.getPageId()).isNotEqualTo(pageList.get(0).getName());
                });

                assertThat(actionCollectionList).isEmpty();
            })
            .verifyComplete();
}

// Export an application that has git metadata, re-import it with a branch name, and verify that
// every page and action carries DefaultResources stamped with that branch.
@Test
@WithUserDetails(value = "api_user")
public void exportImportApplication_importWithBranchName_updateApplicationResourcesWithBranch() {
    Application testApplication = new Application();
    testApplication.setName("Export-Import-Update-Branch_Test-App");
    testApplication.setWorkspaceId(workspaceId);
    testApplication.setUpdatedAt(Instant.now());
    testApplication.setLastDeployedAt(Instant.now());
    testApplication.setModifiedBy("some-user");
    testApplication.setGitApplicationMetadata(new GitApplicationMetadata());
    GitApplicationMetadata gitData = new GitApplicationMetadata();
    gitData.setBranchName("testBranch");
    testApplication.setGitApplicationMetadata(gitData);

    Application savedApplication = applicationPageService.createApplication(testApplication, workspaceId)
            .flatMap(application1 -> {
                application1.getGitApplicationMetadata().setDefaultApplicationId(application1.getId());
                return applicationService.save(application1);
            }).block();

    Mono<Application> result = newPageService.findNewPagesByApplicationId(savedApplication.getId(), READ_PAGES).collectList()
            .flatMap(newPages -> {
                NewPage newPage = newPages.get(0);

                ActionDTO action = new ActionDTO();
                action.setName("validAction");
                action.setPageId(newPage.getId());
                action.setExecuteOnLoad(true);
                ActionConfiguration actionConfiguration = new ActionConfiguration();
                actionConfiguration.setHttpMethod(HttpMethod.GET);
                action.setActionConfiguration(actionConfiguration);
                action.setDatasource(datasourceMap.get("DS1"));
                return layoutActionService.createAction(action)
                        .flatMap(createdAction -> newActionService.findById(createdAction.getId(), READ_ACTIONS));
            })
            .then(importExportApplicationService.exportApplicationById(savedApplication.getId(), SerialiseApplicationObjective.VERSION_CONTROL)
                    .flatMap(applicationJson -> importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson, savedApplication.getId(), gitData.getBranchName())))
            .cache();

    Mono<List<NewPage>> updatedPagesMono = result.then(newPageService.findNewPagesByApplicationId(savedApplication.getId(), READ_PAGES).collectList());

    // NOTE(review): actions are queried with READ_PAGES permission here — looks like it should be
    // READ_ACTIONS; confirm before changing.
    Mono<List<NewAction>> updatedActionsMono = result.then(newActionService.findAllByApplicationIdAndViewMode(savedApplication.getId(), false, READ_PAGES, null).collectList());

    StepVerifier
            .create(Mono.zip(result, updatedPagesMono, updatedActionsMono))
            .assertNext(tuple -> {
                Application application = tuple.getT1();
                List<NewPage> pageList = tuple.getT2();
                List<NewAction> actionList = tuple.getT3();

                final String branchName = application.getGitApplicationMetadata().getBranchName();
                pageList.forEach(page -> {
                    assertThat(page.getDefaultResources()).isNotNull();
                    assertThat(page.getDefaultResources().getBranchName()).isEqualTo(branchName);
                });

                actionList.forEach(action -> {
                    assertThat(action.getDefaultResources()).isNotNull();
                    assertThat(action.getDefaultResources().getBranchName()).isEqualTo(branchName);
                });
            })
            .verifyComplete();
}

// Importing a JSON with an unsupported schema version must fail with
// AppsmithError.INCOMPATIBLE_IMPORTED_JSON.
@Test
@WithUserDetails(value = "api_user")
public void importApplication_incompatibleJsonFile_throwException() {
    FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/incompatible_version.json");
    Mono<ApplicationImportDTO> resultMono = importExportApplicationService.extractFileAndSaveApplication(workspaceId,filePart);

    StepVerifier
            .create(resultMono)
            .expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
                    throwable.getMessage().equals(AppsmithError.INCOMPATIBLE_IMPORTED_JSON.getMessage()))
            .verify();
}

// Importing a JSON whose datasources carry no credentials must succeed as a partial import,
// reporting the un-configured datasources back to the caller. (Method continues past this chunk.)
@Test
@WithUserDetails(value = "api_user")
public void importApplication_withUnConfiguredDatasources_Success() {

    FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/valid-application-with-un-configured-datasource.json");

    Workspace newWorkspace = new Workspace();
    newWorkspace.setName("Template Workspace");

    Policy manageAppPolicy = Policy.builder().permission(MANAGE_APPLICATIONS.getValue())
            .users(Set.of("api_user"))
            .build();
    Policy readAppPolicy = Policy.builder().permission(READ_APPLICATIONS.getValue())
            .users(Set.of("api_user"))
            .build();

    final Mono<ApplicationImportDTO> resultMono = workspaceService
            .create(newWorkspace)
            .flatMap(workspace -> importExportApplicationService
                    .extractFileAndSaveApplication(workspace.getId(), filePart)
            );

    StepVerifier
            .create(resultMono
                    .flatMap(applicationImportDTO -> {
                        Application application = applicationImportDTO.getApplication();
                        return Mono.zip(
                                Mono.just(applicationImportDTO),
                                datasourceService.findAllByWorkspaceId(application.getWorkspaceId(), MANAGE_DATASOURCES).collectList(),
                                newActionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList(),
                                newPageService.findByApplicationId(application.getId(), MANAGE_PAGES, false).collectList(),
                                actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, MANAGE_ACTIONS, null).collectList()
                        );
                    }))
            .assertNext(tuple -> {
                final Application application = tuple.getT1().getApplication();
                final List<Datasource> unConfiguredDatasourceList = tuple.getT1().getUnConfiguredDatasourceList();
                final boolean isPartialImport = tuple.getT1().getIsPartialImport();
                final List<Datasource> datasourceList = tuple.getT2();
                final List<NewAction> actionList = tuple.getT3();
                final List<PageDTO> pageList = tuple.getT4();
                final List<ActionCollection> actionCollectionList = tuple.getT5();

                assertThat(application.getName()).isEqualTo("importExportTest");
                assertThat(application.getWorkspaceId()).isNotNull();
                assertThat(application.getPages()).hasSize(1);
assertThat(application.getPolicies()).containsAll(Set.of(manageAppPolicy, readAppPolicy)); assertThat(application.getPublishedPages()).hasSize(1); assertThat(application.getModifiedBy()).isEqualTo("api_user"); assertThat(application.getUpdatedAt()).isNotNull(); assertThat(application.getEditModeThemeId()).isNotNull(); assertThat(application.getPublishedModeThemeId()).isNotNull(); assertThat(isPartialImport).isEqualTo(Boolean.TRUE); assertThat(unConfiguredDatasourceList.size()).isNotEqualTo(0); assertThat(datasourceList).isNotEmpty(); List<String> datasourceNames = unConfiguredDatasourceList.stream().map(Datasource::getName).collect(Collectors.toList()); assertThat(datasourceNames).contains("mongoDatasource", "postgresTest"); List<String> collectionIdInAction = new ArrayList<>(); assertThat(actionList).isNotEmpty(); actionList.forEach(newAction -> { ActionDTO actionDTO = newAction.getUnpublishedAction(); assertThat(actionDTO.getPageId()).isNotEqualTo(pageList.get(0).getName()); if (!StringUtils.isEmpty(actionDTO.getCollectionId())) { collectionIdInAction.add(actionDTO.getCollectionId()); } }); assertThat(actionCollectionList).isEmpty(); assertThat(pageList).hasSize(1); ApplicationPage defaultAppPage = application.getPages() .stream() .filter(ApplicationPage::getIsDefault) .findFirst() .orElse(null); assertThat(defaultAppPage).isNotNull(); PageDTO defaultPageDTO = pageList.stream() .filter(pageDTO -> pageDTO.getId().equals(defaultAppPage.getId())).findFirst().orElse(null); assertThat(defaultPageDTO).isNotNull(); }) .verifyComplete(); } public void importApplicationIntoWorkspace_pageRemovedAndUpdatedDefaultPageNameInBranchApplication_Success() { Application testApplication = new Application(); testApplication.setName("importApplicationIntoWorkspace_pageRemovedInBranchApplication_Success"); testApplication.setWorkspaceId(workspaceId); testApplication.setUpdatedAt(Instant.now()); testApplication.setLastDeployedAt(Instant.now()); 
    testApplication.setModifiedBy("some-user");
    testApplication.setGitApplicationMetadata(new GitApplicationMetadata());
    GitApplicationMetadata gitData = new GitApplicationMetadata();
    gitData.setBranchName("master");
    testApplication.setGitApplicationMetadata(gitData);

    Application application = applicationPageService.createApplication(testApplication, workspaceId)
            .flatMap(application1 -> {
                application1.getGitApplicationMetadata().setDefaultApplicationId(application1.getId());
                return applicationService.save(application1);
            }).block();

    // Remember the first page's gitSyncId so the imported JSON page can be matched to it.
    String gitSyncIdBeforeImport = newPageService.findById(application.getPages().get(0).getId(), MANAGE_PAGES).block().getGitSyncId();

    PageDTO page = new PageDTO();
    page.setName("Page 2");
    page.setApplicationId(application.getId());
    PageDTO savedPage = applicationPageService.createPage(page).block();

    assert application.getId() != null;
    Set<String> applicationPageIdsBeforeImport = Objects.requireNonNull(applicationRepository.findById(application.getId()).block())
            .getPages()
            .stream()
            .map(ApplicationPage::getId)
            .collect(Collectors.toSet());

    ApplicationJson applicationJson = createAppJson("test_assets/ImportExportServiceTest/valid-application-with-page-removed.json").block();
    applicationJson.getPageList().get(0).setGitSyncId(gitSyncIdBeforeImport);

    Application importedApplication = importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson, application.getId(), "master").block();

    assert importedApplication != null;
    Mono<List<NewPage>> pageList = Flux.fromIterable(
            importedApplication
                    .getPages()
                    .stream()
                    .map(ApplicationPage::getId)
                    .collect(Collectors.toList())
    ).flatMap(s -> newPageService.findById(s, MANAGE_PAGES)).collectList();

    StepVerifier
            .create(pageList)
            .assertNext(newPages -> {
                // Check before import we had both the pages
                assertThat(applicationPageIdsBeforeImport).hasSize(2);
                assertThat(applicationPageIdsBeforeImport).contains(savedPage.getId());
                // After import only the page present in the JSON should survive.
                assertThat(newPages.size()).isEqualTo(1);
                assertThat(importedApplication.getPages().size()).isEqualTo(1);
                assertThat(importedApplication.getPages().get(0).getId()).isEqualTo(newPages.get(0).getId());
                assertThat(newPages.get(0).getPublishedPage().getName()).isEqualTo("importedPage");
                assertThat(newPages.get(0).getGitSyncId()).isEqualTo(gitSyncIdBeforeImport);
            })
            .verifyComplete();
}

// Re-importing a git-connected application from a JSON with extra pages must add those pages.
@Test
@WithUserDetails(value = "api_user")
public void importApplicationIntoWorkspace_pageAddedInBranchApplication_Success() {
    Application testApplication = new Application();
    testApplication.setName("importApplicationIntoWorkspace_pageAddedInBranchApplication_Success");
    testApplication.setWorkspaceId(workspaceId);
    testApplication.setUpdatedAt(Instant.now());
    testApplication.setLastDeployedAt(Instant.now());
    testApplication.setModifiedBy("some-user");
    testApplication.setGitApplicationMetadata(new GitApplicationMetadata());
    GitApplicationMetadata gitData = new GitApplicationMetadata();
    gitData.setBranchName("master");
    testApplication.setGitApplicationMetadata(gitData);

    Application application = applicationPageService.createApplication(testApplication, workspaceId)
            .flatMap(application1 -> {
                application1.getGitApplicationMetadata().setDefaultApplicationId(application1.getId());
                return applicationService.save(application1);
            }).block();

    String gitSyncIdBeforeImport = newPageService.findById(application.getPages().get(0).getId(), MANAGE_PAGES).block().getGitSyncId();

    assert application.getId() != null;
    Set<String> applicationPageIdsBeforeImport = Objects.requireNonNull(applicationRepository.findById(application.getId()).block())
            .getPages()
            .stream()
            .map(ApplicationPage::getId)
            .collect(Collectors.toSet());

    ApplicationJson applicationJson = createAppJson("test_assets/ImportExportServiceTest/valid-application-with-page-added.json").block();
    applicationJson.getPageList().get(0).setGitSyncId(gitSyncIdBeforeImport);

    // NOTE(review): variable is named applicationMono but holds a resolved Application (blocked).
    Application applicationMono = importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson, application.getId(), "master").block();

    Mono<List<NewPage>> pageList = Flux.fromIterable(
            applicationMono.getPages()
                    .stream()
                    .map(ApplicationPage::getId)
                    .collect(Collectors.toList())
    ).flatMap(s -> newPageService.findById(s, MANAGE_PAGES)).collectList();

    StepVerifier
            .create(pageList)
            .assertNext(newPages -> {
                // Check before import we had both the pages
                assertThat(applicationPageIdsBeforeImport).hasSize(1);
                assertThat(newPages.size()).isEqualTo(3);
                List<String> pageNames = newPages.stream().map(newPage -> newPage.getUnpublishedPage().getName()).collect(Collectors.toList());
                assertThat(pageNames).contains("Page1");
                assertThat(pageNames).contains("Page2");
                assertThat(pageNames).contains("Page3");
            })
            .verifyComplete();
}

@Test
@WithUserDetails(value = "api_user")
public void importUpdatedApplicationIntoWorkspaceFromFile_publicApplication_visibilityFlagNotReset() {
    // Create a application and make it public
    // Now add a page and export the same import it to the app
    // Check if the policies and visibility flag are not reset

    Policy manageAppPolicy = Policy.builder().permission(MANAGE_APPLICATIONS.getValue())
            .users(Set.of("api_user"))
            .build();
    Policy readAppPolicy = Policy.builder().permission(READ_APPLICATIONS.getValue())
            .users(Set.of("api_user", FieldName.ANONYMOUS_USER))
            .build();

    Application testApplication = new Application();
    testApplication.setName("importUpdatedApplicationIntoWorkspaceFromFile_publicApplication_visibilityFlagNotReset");
    testApplication.setWorkspaceId(workspaceId);
    testApplication.setUpdatedAt(Instant.now());
    testApplication.setLastDeployedAt(Instant.now());
    testApplication.setModifiedBy("some-user");
    testApplication.setGitApplicationMetadata(new GitApplicationMetadata());
    GitApplicationMetadata gitData = new GitApplicationMetadata();
    gitData.setBranchName("master");
    testApplication.setGitApplicationMetadata(gitData);

    Application application = applicationPageService.createApplication(testApplication, workspaceId)
            .flatMap(application1 -> {
                application1.getGitApplicationMetadata().setDefaultApplicationId(application1.getId());
                return applicationService.save(application1);
            }).block();

    ApplicationAccessDTO applicationAccessDTO = new ApplicationAccessDTO();
    applicationAccessDTO.setPublicAccess(true);
    applicationService.changeViewAccess(application.getId(), "master", applicationAccessDTO).block();

    // Round-trip export -> import into the same application; public visibility must survive.
    Mono<Application> applicationMono = importExportApplicationService.exportApplicationById(application.getId(), "master")
            .flatMap(applicationJson -> importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson, application.getId(), "master"));

    StepVerifier
            .create(applicationMono)
            .assertNext(application1 -> {
                assertThat(application1.getIsPublic()).isEqualTo(Boolean.TRUE);
                assertThat(application1.getPolicies()).containsAll(Set.of(manageAppPolicy, readAppPolicy));
            })
            .verifyComplete();
}

/**
 * Testcase for checking the discard changes flow for following events:
 * 1. Import application in org
 * 2. Add new page to the imported application
 * 3. User tries to import application from same application json file
 * 4. Added page will be removed
 */
@Test
@WithUserDetails(value = "api_user")
public void discardChange_addNewPageAfterImport_addedPageRemoved() {

    /*
    1. Import application
    2. Add single page to imported app
    3. Import the application from same JSON with applicationId
    4. Added page should be deleted from DB
     */
    Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application.json");
    // NOTE(review): local workspaceId shadows the class-level workspaceId field used by other
    // tests — appears intentional (fresh template workspace per discard test); confirm.
    String workspaceId = createTemplateWorkspace().getId();
    final Mono<Application> resultMonoWithoutDiscardOperation = applicationJsonMono
            .flatMap(applicationJson -> {
                applicationJson.getExportedApplication().setName("discard-change-page-added");
                return importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson);
            })
            .flatMap(application -> {
                PageDTO page = new PageDTO();
                page.setName("discard-page-test");
                page.setApplicationId(application.getId());
                return applicationPageService.createPage(page);
            })
            .flatMap(page -> applicationRepository.findById(page.getApplicationId()))
            .cache();

    StepVerifier
            .create(resultMonoWithoutDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            newPageService.findByApplicationId(application.getId(), MANAGE_PAGES, false).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<PageDTO> pageList = tuple.getT2();

                assertThat(application.getName()).isEqualTo("discard-change-page-added");
                assertThat(application.getWorkspaceId()).isNotNull();
                assertThat(application.getPages()).hasSize(3);
                assertThat(application.getPublishedPages()).hasSize(1);
                assertThat(application.getModifiedBy()).isEqualTo("api_user");
                assertThat(application.getUpdatedAt()).isNotNull();
                assertThat(application.getEditModeThemeId()).isNotNull();
                assertThat(application.getPublishedModeThemeId()).isNotNull();

                assertThat(pageList).hasSize(3);
                ApplicationPage defaultAppPage = application.getPages()
                        .stream()
                        .filter(ApplicationPage::getIsDefault)
                        .findFirst()
                        .orElse(null);
                assertThat(defaultAppPage).isNotNull();

                PageDTO defaultPageDTO = pageList.stream()
                        .filter(pageDTO -> pageDTO.getId().equals(defaultAppPage.getId())).findFirst().orElse(null);
                assertThat(defaultPageDTO).isNotNull();
                assertThat(defaultPageDTO.getLayouts().get(0).getLayoutOnLoadActions()).isNotEmpty();

                List<String> pageNames = new ArrayList<>();
                pageList.forEach(page -> pageNames.add(page.getName()));
                assertThat(pageNames).contains("discard-page-test");
            })
            .verifyComplete();

    // Import the same application again to find if the added page is deleted
    final Mono<Application> resultMonoWithDiscardOperation = resultMonoWithoutDiscardOperation
            .flatMap(importedApplication ->
                    applicationJsonMono
                            .flatMap(applicationJson -> {
                                importedApplication.setGitApplicationMetadata(new GitApplicationMetadata());
                                importedApplication.getGitApplicationMetadata().setDefaultApplicationId(importedApplication.getId());
                                return applicationService.save(importedApplication)
                                        .then(importExportApplicationService.importApplicationInWorkspace(
                                                importedApplication.getWorkspaceId(),
                                                applicationJson,
                                                importedApplication.getId(),
                                                "main")
                                        );
                            }
                            )
            );

    StepVerifier
            .create(resultMonoWithDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            newPageService.findByApplicationId(application.getId(), MANAGE_PAGES, false).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<PageDTO> pageList = tuple.getT2();

                assertThat(application.getPages()).hasSize(2);
                assertThat(application.getPublishedPages()).hasSize(1);

                assertThat(pageList).hasSize(2);

                List<String> pageNames = new ArrayList<>();
                pageList.forEach(page -> pageNames.add(page.getName()));
                assertThat(pageNames).doesNotContain("discard-page-test");
            })
            .verifyComplete();
}

/**
 * Testcase for checking the discard changes flow for following events:
 * 1. Import application in org
 * 2. Add new action to the imported application
 * 3. User tries to import application from same application json file
 * 4. Added action will be removed
 */
@Test
@WithUserDetails(value = "api_user")
public void discardChange_addNewActionAfterImport_addedActionRemoved() {

    Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application.json");
    String workspaceId = createTemplateWorkspace().getId();
    final Mono<Application> resultMonoWithoutDiscardOperation = applicationJsonMono
            .flatMap(applicationJson -> {
                applicationJson.getExportedApplication().setName("discard-change-action-added");
                return importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson);
            })
            .flatMap(application -> {
                ActionDTO action = new ActionDTO();
                ActionConfiguration actionConfiguration = new ActionConfiguration();
                actionConfiguration.setHttpMethod(HttpMethod.GET);
                action.setActionConfiguration(actionConfiguration);
                action.setDatasource(datasourceMap.get("DS1"));
                action.setName("discard-action-test");
                action.setPageId(application.getPages().get(0).getId());
                return layoutActionService.createAction(action);
            })
            .flatMap(actionDTO -> newActionService.getById(actionDTO.getId()))
            .flatMap(newAction -> applicationRepository.findById(newAction.getApplicationId()))
            .cache();

    StepVerifier
            .create(resultMonoWithoutDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            getActionsInApplication(application).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<ActionDTO> actionList = tuple.getT2();

                assertThat(application.getName()).isEqualTo("discard-change-action-added");
                assertThat(application.getWorkspaceId()).isNotNull();

                List<String> actionNames = new ArrayList<>();
                actionList.forEach(actionDTO -> actionNames.add(actionDTO.getName()));
                assertThat(actionNames).contains("discard-action-test");
            })
            .verifyComplete();

    // Import the same application again
    final Mono<Application> resultMonoWithDiscardOperation = resultMonoWithoutDiscardOperation
            .flatMap(importedApplication ->
                    applicationJsonMono
                            .flatMap(applicationJson -> {
                                importedApplication.setGitApplicationMetadata(new GitApplicationMetadata());
                                importedApplication.getGitApplicationMetadata().setDefaultApplicationId(importedApplication.getId());
                                return applicationService.save(importedApplication)
                                        .then(importExportApplicationService.importApplicationInWorkspace(
                                                importedApplication.getWorkspaceId(),
                                                applicationJson,
                                                importedApplication.getId(),
                                                "main")
                                        );
                            }
                            )
            );

    StepVerifier
            .create(resultMonoWithDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            getActionsInApplication(application).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<ActionDTO> actionList = tuple.getT2();

                assertThat(application.getWorkspaceId()).isNotNull();

                List<String> actionNames = new ArrayList<>();
                actionList.forEach(actionDTO -> actionNames.add(actionDTO.getName()));
                assertThat(actionNames).doesNotContain("discard-action-test");
            })
            .verifyComplete();
}

/**
 * Testcase for checking the discard changes flow for following events:
 * 1. Import application in org
 * 2. Add actionCollection to the imported application
 * 3. User tries to import application from same application json file
 * 4. Added actionCollection will be removed
 */
@Test
@WithUserDetails(value = "api_user")
public void discardChange_addNewActionCollectionAfterImport_addedActionCollectionRemoved() {

    Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application-without-action-collection.json");
    String workspaceId = createTemplateWorkspace().getId();
    final Mono<Application> resultMonoWithoutDiscardOperation = applicationJsonMono
            .flatMap(applicationJson -> {
                applicationJson.getExportedApplication().setName("discard-change-collection-added");
                return importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson);
            })
            .flatMap(application -> {
                ActionCollectionDTO actionCollectionDTO1 = new ActionCollectionDTO();
                actionCollectionDTO1.setName("discard-action-collection-test");
                actionCollectionDTO1.setPageId(application.getPages().get(0).getId());
                actionCollectionDTO1.setApplicationId(application.getId());
                actionCollectionDTO1.setWorkspaceId(application.getWorkspaceId());
                actionCollectionDTO1.setPluginId(jsDatasource.getPluginId());
                ActionDTO action1 = new ActionDTO();
                action1.setName("discard-action-collection-test-action");
                action1.setActionConfiguration(new ActionConfiguration());
                action1.getActionConfiguration().setBody("mockBody");
                actionCollectionDTO1.setActions(List.of(action1));
                actionCollectionDTO1.setPluginType(PluginType.JS);

                return layoutCollectionService.createCollection(actionCollectionDTO1);
            })
            .flatMap(actionCollectionDTO -> actionCollectionService.getById(actionCollectionDTO.getId()))
            .flatMap(actionCollection -> applicationRepository.findById(actionCollection.getApplicationId()))
            .cache();

    StepVerifier
            .create(resultMonoWithoutDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList(),
                            getActionsInApplication(application).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<ActionCollection> actionCollectionList = tuple.getT2();
                final List<ActionDTO> actionList = tuple.getT3();

                assertThat(application.getName()).isEqualTo("discard-change-collection-added");
                assertThat(application.getWorkspaceId()).isNotNull();

                List<String> actionCollectionNames = new ArrayList<>();
                actionCollectionList.forEach(actionCollection -> actionCollectionNames.add(actionCollection.getUnpublishedCollection().getName()));
                assertThat(actionCollectionNames).contains("discard-action-collection-test");

                List<String> actionNames = new ArrayList<>();
                actionList.forEach(actionDTO -> actionNames.add(actionDTO.getName()));
                assertThat(actionNames).contains("discard-action-collection-test-action");
            })
            .verifyComplete();

    // Import the same application again
    final Mono<Application> resultMonoWithDiscardOperation = resultMonoWithoutDiscardOperation
            .flatMap(importedApplication ->
                    applicationJsonMono
                            .flatMap(applicationJson -> {
                                importedApplication.setGitApplicationMetadata(new GitApplicationMetadata());
                                importedApplication.getGitApplicationMetadata().setDefaultApplicationId(importedApplication.getId());
                                return applicationService.save(importedApplication)
                                        .then(importExportApplicationService.importApplicationInWorkspace(
                                                importedApplication.getWorkspaceId(),
                                                applicationJson,
                                                importedApplication.getId(),
                                                "main")
                                        );
                            }
                            )
            );

    StepVerifier
            .create(resultMonoWithDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList(),
                            getActionsInApplication(application).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<ActionCollection> actionCollectionList = tuple.getT2();
                final List<ActionDTO> actionList = tuple.getT3();

                assertThat(application.getWorkspaceId()).isNotNull();

                List<String> actionCollectionNames = new ArrayList<>();
                actionCollectionList.forEach(actionCollection -> actionCollectionNames.add(actionCollection.getUnpublishedCollection().getName()));
                assertThat(actionCollectionNames).doesNotContain("discard-action-collection-test");

                List<String> actionNames = new ArrayList<>();
                actionList.forEach(actionDTO -> actionNames.add(actionDTO.getName()));
                assertThat(actionNames).doesNotContain("discard-action-collection-test-action");
            })
            .verifyComplete();
}

/**
 * Testcase for checking the discard changes flow for following events:
 * 1. Import application in org
 * 2. Remove existing page from imported application
 * 3. Import application from same application json file
 * 4. Removed page will be restored
 */
@Test
@WithUserDetails(value = "api_user")
public void discardChange_removeNewPageAfterImport_removedPageRestored() {

    Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application.json");
    String workspaceId = createTemplateWorkspace().getId();
    final Mono<Application> resultMonoWithoutDiscardOperation = applicationJsonMono
            .flatMap(applicationJson -> {
                applicationJson.getExportedApplication().setName("discard-change-page-removed");
                return importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson);
            })
            .flatMap(application -> {
                // Delete the first non-default page of the freshly imported application.
                Optional<ApplicationPage> applicationPage = application
                        .getPages()
                        .stream()
                        .filter(page -> !page.isDefault())
                        .findFirst();
                return applicationPageService.deleteUnpublishedPage(applicationPage.get().getId());
            })
            .flatMap(page -> applicationRepository.findById(page.getApplicationId()))
            .cache();

    StepVerifier
            .create(resultMonoWithoutDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            newPageService.findByApplicationId(application.getId(), MANAGE_PAGES, false).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<PageDTO> pageList = tuple.getT2();

                assertThat(application.getName()).isEqualTo("discard-change-page-removed");
                assertThat(application.getWorkspaceId()).isNotNull();
                assertThat(application.getPages()).hasSize(1);

                assertThat(pageList).hasSize(1);
            })
            .verifyComplete();

    // Import the same application again
    final Mono<Application> resultMonoWithDiscardOperation = resultMonoWithoutDiscardOperation
            .flatMap(importedApplication ->
                    applicationJsonMono
                            .flatMap(applicationJson -> {
                                importedApplication.setGitApplicationMetadata(new GitApplicationMetadata());
                                importedApplication.getGitApplicationMetadata().setDefaultApplicationId(importedApplication.getId());
                                return applicationService.save(importedApplication)
                                        .then(importExportApplicationService.importApplicationInWorkspace(
                                                importedApplication.getWorkspaceId(),
                                                applicationJson,
                                                importedApplication.getId(),
                                                "main")
                                        );
                            }
                            )
            );

    StepVerifier
            .create(resultMonoWithDiscardOperation
                    .flatMap(application -> Mono.zip(
                            Mono.just(application),
                            newPageService.findByApplicationId(application.getId(), MANAGE_PAGES, false).collectList()
                    )))
            .assertNext(tuple -> {
                final Application application = tuple.getT1();
                final List<PageDTO> pageList = tuple.getT2();

                assertThat(application.getPages()).hasSize(2);
                assertThat(application.getPublishedPages()).hasSize(1);

                assertThat(pageList).hasSize(2);
            })
            .verifyComplete();
}

/**
 * Testcase for checking the discard changes flow for following events:
 * 1. Import application in org
 * 2. Remove existing action from imported application
 * 3. Import application from same application json file
 * 4.
     * Removed action will be restored
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void discardChange_removeNewActionAfterImport_removedActionRestored() {
        Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application.json");
        String workspaceId = createTemplateWorkspace().getId();
        // One-element array lets the lambda below smuggle out the deleted action's name.
        final String[] deletedActionName = new String[1];

        final Mono<Application> resultMonoWithoutDiscardOperation = applicationJsonMono
                .flatMap(applicationJson -> {
                    applicationJson.getExportedApplication().setName("discard-change-action-removed");
                    return importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson);
                })
                .flatMap(application -> {
                    // Delete the first action found in the imported app, then publish.
                    return getActionsInApplication(application)
                            .next()
                            .flatMap(actionDTO -> {
                                deletedActionName[0] = actionDTO.getName();
                                return newActionService.deleteUnpublishedAction(actionDTO.getId());
                            })
                            .then(applicationPageService.publish(application.getId(), true));
                })
                // cache() so the second import below reuses this result instead of re-running the chain
                .cache();

        // Before discard: the deleted action must be absent.
        StepVerifier
                .create(resultMonoWithoutDiscardOperation
                        .flatMap(application -> Mono.zip(
                                Mono.just(application),
                                getActionsInApplication(application).collectList()
                        )))
                .assertNext(tuple -> {
                    final Application application = tuple.getT1();
                    final List<ActionDTO> actionList = tuple.getT2();
                    assertThat(application.getName()).isEqualTo("discard-change-action-removed");
                    assertThat(application.getWorkspaceId()).isNotNull();
                    List<String> actionNames = new ArrayList<>();
                    actionList.forEach(actionDTO -> actionNames.add(actionDTO.getName()));
                    assertThat(actionNames).doesNotContain(deletedActionName[0]);
                })
                .verifyComplete();

        // Import the same application again
        final Mono<Application> resultMonoWithDiscardOperation = resultMonoWithoutDiscardOperation
                .flatMap(importedApplication -> applicationJsonMono
                        .flatMap(applicationJson -> {
                            // Mark the app as git-connected so the re-import acts as a "discard changes".
                            importedApplication.setGitApplicationMetadata(new GitApplicationMetadata());
                            importedApplication.getGitApplicationMetadata().setDefaultApplicationId(importedApplication.getId());
                            return applicationService.save(importedApplication)
                                    .then(importExportApplicationService.importApplicationInWorkspace(
                                            importedApplication.getWorkspaceId(),
                                            applicationJson,
                                            importedApplication.getId(),
                                            "main")
                                    );
                        }
                        )
                );

        // After discard: the removed action is restored.
        StepVerifier
                .create(resultMonoWithDiscardOperation
                        .flatMap(application -> Mono.zip(
                                Mono.just(application),
                                getActionsInApplication(application).collectList()
                        )))
                .assertNext(tuple -> {
                    final Application application = tuple.getT1();
                    final List<ActionDTO> actionList = tuple.getT2();
                    assertThat(application.getWorkspaceId()).isNotNull();
                    List<String> actionNames = new ArrayList<>();
                    actionList.forEach(actionDTO -> actionNames.add(actionDTO.getName()));
                    assertThat(actionNames).contains(deletedActionName[0]);
                })
                .verifyComplete();
    }

    /**
     * Testcase for checking the discard changes flow for following events:
     * 1. Import application in org
     * 2. Remove existing actionCollection from imported application
     * 3. Import application from same application json file
     * 4. Removed actionCollection along-with actions will be restored
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void discardChange_removeNewActionCollection_removedActionCollectionRestored() {
        Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application.json");
        String workspaceId = createTemplateWorkspace().getId();
        // One-element array lets the lambda below smuggle out the deleted collection's name.
        final String[] deletedActionCollectionNames = new String[1];

        final Mono<Application> resultMonoWithoutDiscardOperation = applicationJsonMono
                .flatMap(applicationJson -> {
                    applicationJson.getExportedApplication().setName("discard-change-collection-removed");
                    return importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson);
                })
                .flatMap(application -> {
                    // Delete the first action collection found in the imported app, then publish.
                    return actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null)
                            .next()
                            .flatMap(actionCollection -> {
                                deletedActionCollectionNames[0] = actionCollection.getUnpublishedCollection().getName();
                                return
actionCollectionService.deleteUnpublishedActionCollection(actionCollection.getId()); }) .then(applicationPageService.publish(application.getId(), true)); }) .cache(); StepVerifier .create(resultMonoWithoutDiscardOperation .flatMap(application -> Mono.zip( Mono.just(application), actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList() ))) .assertNext(tuple -> { final Application application = tuple.getT1(); final List<ActionCollection> actionCollectionList = tuple.getT2(); assertThat(application.getName()).isEqualTo("discard-change-collection-removed"); assertThat(application.getWorkspaceId()).isNotNull(); List<String> actionCollectionNames = new ArrayList<>(); actionCollectionList.forEach(actionCollection -> actionCollectionNames.add(actionCollection.getUnpublishedCollection().getName())); assertThat(actionCollectionNames).doesNotContain(deletedActionCollectionNames); }) .verifyComplete(); // Import the same application again final Mono<Application> resultMonoWithDiscardOperation = resultMonoWithoutDiscardOperation .flatMap(importedApplication -> applicationJsonMono .flatMap(applicationJson -> { importedApplication.setGitApplicationMetadata(new GitApplicationMetadata()); importedApplication.getGitApplicationMetadata().setDefaultApplicationId(importedApplication.getId()); return applicationService.save(importedApplication) .then(importExportApplicationService.importApplicationInWorkspace( importedApplication.getWorkspaceId(), applicationJson, importedApplication.getId(), "main") ); } ) ); StepVerifier .create(resultMonoWithDiscardOperation .flatMap(application -> Mono.zip( Mono.just(application), actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList() ))) .assertNext(tuple -> { final Application application = tuple.getT1(); final List<ActionCollection> actionCollectionList = tuple.getT2(); 
assertThat(application.getWorkspaceId()).isNotNull(); List<String> actionCollectionNames = new ArrayList<>(); actionCollectionList.forEach(actionCollection -> actionCollectionNames.add(actionCollection.getUnpublishedCollection().getName())); assertThat(actionCollectionNames).contains(deletedActionCollectionNames); }) .verifyComplete(); } @Test @WithUserDetails(value = "api_user") public void applySchemaMigration_jsonFileWithFirstVersion_migratedToLatestVersionSuccess() { FilePart filePart = createFilePart("test_assets/ImportExportServiceTest/file-with-v1.json"); Mono<String> stringifiedFile = DataBufferUtils.join(filePart.content()) .map(dataBuffer -> { byte[] data = new byte[dataBuffer.readableByteCount()]; dataBuffer.read(data); DataBufferUtils.release(dataBuffer); return new String(data); }); Mono<ApplicationJson> v1ApplicationMono = stringifiedFile .map(data -> { Gson gson = new Gson(); return gson.fromJson(data, ApplicationJson.class); }).cache(); Mono<ApplicationJson> migratedApplicationMono = v1ApplicationMono .map(applicationJson -> { ApplicationJson applicationJson1 = new ApplicationJson(); AppsmithBeanUtils.copyNestedNonNullProperties(applicationJson, applicationJson1); return JsonSchemaMigration.migrateApplicationToLatestSchema(applicationJson1); }); StepVerifier .create(Mono.zip(v1ApplicationMono, migratedApplicationMono)) .assertNext(tuple -> { ApplicationJson v1ApplicationJson = tuple.getT1(); ApplicationJson latestApplicationJson = tuple.getT2(); assertThat(v1ApplicationJson.getServerSchemaVersion()).isEqualTo(1); assertThat(v1ApplicationJson.getClientSchemaVersion()).isEqualTo(1); assertThat(latestApplicationJson.getServerSchemaVersion()).isEqualTo(JsonSchemaVersions.serverVersion); assertThat(latestApplicationJson.getClientSchemaVersion()).isEqualTo(JsonSchemaVersions.clientVersion); }) .verifyComplete(); } /** * Testcase to check if the application is exported with the datasource configuration object if this setting is * enabled from application 
     * object
     * This can be enabled with exportWithConfiguration: true
     */
    @Test
    @WithUserDetails(value = "api_user")
    public void exportApplication_withDatasourceConfig_exportedWithDecryptedFields() {
        Workspace newWorkspace = new Workspace();
        newWorkspace.setName("template-org-with-ds");

        Application testApplication = new Application();
        testApplication.setName("exportApplication_withCredentialsForSampleApps_SuccessWithDecryptFields");
        // Opting in to export-with-configuration is what makes decrypted fields appear in the export.
        testApplication.setExportWithConfiguration(true);
        testApplication = applicationPageService.createApplication(testApplication, workspaceId).block();
        assert testApplication != null;
        // Shared with exportApplication_withReadOnlyAccess_exportedWithDecryptedFields below.
        exportWithConfigurationAppId = testApplication.getId();
        ApplicationAccessDTO accessDTO = new ApplicationAccessDTO();
        accessDTO.setPublicAccess(true);
        applicationService.changeViewAccess(exportWithConfigurationAppId, accessDTO).block();
        final String appName = testApplication.getName();

        // Build up a page layout, two actions, and a JS collection; then export the app.
        final Mono<ApplicationJson> resultMono = Mono.zip(
                        Mono.just(testApplication),
                        newPageService.findPageById(testApplication.getPages().get(0).getId(), READ_PAGES, false)
                )
                .flatMap(tuple -> {
                    Application testApp = tuple.getT1();
                    PageDTO testPage = tuple.getT2();

                    Layout layout = testPage.getLayouts().get(0);
                    ObjectMapper objectMapper = new ObjectMapper();
                    JSONObject dsl = new JSONObject();
                    try {
                        dsl = new JSONObject(objectMapper.readValue(DEFAULT_PAGE_LAYOUT, new TypeReference<HashMap<String, Object>>() {
                        }));
                    } catch (JsonProcessingException e) {
                        // Best-effort: a parse failure leaves dsl empty; the widget below is still attached.
                        e.printStackTrace();
                    }

                    // Add a widget with a dynamic binding on "validAction" so that action
                    // becomes an on-page-load action.
                    ArrayList children = (ArrayList) dsl.get("children");
                    JSONObject testWidget = new JSONObject();
                    testWidget.put("widgetName", "firstWidget");
                    JSONArray temp = new JSONArray();
                    temp.addAll(List.of(new JSONObject(Map.of("key", "testField"))));
                    testWidget.put("dynamicBindingPathList", temp);
                    testWidget.put("testField", "{{ validAction.data }}");
                    children.add(testWidget);

                    layout.setDsl(dsl);
                    layout.setPublishedDsl(dsl);

                    ActionDTO action = new ActionDTO();
                    action.setName("validAction");
                    action.setPageId(testPage.getId());
                    action.setExecuteOnLoad(true);
                    ActionConfiguration actionConfiguration = new ActionConfiguration();
                    actionConfiguration.setHttpMethod(HttpMethod.GET);
                    action.setActionConfiguration(actionConfiguration);
                    action.setDatasource(datasourceMap.get("DS2"));

                    // Second action has userSetOnLoad=true so the export of that flag can be asserted.
                    ActionDTO action2 = new ActionDTO();
                    action2.setName("validAction2");
                    action2.setPageId(testPage.getId());
                    action2.setExecuteOnLoad(true);
                    action2.setUserSetOnLoad(true);
                    ActionConfiguration actionConfiguration2 = new ActionConfiguration();
                    actionConfiguration2.setHttpMethod(HttpMethod.GET);
                    action2.setActionConfiguration(actionConfiguration2);
                    action2.setDatasource(datasourceMap.get("DS2"));

                    ActionCollectionDTO actionCollectionDTO1 = new ActionCollectionDTO();
                    actionCollectionDTO1.setName("testCollection1");
                    actionCollectionDTO1.setPageId(testPage.getId());
                    actionCollectionDTO1.setApplicationId(testApp.getId());
                    actionCollectionDTO1.setWorkspaceId(testApp.getWorkspaceId());
                    actionCollectionDTO1.setPluginId(jsDatasource.getPluginId());
                    ActionDTO action1 = new ActionDTO();
                    action1.setName("testAction1");
                    action1.setActionConfiguration(new ActionConfiguration());
                    action1.getActionConfiguration().setBody("mockBody");
                    actionCollectionDTO1.setActions(List.of(action1));
                    actionCollectionDTO1.setPluginType(PluginType.JS);

                    return layoutCollectionService.createCollection(actionCollectionDTO1)
                            .then(layoutActionService.createSingleAction(action))
                            .then(layoutActionService.createSingleAction(action2))
                            .then(layoutActionService.updateLayout(testPage.getId(), layout.getId(), layout))
                            .then(importExportApplicationService.exportApplicationById(testApp.getId(), ""));
                })
                .cache();

        // DB state after export, used to verify that exported ids were remapped.
        Mono<List<NewAction>> actionListMono = resultMono
                .then(newActionService
                        .findAllByApplicationIdAndViewMode(testApplication.getId(), false, READ_ACTIONS, null).collectList());

        Mono<List<ActionCollection>> collectionListMono = resultMono.then(
                actionCollectionService
                        .findAllByApplicationIdAndViewMode(testApplication.getId(), false, READ_ACTIONS, null).collectList());

        Mono<List<NewPage>> pageListMono = resultMono.then(
                newPageService
                        .findNewPagesByApplicationId(testApplication.getId(), READ_PAGES).collectList());

        StepVerifier
                .create(Mono.zip(resultMono, actionListMono, collectionListMono, pageListMono))
                .assertNext(tuple -> {
                    ApplicationJson applicationJson = tuple.getT1();
                    List<NewAction> DBActions = tuple.getT2();
                    List<ActionCollection> DBCollections = tuple.getT3();
                    List<NewPage> DBPages = tuple.getT4();

                    Application exportedApp = applicationJson.getExportedApplication();
                    List<NewPage> pageList = applicationJson.getPageList();
                    List<NewAction> actionList = applicationJson.getActionList();
                    List<ActionCollection> actionCollectionList = applicationJson.getActionCollectionList();
                    List<Datasource> datasourceList = applicationJson.getDatasourceList();

                    List<String> exportedCollectionIds = actionCollectionList.stream().map(ActionCollection::getId).collect(Collectors.toList());
                    List<String> exportedActionIds = actionList.stream().map(NewAction::getId).collect(Collectors.toList());
                    List<String> DBCollectionIds = DBCollections.stream().map(ActionCollection::getId).collect(Collectors.toList());
                    List<String> DBActionIds = DBActions.stream().map(NewAction::getId).collect(Collectors.toList());
                    List<String> DBOnLayoutLoadActionIds = new ArrayList<>();
                    List<String> exportedOnLayoutLoadActionIds = new ArrayList<>();

                    // Collect on-page-load action ids from the DB pages and the exported pages.
                    DBPages.forEach(newPage ->
                            newPage.getUnpublishedPage().getLayouts().forEach(layout -> {
                                if (layout.getLayoutOnLoadActions() != null) {
                                    layout.getLayoutOnLoadActions().forEach(dslActionDTOSet -> {
                                        dslActionDTOSet.forEach(actionDTO -> DBOnLayoutLoadActionIds.add(actionDTO.getId()));
                                    });
                                }
                            })
                    );
                    pageList.forEach(newPage ->
                            newPage.getUnpublishedPage().getLayouts().forEach(layout -> {
                                if (layout.getLayoutOnLoadActions() != null) {
                                    layout.getLayoutOnLoadActions().forEach(dslActionDTOSet -> {
                                        dslActionDTOSet.forEach(actionDTO -> exportedOnLayoutLoadActionIds.add(actionDTO.getId()));
                                    });
                                }
                            })
                    );

                    NewPage defaultPage = pageList.get(0);

                    // Export must strip workspace/db-specific identifiers and policies.
                    assertThat(exportedApp.getName()).isEqualTo(appName);
                    assertThat(exportedApp.getWorkspaceId()).isNull();
                    assertThat(exportedApp.getPages()).hasSize(1);
                    ApplicationPage page = exportedApp.getPages().get(0);
                    // Page references are remapped from db ids to page names in the export.
                    assertThat(page.getId()).isEqualTo(defaultPage.getUnpublishedPage().getName());
                    assertThat(page.getIsDefault()).isTrue();
                    assertThat(page.getDefaultPageId()).isNull();
                    assertThat(exportedApp.getPolicies()).isNull();

                    assertThat(pageList).hasSize(1);
                    assertThat(defaultPage.getApplicationId()).isNull();
                    assertThat(defaultPage.getUnpublishedPage().getLayouts().get(0).getDsl()).isNotNull();
                    assertThat(defaultPage.getId()).isNull();
                    assertThat(defaultPage.getPolicies()).isNull();

                    assertThat(actionList.isEmpty()).isFalse();
                    assertThat(actionList).hasSize(3);
                    NewAction validAction = actionList.stream().filter(action -> action.getId().equals("Page1_validAction")).findFirst().get();
                    assertThat(validAction.getApplicationId()).isNull();
                    assertThat(validAction.getPluginId()).isEqualTo(installedPlugin.getPackageName());
                    assertThat(validAction.getPluginType()).isEqualTo(PluginType.API);
                    assertThat(validAction.getWorkspaceId()).isNull();
                    assertThat(validAction.getPolicies()).isNull();
                    assertThat(validAction.getId()).isNotNull();
                    ActionDTO unpublishedAction = validAction.getUnpublishedAction();
                    assertThat(unpublishedAction.getPageId()).isEqualTo(defaultPage.getUnpublishedPage().getName());
                    assertThat(unpublishedAction.getDatasource().getPluginId()).isEqualTo(installedPlugin.getPackageName());

                    NewAction testAction1 = actionList.stream().filter(action -> action.getUnpublishedAction().getName().equals("testAction1")).findFirst().get();
                    // Actions inside a collection are exported as "<page>_<collection>.<action>".
                    assertThat(testAction1.getId()).isEqualTo("Page1_testCollection1.testAction1");

                    assertThat(actionCollectionList.isEmpty()).isFalse();
                    assertThat(actionCollectionList).hasSize(1);
                    final ActionCollection actionCollection = actionCollectionList.get(0);
                    assertThat(actionCollection.getApplicationId()).isNull();
                    assertThat(actionCollection.getWorkspaceId()).isNull();
                    assertThat(actionCollection.getPolicies()).isNull();
                    assertThat(actionCollection.getId()).isNotNull();
                    assertThat(actionCollection.getUnpublishedCollection().getPluginType()).isEqualTo(PluginType.JS);
                    assertThat(actionCollection.getUnpublishedCollection().getPageId())
                            .isEqualTo(defaultPage.getUnpublishedPage().getName());
                    assertThat(actionCollection.getUnpublishedCollection().getPluginId()).isEqualTo(installedJsPlugin.getPackageName());

                    // With exportWithConfiguration=true, the datasource config itself is exported.
                    assertThat(datasourceList).hasSize(1);
                    Datasource datasource = datasourceList.get(0);
                    assertThat(datasource.getWorkspaceId()).isNull();
                    assertThat(datasource.getId()).isNull();
                    assertThat(datasource.getPluginId()).isEqualTo(installedPlugin.getPackageName());
                    assertThat(datasource.getDatasourceConfiguration()).isNotNull();

                    final Map<String, InvisibleActionFields> invisibleActionFields = applicationJson.getInvisibleActionFields();
                    assertThat(invisibleActionFields).isNull();
                    for (NewAction newAction : actionList) {
                        if (newAction.getId().equals("Page1_validAction2")) {
                            Assert.assertEquals(true, newAction.getUnpublishedAction().getUserSetOnLoad());
                        } else {
                            Assert.assertEquals(false, newAction.getUnpublishedAction().getUserSetOnLoad());
                        }
                    }

                    assertThat(applicationJson.getUnpublishedLayoutmongoEscapedWidgets()).isNull();
                    assertThat(applicationJson.getPublishedLayoutmongoEscapedWidgets()).isNull();
                    assertThat(applicationJson.getEditModeTheme()).isNotNull();
                    assertThat(applicationJson.getEditModeTheme().isSystemTheme()).isTrue();
                    assertThat(applicationJson.getEditModeTheme().getName()).isEqualToIgnoringCase(Theme.DEFAULT_THEME_NAME);
                    assertThat(applicationJson.getPublishedTheme()).isNotNull();
                    assertThat(applicationJson.getPublishedTheme().isSystemTheme()).isTrue();
                    assertThat(applicationJson.getPublishedTheme().getName()).isEqualToIgnoringCase(Theme.DEFAULT_THEME_NAME);

                    assertThat(exportedCollectionIds).isNotEmpty();
assertThat(exportedCollectionIds).doesNotContain(String.valueOf(DBCollectionIds)); assertThat(exportedActionIds).isNotEmpty(); assertThat(exportedActionIds).doesNotContain(String.valueOf(DBActionIds)); assertThat(exportedOnLayoutLoadActionIds).isNotEmpty(); assertThat(exportedOnLayoutLoadActionIds).doesNotContain(String.valueOf(DBOnLayoutLoadActionIds)); assertThat(applicationJson.getDecryptedFields()).isNotNull(); }) .verifyComplete(); } /** * Test to check if the application can be exported with read only access if this is sample application */ @Test @WithUserDetails(value = "usertest@usertest.com") public void exportApplication_withReadOnlyAccess_exportedWithDecryptedFields() { Mono<ApplicationJson> exportApplicationMono = importExportApplicationService .exportApplicationById(exportWithConfigurationAppId, SerialiseApplicationObjective.SHARE); StepVerifier .create(exportApplicationMono) .assertNext(applicationJson -> { assertThat(applicationJson.getExportedApplication()).isNotNull(); assertThat(applicationJson.getDecryptedFields()).isNotNull(); }) .verifyComplete(); } @Test @WithUserDetails(value = "api_user") public void importApplication_datasourceWithSameNameAndDifferentPlugin_importedWithValidActionsAndSuffixedDatasource() { ApplicationJson applicationJson = createAppJson("test_assets/ImportExportServiceTest/valid-application.json").block(); Workspace testWorkspace = new Workspace(); testWorkspace.setName("Duplicate datasource with different plugin org"); testWorkspace = workspaceService.create(testWorkspace).block(); Datasource testDatasource = new Datasource(); // Chose any plugin except for mongo, as json static file has mongo plugin for datasource Plugin postgreSQLPlugin = pluginRepository.findByName("PostgreSQL").block(); testDatasource.setPluginId(postgreSQLPlugin.getId()); testDatasource.setWorkspaceId(testWorkspace.getId()); final String datasourceName = applicationJson.getDatasourceList().get(0).getName(); testDatasource.setName(datasourceName); 
datasourceService.create(testDatasource).block(); final Mono<Application> resultMono = importExportApplicationService.importApplicationInWorkspace(testWorkspace.getId(), applicationJson); StepVerifier .create(resultMono .flatMap(application -> Mono.zip( Mono.just(application), datasourceService.findAllByWorkspaceId(application.getWorkspaceId(), MANAGE_DATASOURCES).collectList(), newActionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList() ))) .assertNext(tuple -> { final Application application = tuple.getT1(); final List<Datasource> datasourceList = tuple.getT2(); final List<NewAction> actionList = tuple.getT3(); assertThat(application.getName()).isEqualTo("valid_application"); List<String> datasourceNameList = new ArrayList<>(); assertThat(datasourceList).isNotEmpty(); datasourceList.forEach(datasource -> { assertThat(datasource.getWorkspaceId()).isEqualTo(application.getWorkspaceId()); datasourceNameList.add(datasource.getName()); }); // Check if both suffixed and newly imported datasource are present assertThat(datasourceNameList).contains(datasourceName, datasourceName + " #1"); assertThat(actionList).isNotEmpty(); actionList.forEach(newAction -> { ActionDTO actionDTO = newAction.getUnpublishedAction(); assertThat(actionDTO.getDatasource()).isNotNull(); }); }) .verifyComplete(); } @Test @WithUserDetails(value = "api_user") public void importApplication_datasourceWithSameNameAndPlugin_importedWithValidActionsWithoutSuffixedDatasource() { ApplicationJson applicationJson = createAppJson("test_assets/ImportExportServiceTest/valid-application.json").block(); Workspace testWorkspace = new Workspace(); testWorkspace.setName("Duplicate datasource with same plugin org"); testWorkspace = workspaceService.create(testWorkspace).block(); Datasource testDatasource = new Datasource(); // Chose plugin same as mongo, as json static file has mongo plugin for datasource Plugin postgreSQLPlugin = 
pluginRepository.findByName("MongoDB").block(); testDatasource.setPluginId(postgreSQLPlugin.getId()); testDatasource.setWorkspaceId(testWorkspace.getId()); final String datasourceName = applicationJson.getDatasourceList().get(0).getName(); testDatasource.setName(datasourceName); datasourceService.create(testDatasource).block(); final Mono<Application> resultMono = importExportApplicationService.importApplicationInWorkspace(testWorkspace.getId(), applicationJson); StepVerifier .create(resultMono .flatMap(application -> Mono.zip( Mono.just(application), datasourceService.findAllByWorkspaceId(application.getWorkspaceId(), MANAGE_DATASOURCES).collectList(), newActionService.findAllByApplicationIdAndViewMode(application.getId(), false, READ_ACTIONS, null).collectList() ))) .assertNext(tuple -> { final Application application = tuple.getT1(); final List<Datasource> datasourceList = tuple.getT2(); final List<NewAction> actionList = tuple.getT3(); assertThat(application.getName()).isEqualTo("valid_application"); List<String> datasourceNameList = new ArrayList<>(); assertThat(datasourceList).isNotEmpty(); datasourceList.forEach(datasource -> { assertThat(datasource.getWorkspaceId()).isEqualTo(application.getWorkspaceId()); datasourceNameList.add(datasource.getName()); }); // Check that there are no datasources are created with suffix names as datasource's are of same plugin assertThat(datasourceNameList).contains(datasourceName); assertThat(actionList).isNotEmpty(); actionList.forEach(newAction -> { ActionDTO actionDTO = newAction.getUnpublishedAction(); assertThat(actionDTO.getDatasource()).isNotNull(); }); }) .verifyComplete(); } @Test @WithUserDetails(value = "api_user") public void exportAndImportApplication_withMultiplePagesOrderSameInDeployAndEditMode_PagesOrderIsMaintainedInEditAndViewMode() { Workspace newWorkspace = new Workspace(); newWorkspace.setName("template-org-with-ds"); Application testApplication = new Application(); 
        testApplication.setName("exportAndImportApplication_withMultiplePagesOrderSameInDeployAndEditMode_PagesOrderIsMaintainedInEditAndViewMode");
        testApplication.setExportWithConfiguration(true);
        testApplication = applicationPageService.createApplication(testApplication, workspaceId).block();
        assert testApplication != null;

        PageDTO testPage1 = new PageDTO();
        testPage1.setName("testPage1");
        testPage1.setApplicationId(testApplication.getId());
        testPage1 = applicationPageService.createPage(testPage1).block();

        PageDTO testPage2 = new PageDTO();
        testPage2.setName("testPage2");
        testPage2.setApplicationId(testApplication.getId());
        testPage2 = applicationPageService.createPage(testPage2).block();

        // Set order for the newly created pages
        applicationPageService.reorderPage(testApplication.getId(), testPage1.getId(), 0, null).block();
        applicationPageService.reorderPage(testApplication.getId(), testPage2.getId(), 1, null).block();
        // Deploy the current application
        // Reordering happens BEFORE publish here, so edit and view mode share the
        // same order: testPage1, testPage2, Page1.
        applicationPageService.publish(testApplication.getId(), true).block();

        Mono<ApplicationJson> applicationJsonMono = importExportApplicationService.exportApplicationById(testApplication.getId(), "").cache();

        StepVerifier
                .create(applicationJsonMono)
                .assertNext(applicationJson -> {
                    assertThat(applicationJson.getPageOrder()).isNull();
                    assertThat(applicationJson.getPublishedPageOrder()).isNull();
                    // Exported page ids are remapped to page names.
                    List<String> pageList = applicationJson.getExportedApplication().getPages()
                            .stream()
                            .map(ApplicationPage::getId)
                            .collect(Collectors.toList());
                    assertThat(pageList.get(0)).isEqualTo("testPage1");
                    assertThat(pageList.get(1)).isEqualTo("testPage2");
                    assertThat(pageList.get(2)).isEqualTo("Page1");

                    List<String> publishedPageList = applicationJson.getExportedApplication().getPublishedPages()
                            .stream()
                            .map(ApplicationPage::getId)
                            .collect(Collectors.toList());
                    assertThat(publishedPageList.get(0)).isEqualTo("testPage1");
                    assertThat(publishedPageList.get(1)).isEqualTo("testPage2");
                    assertThat(publishedPageList.get(2)).isEqualTo("Page1");
                })
                .verifyComplete();

        ApplicationJson applicationJson = applicationJsonMono.block();
        Application application = importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson).block();

        // Get the unpublished pages and verify the order
        List<ApplicationPage> pageDTOS = application.getPages();
        Mono<NewPage> newPageMono1 = newPageService.findById(pageDTOS.get(0).getId(), MANAGE_PAGES);
        Mono<NewPage> newPageMono2 = newPageService.findById(pageDTOS.get(1).getId(), MANAGE_PAGES);
        Mono<NewPage> newPageMono3 = newPageService.findById(pageDTOS.get(2).getId(), MANAGE_PAGES);

        StepVerifier
                .create(Mono.zip(newPageMono1, newPageMono2, newPageMono3))
                .assertNext(objects -> {
                    NewPage newPage1 = objects.getT1();
                    NewPage newPage2 = objects.getT2();
                    NewPage newPage3 = objects.getT3();
                    assertThat(newPage1.getUnpublishedPage().getName()).isEqualTo("testPage1");
                    assertThat(newPage2.getUnpublishedPage().getName()).isEqualTo("testPage2");
                    assertThat(newPage3.getUnpublishedPage().getName()).isEqualTo("Page1");
                    assertThat(newPage1.getId()).isEqualTo(pageDTOS.get(0).getId());
                    assertThat(newPage2.getId()).isEqualTo(pageDTOS.get(1).getId());
                    assertThat(newPage3.getId()).isEqualTo(pageDTOS.get(2).getId());
                })
                .verifyComplete();

        // Get the published pages
        List<ApplicationPage> publishedPageDTOs = application.getPublishedPages();
        Mono<NewPage> newPublishedPageMono1 = newPageService.findById(publishedPageDTOs.get(0).getId(), MANAGE_PAGES);
        Mono<NewPage> newPublishedPageMono2 = newPageService.findById(publishedPageDTOs.get(1).getId(), MANAGE_PAGES);
        Mono<NewPage> newPublishedPageMono3 = newPageService.findById(publishedPageDTOs.get(2).getId(), MANAGE_PAGES);

        StepVerifier
                .create(Mono.zip(newPublishedPageMono1, newPublishedPageMono2, newPublishedPageMono3))
                .assertNext(objects -> {
                    NewPage newPage1 = objects.getT1();
                    NewPage newPage2 = objects.getT2();
                    NewPage newPage3 = objects.getT3();
                    assertThat(newPage1.getPublishedPage().getName()).isEqualTo("testPage1");
                    assertThat(newPage2.getPublishedPage().getName()).isEqualTo("testPage2");
                    assertThat(newPage3.getPublishedPage().getName()).isEqualTo("Page1");
                    assertThat(newPage1.getId()).isEqualTo(publishedPageDTOs.get(0).getId());
                    assertThat(newPage2.getId()).isEqualTo(publishedPageDTOs.get(1).getId());
                    assertThat(newPage3.getId()).isEqualTo(publishedPageDTOs.get(2).getId());
                })
                .verifyComplete();
    }

    @Test
    @WithUserDetails(value = "api_user")
    public void exportAndImportApplication_withMultiplePagesOrderDifferentInDeployAndEditMode_PagesOrderIsMaintainedInEditAndViewMode() {
        Workspace newWorkspace = new Workspace();
        newWorkspace.setName("template-org-with-ds");

        Application testApplication = new Application();
        testApplication.setName("exportAndImportApplication_withMultiplePagesOrderDifferentInDeployAndEditMode_PagesOrderIsMaintainedInEditAndViewMode");
        testApplication.setExportWithConfiguration(true);
        testApplication = applicationPageService.createApplication(testApplication, workspaceId).block();
        assert testApplication != null;

        PageDTO testPage1 = new PageDTO();
        testPage1.setName("testPage1");
        testPage1.setApplicationId(testApplication.getId());
        testPage1 = applicationPageService.createPage(testPage1).block();

        PageDTO testPage2 = new PageDTO();
        testPage2.setName("testPage2");
        testPage2.setApplicationId(testApplication.getId());
        testPage2 = applicationPageService.createPage(testPage2).block();

        // Deploy the current application so that edit and view mode will have different page order
        applicationPageService.publish(testApplication.getId(), true).block();

        // Set order for the newly created pages
        // Reordering AFTER publish changes only the edit-mode order; published order
        // stays Page1, testPage1, testPage2.
        applicationPageService.reorderPage(testApplication.getId(), testPage1.getId(), 0, null).block();
        applicationPageService.reorderPage(testApplication.getId(), testPage2.getId(), 1, null).block();

        Mono<ApplicationJson> applicationJsonMono = importExportApplicationService.exportApplicationById(testApplication.getId(), "").cache();

        StepVerifier
                .create(applicationJsonMono)
                .assertNext(applicationJson -> {
                    Application exportedApplication = applicationJson.getExportedApplication();
                    exportedApplication.setViewMode(false);
                    List<String> pageOrder = exportedApplication.getPages()
                            .stream()
                            .map(ApplicationPage::getId)
                            .collect(Collectors.toList());
                    assertThat(pageOrder.get(0)).isEqualTo("testPage1");
                    assertThat(pageOrder.get(1)).isEqualTo("testPage2");
                    assertThat(pageOrder.get(2)).isEqualTo("Page1");

                    pageOrder.clear();
                    pageOrder = exportedApplication.getPublishedPages()
                            .stream()
                            .map(ApplicationPage::getId)
                            .collect(Collectors.toList());
                    assertThat(pageOrder.get(0)).isEqualTo("Page1");
                    assertThat(pageOrder.get(1)).isEqualTo("testPage1");
                    assertThat(pageOrder.get(2)).isEqualTo("testPage2");
                })
                .verifyComplete();

        ApplicationJson applicationJson = applicationJsonMono.block();
        Application application = importExportApplicationService.importApplicationInWorkspace(workspaceId, applicationJson).block();

        // Get the unpublished pages and verify the order
        application.setViewMode(false);
        List<ApplicationPage> pageDTOS = application.getPages();
        Mono<NewPage> newPageMono1 = newPageService.findById(pageDTOS.get(0).getId(), MANAGE_PAGES);
        Mono<NewPage> newPageMono2 = newPageService.findById(pageDTOS.get(1).getId(), MANAGE_PAGES);
        Mono<NewPage> newPageMono3 = newPageService.findById(pageDTOS.get(2).getId(), MANAGE_PAGES);

        StepVerifier
                .create(Mono.zip(newPageMono1, newPageMono2, newPageMono3))
                .assertNext(objects -> {
                    NewPage newPage1 = objects.getT1();
                    NewPage newPage2 = objects.getT2();
                    NewPage newPage3 = objects.getT3();
                    assertThat(newPage1.getUnpublishedPage().getName()).isEqualTo("testPage1");
                    assertThat(newPage2.getUnpublishedPage().getName()).isEqualTo("testPage2");
                    assertThat(newPage3.getUnpublishedPage().getName()).isEqualTo("Page1");
                    assertThat(newPage1.getId()).isEqualTo(pageDTOS.get(0).getId());
                    assertThat(newPage2.getId()).isEqualTo(pageDTOS.get(1).getId());
                    assertThat(newPage3.getId()).isEqualTo(pageDTOS.get(2).getId());
                })
                .verifyComplete();

        // Get the published pages
        List<ApplicationPage> publishedPageDTOs = application.getPublishedPages();
        Mono<NewPage> newPublishedPageMono1 = newPageService.findById(publishedPageDTOs.get(0).getId(), MANAGE_PAGES);
        Mono<NewPage> newPublishedPageMono2 = newPageService.findById(publishedPageDTOs.get(1).getId(), MANAGE_PAGES);
        Mono<NewPage> newPublishedPageMono3 = newPageService.findById(publishedPageDTOs.get(2).getId(), MANAGE_PAGES);

        StepVerifier
                .create(Mono.zip(newPublishedPageMono1, newPublishedPageMono2, newPublishedPageMono3))
                .assertNext(objects -> {
                    NewPage newPage1 = objects.getT1();
                    NewPage newPage2 = objects.getT2();
                    NewPage newPage3 = objects.getT3();
                    assertThat(newPage1.getPublishedPage().getName()).isEqualTo("Page1");
                    assertThat(newPage2.getPublishedPage().getName()).isEqualTo("testPage1");
                    assertThat(newPage3.getPublishedPage().getName()).isEqualTo("testPage2");
                    assertThat(newPage1.getId()).isEqualTo(publishedPageDTOs.get(0).getId());
                    assertThat(newPage2.getId()).isEqualTo(publishedPageDTOs.get(1).getId());
                    assertThat(newPage3.getId()).isEqualTo(publishedPageDTOs.get(2).getId());
                })
                .verifyComplete();
    }

    // NOTE(review): this helper continues past the end of this chunk; kept as-is.
    private ApplicationJson createApplicationJSON(List<String> pageNames) {
        ApplicationJson applicationJson = new ApplicationJson();

        // set the application data
        Application application = new Application();
        application.setName("Template Application");
        application.setSlug("template-application");
        application.setForkingEnabled(true);
        application.setIsPublic(true);
        application.setApplicationVersion(ApplicationVersion.LATEST_VERSION);
        applicationJson.setExportedApplication(application);

        Datasource sampleDatasource = new Datasource();
        sampleDatasource.setName("SampleDS");
        sampleDatasource.setPluginId("restapi-plugin");

        applicationJson.setDatasourceList(List.of(sampleDatasource));

        // add pages and actions
        List<NewPage> newPageList = new ArrayList<>(pageNames.size());
        List<NewAction> actionList = new ArrayList<>();
        List<ActionCollection>
actionCollectionList = new ArrayList<>(); for(String pageName : pageNames) { NewPage newPage = new NewPage(); newPage.setUnpublishedPage(new PageDTO()); newPage.getUnpublishedPage().setName(pageName); newPage.getUnpublishedPage().setLayouts(List.of()); newPageList.add(newPage); NewAction action = new NewAction(); action.setId(pageName + "_SampleQuery"); action.setPluginType(PluginType.API); action.setPluginId("restapi-plugin"); action.setUnpublishedAction(new ActionDTO()); action.getUnpublishedAction().setName("SampleQuery"); action.getUnpublishedAction().setPageId(pageName); action.getUnpublishedAction().setDatasource(new Datasource()); action.getUnpublishedAction().getDatasource().setId("SampleDS"); action.getUnpublishedAction().getDatasource().setPluginId("restapi-plugin"); actionList.add(action); ActionCollection actionCollection = new ActionCollection(); actionCollection.setId(pageName + "_SampleJS"); actionCollection.setUnpublishedCollection(new ActionCollectionDTO()); actionCollection.getUnpublishedCollection().setName("SampleJS"); actionCollection.getUnpublishedCollection().setPageId(pageName); actionCollection.getUnpublishedCollection().setPluginId("js-plugin"); actionCollection.getUnpublishedCollection().setPluginType(PluginType.JS); actionCollection.getUnpublishedCollection().setBody("export default {\\n\\t\\n}"); actionCollectionList.add(actionCollection); } applicationJson.setPageList(newPageList); applicationJson.setActionList(actionList); applicationJson.setActionCollectionList(actionCollectionList); return applicationJson; } @Test @WithUserDetails("api_user") public void mergeApplicationJsonWithApplication_WhenPageNameConflicts_PageNamesRenamed() { String uniqueString = UUID.randomUUID().toString(); Application destApplication = new Application(); destApplication.setName("App_" + uniqueString); destApplication.setSlug("my-slug"); destApplication.setIsPublic(false); destApplication.setForkingEnabled(false); Mono<Application> createAppAndPageMono = 
applicationPageService.createApplication(destApplication, workspaceId) .flatMap(application -> { PageDTO pageDTO = new PageDTO(); pageDTO.setName("Home"); pageDTO.setApplicationId(application.getId()); return applicationPageService.createPage(pageDTO).thenReturn(application); }); // let's create an ApplicationJSON which we'll merge with application created by createAppAndPageMono ApplicationJson applicationJson = createApplicationJSON(List.of("Home", "About")); Mono<Tuple3<ApplicationPagesDTO, List<NewAction>, List<ActionCollection>>> tuple2Mono = createAppAndPageMono.flatMap(application -> // merge the application json with the application we've created importExportApplicationService.mergeApplicationJsonWithApplication(application.getWorkspaceId(), application.getId(), null, applicationJson, null) .thenReturn(application) ).flatMap(application -> // fetch the application pages, this should contain pages from application json Mono.zip( newPageService.findApplicationPages(application.getId(), null, null, ApplicationMode.EDIT), newActionService.findAllByApplicationIdAndViewMode(application.getId(), false, MANAGE_ACTIONS, null).collectList(), actionCollectionService.findAllByApplicationIdAndViewMode(application.getId(), false, MANAGE_ACTIONS, null).collectList() ) ); StepVerifier.create(tuple2Mono).assertNext(objects -> { ApplicationPagesDTO applicationPagesDTO = objects.getT1(); List<NewAction> newActionList = objects.getT2(); List<ActionCollection> actionCollectionList = objects.getT3(); assertThat(applicationPagesDTO.getApplication().getName()).isEqualTo(destApplication.getName()); assertThat(applicationPagesDTO.getApplication().getSlug()).isEqualTo(destApplication.getSlug()); assertThat(applicationPagesDTO.getApplication().getIsPublic()).isFalse(); assertThat(applicationPagesDTO.getApplication().getForkingEnabled()).isFalse(); assertThat(applicationPagesDTO.getPages().size()).isEqualTo(4); List<String> pageNames = applicationPagesDTO.getPages().stream() 
.map(PageNameIdDTO::getName) .collect(Collectors.toList()); assertThat(pageNames).contains("Home", "Home2", "About"); assertThat(newActionList.size()).isEqualTo(2); // we imported two pages and each page has one action assertThat(actionCollectionList.size()).isEqualTo(2); // we imported two pages and each page has one Collection }).verifyComplete(); } @Test @WithUserDetails("api_user") public void mergeApplicationJsonWithApplication_WhenPageListIProvided_OnlyListedPagesAreMerged() { String uniqueString = UUID.randomUUID().toString(); Application destApplication = new Application(); destApplication.setName("App_" + uniqueString); Mono<Application> createAppAndPageMono = applicationPageService.createApplication(destApplication, workspaceId) .flatMap(application -> { PageDTO pageDTO = new PageDTO(); pageDTO.setName("Home"); pageDTO.setApplicationId(application.getId()); return applicationPageService.createPage(pageDTO).thenReturn(application); }); // let's create an ApplicationJSON which we'll merge with application created by createAppAndPageMono ApplicationJson applicationJson = createApplicationJSON(List.of("Profile", "About", "Contact US")); Mono<ApplicationPagesDTO> applicationPagesDTOMono = createAppAndPageMono.flatMap(application -> // merge the application json with the application we've created importExportApplicationService.mergeApplicationJsonWithApplication(application.getWorkspaceId(), application.getId(), null, applicationJson, List.of("About", "Contact US")) .thenReturn(application) ).flatMap(application -> // fetch the application pages, this should contain pages from application json newPageService.findApplicationPages(application.getId(), null, null, ApplicationMode.EDIT) ); StepVerifier.create(applicationPagesDTOMono).assertNext(applicationPagesDTO -> { assertThat(applicationPagesDTO.getPages().size()).isEqualTo(4); List<String> pageNames = applicationPagesDTO.getPages().stream() .map(PageNameIdDTO::getName) .collect(Collectors.toList()); 
assertThat(pageNames).contains("Home", "About", "Contact US"); assertThat(pageNames).doesNotContain("Profile"); }).verifyComplete(); } @Test @WithUserDetails(value = "api_user") public void exportApplicationById_WhenThemeDoesNotExist_ExportedWithDefaultTheme() { Theme customTheme = new Theme(); customTheme.setName("my-custom-theme"); String randomId = UUID.randomUUID().toString(); Application testApplication = new Application(); testApplication.setName("Application_" + randomId); Mono<ApplicationJson> exportedAppJson = applicationPageService.createApplication(testApplication, workspaceId) .flatMap(application -> { application.setEditModeThemeId("invalid-theme-id"); application.setPublishedModeThemeId("invalid-theme-id"); String branchName = null; return applicationService.save(application) .then(importExportApplicationService.exportApplicationById(application.getId(), branchName)); }); StepVerifier.create(exportedAppJson).assertNext(applicationJson -> { assertThat(applicationJson.getEditModeTheme().getName()).isEqualToIgnoringCase(Theme.DEFAULT_THEME_NAME); assertThat(applicationJson.getPublishedTheme().getName()).isEqualToIgnoringCase(Theme.DEFAULT_THEME_NAME); }).verifyComplete(); } @Test @WithUserDetails(value = "api_user") public void importApplication_invalidPluginReferenceForDatasource_throwException() { Workspace newWorkspace = new Workspace(); newWorkspace.setName("Template Workspace"); ApplicationJson appJson = createAppJson("test_assets/ImportExportServiceTest/valid-application.json").block(); assert appJson != null; final String randomId = UUID.randomUUID().toString(); appJson.getDatasourceList().get(0).setPluginId(randomId); final Mono<Application> resultMono = workspaceService .create(newWorkspace) .flatMap(workspace -> importExportApplicationService.importApplicationInWorkspace(workspace.getId(), appJson)); StepVerifier .create(resultMono) .expectErrorMatches(throwable -> throwable instanceof AppsmithException && 
throwable.getMessage().equals(AppsmithError.UNKNOWN_PLUGIN_REFERENCE.getMessage(randomId))) .verify(); } @Test @WithUserDetails(value = "api_user") public void importApplication_importSameApplicationTwice_applicationImportedLaterWithSuffixCount() { Mono<ApplicationJson> applicationJsonMono = createAppJson("test_assets/ImportExportServiceTest/valid-application-without-action-collection.json"); Workspace newWorkspace = new Workspace(); newWorkspace.setName("Template Organization"); Mono<Workspace> createWorkspaceMono = workspaceService.create(newWorkspace).cache(); final Mono<Application> importApplicationMono = createWorkspaceMono .zipWith(applicationJsonMono) .flatMap(tuple -> { Workspace workspace = tuple.getT1(); ApplicationJson applicationJson = tuple.getT2(); return importExportApplicationService .importApplicationInWorkspace(workspace.getId(), applicationJson); }); StepVerifier .create(importApplicationMono.zipWhen(application -> importApplicationMono)) .assertNext(tuple -> { Application firstImportedApplication = tuple.getT1(); Application secondImportedApplication = tuple.getT2(); assertThat(firstImportedApplication.getName()).isEqualTo("valid_application"); assertThat(secondImportedApplication.getName()).isEqualTo("valid_application (1)"); assertThat(firstImportedApplication.getWorkspaceId()).isEqualTo(secondImportedApplication.getWorkspaceId()); assertThat(firstImportedApplication.getWorkspaceId()).isNotNull(); }) .verifyComplete(); } }
package com.alibaba.weex.uitest.TC_AG;

import com.alibaba.weex.WXPageActivity;
import com.alibaba.weex.util.TestFlow;

import java.util.HashMap;

import org.junit.Before;
import org.junit.Test;

/**
 * UI test flow for the border-top-right-radius property of an input component.
 * Drives the shared {@link TestFlow} harness with a declarative step map: each step
 * clicks a radius value and captures a screenshot for comparison.
 */
public class AG_Border_Input_Border_Top_Right_Radius extends TestFlow {

  public AG_Border_Input_Border_Top_Right_Radius() {
    super(WXPageActivity.class);
  }

  /**
   * Builds the step map consumed by {@link TestFlow}.
   *
   * Fix: the original created the map with {@code new <String, Object> HashMap()} —
   * explicit constructor type arguments, which the compiler ignores, leaving a raw
   * {@code HashMap}. Replaced with proper generics. The steps were also built with raw
   * double-brace initialization, which allocates an anonymous HashMap subclass holding a
   * reference to the enclosing test instance; plain pre-populated maps are used instead.
   * All keys and values (including the historical "testComponet" key the harness reads)
   * are preserved byte-for-byte.
   *
   * @throws InterruptedException propagated from {@code TestFlow.setUp()}
   */
  @Before
  public void setUp() throws InterruptedException {
    super.setUp();
    HashMap<String, Object> testMap = new HashMap<>();
    testMap.put("testComponet", "AG_Border");
    testMap.put("testChildCaseInit", "AG_Border_Input_Border_Top_Right_Radius");

    // Step 1: click the "10" radius option and snapshot the result.
    HashMap<String, String> step1 = new HashMap<>();
    step1.put("click", "10");
    step1.put("screenshot", "AG_Border_Input_Border_Top_Right_Radius_01_10");
    testMap.put("step1", step1);

    // Step 2: click the "20" radius option and snapshot the result.
    HashMap<String, String> step2 = new HashMap<>();
    step2.put("click", "20");
    step2.put("screenshot", "AG_Border_Input_Border_Top_Right_Radius_02_20");
    testMap.put("step2", step2);

    super.setTestMap(testMap);
  }

  /** Executes the flow described by the step map. */
  @Test
  public void doTest(){
    super.testByTestMap();
  }
}