text
stringlengths 7
1.01M
|
|---|
package edu.scripps.yates.proteindb.persistence.mysql.utils.tablemapper.idtablemapper;
import java.util.Collection;
import org.apache.log4j.Logger;
import edu.scripps.yates.proteindb.persistence.mysql.access.PreparedCriteria;
import gnu.trove.TIntCollection;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
/**
 * Maps between condition IDs and ratio descriptor IDs using the two in-memory
 * lookup tables maintained by {@link IDTableMapper}. The table is loaded from
 * the database on first use and can be reloaded after {@link #clear()}.
 */
public class ConditionIDToRatioDescriptorIDTableMapper extends IDTableMapper {
	private static final Logger log = Logger.getLogger(ConditionIDToRatioDescriptorIDTableMapper.class);
	private static ConditionIDToRatioDescriptorIDTableMapper instance;

	private ConditionIDToRatioDescriptorIDTableMapper() {
		super();
		log.info("ID mapping table between conditions and ratio descriptors is loaded:");
		log.info(getRatioDescriptorsByConditionsTableMap().size() + " conditions mapped to "
				+ getConditionsByRatioDescriptorsTableMap().size() + " ratio descriptors");
	}

	/**
	 * Returns the singleton instance, creating it on first call and (re)loading
	 * the mapping table from the database whenever the table is empty
	 * (e.g. after a call to {@link #clear()}).
	 */
	public static synchronized ConditionIDToRatioDescriptorIDTableMapper getInstance() {
		if (instance == null) {
			instance = new ConditionIDToRatioDescriptorIDTableMapper();
		}
		if (instance.get_1By2Map().isEmpty()) {
			instance.processDataFromDB(instance.getMapTableFromDB());
		}
		return instance;
	}

	/** Collects the ratio descriptor IDs mapped to any of the given condition IDs. */
	public TIntSet getRatioDescriptorIDsFromConditionIDs(Collection<Integer> conditionIDs) {
		final TIntSet ret = new TIntHashSet(conditionIDs.size());
		for (final Integer conditionID : conditionIDs) {
			// single map lookup instead of containsKey() followed by get()
			final TIntSet descriptorIDs = getRatioDescriptorsByConditionsTableMap().get(conditionID);
			if (descriptorIDs != null) {
				ret.addAll(descriptorIDs);
			}
		}
		return ret;
	}

	/** Returns the ratio descriptor IDs mapped to a single condition ID (empty set if unmapped). */
	public TIntSet getRatioDescriptorIDsFromConditionID(int conditionID) {
		final TIntSet ret = new TIntHashSet();
		final TIntSet descriptorIDs = getRatioDescriptorsByConditionsTableMap().get(conditionID);
		if (descriptorIDs != null) {
			ret.addAll(descriptorIDs);
		}
		return ret;
	}

	/** Primitive-collection overload: collects descriptor IDs for the given condition IDs. */
	public TIntSet getRatioDescriptorIDsFromConditionIDs(TIntCollection conditionIDs) {
		final TIntSet ret = new TIntHashSet(conditionIDs.size());
		final TIntIterator iterator = conditionIDs.iterator();
		while (iterator.hasNext()) {
			// TIntIterator.next() returns a primitive int; avoid boxing to Integer
			final int conditionID = iterator.next();
			final TIntSet descriptorIDs = getRatioDescriptorsByConditionsTableMap().get(conditionID);
			if (descriptorIDs != null) {
				ret.addAll(descriptorIDs);
			}
		}
		return ret;
	}

	/** Collects the condition IDs mapped to any of the given ratio descriptor IDs. */
	public TIntSet getConditionIDsFromRatioDescriptorIDs(Collection<Integer> ratioDescriptorIDs) {
		final TIntSet ret = new TIntHashSet(ratioDescriptorIDs.size());
		for (final Integer ratioDescriptorID : ratioDescriptorIDs) {
			final TIntSet conditionIDs = getConditionsByRatioDescriptorsTableMap().get(ratioDescriptorID);
			if (conditionIDs != null) {
				ret.addAll(conditionIDs);
			}
		}
		return ret;
	}

	/** Returns the condition IDs mapped to a single ratio descriptor ID (empty set if unmapped). */
	public TIntSet getConditionIDsFromRatioDescriptorID(int ratioDescriptorID) {
		final TIntSet ret = new TIntHashSet();
		final TIntSet conditionIDs = getConditionsByRatioDescriptorsTableMap().get(ratioDescriptorID);
		if (conditionIDs != null) {
			ret.addAll(conditionIDs);
		}
		return ret;
	}

	/** Primitive-collection overload: collects condition IDs for the given descriptor IDs. */
	public TIntSet getConditionIDsFromRatioDescriptorIDs(TIntCollection ratioDescriptorIDs) {
		final TIntSet ret = new TIntHashSet(ratioDescriptorIDs.size());
		final TIntIterator iterator = ratioDescriptorIDs.iterator();
		while (iterator.hasNext()) {
			// TIntIterator.next() returns a primitive int; avoid boxing to Integer
			final int ratioDescriptorID = iterator.next();
			final TIntSet conditionIDs = getConditionsByRatioDescriptorsTableMap().get(ratioDescriptorID);
			if (conditionIDs != null) {
				ret.addAll(conditionIDs);
			}
		}
		return ret;
	}

	@Override
	protected int[][] getMapTableFromDB() {
		final int[][] ratioDescriptors = PreparedCriteria.getRatioDescriptorsTable();
		// this table has 3 elements per row: the descriptor id and 2 condition ids.
		// Because the descriptor id is always unique, we create a matrix of
		// double the number of rows, pairing each condition with its descriptor.
		final int[][] ret = new int[ratioDescriptors.length * 2][2];
		int i = 0;
		for (final int[] triplet : ratioDescriptors) {
			final int ratioDescriptorID = triplet[0];
			final int condition1ID = triplet[1];
			final int condition2ID = triplet[2];
			ret[i][0] = condition1ID;
			ret[i][1] = ratioDescriptorID;
			i++;
			ret[i][0] = condition2ID;
			ret[i][1] = ratioDescriptorID;
			i++;
		}
		return ret;
	}

	/**
	 * Removes mapping tables and instance, so that next call to getInstance() will
	 * query the database again to populate maps
	 */
	@Override
	public void clear() {
		getConditionsByRatioDescriptorsTableMap().clear();
		getRatioDescriptorsByConditionsTableMap().clear();
		instance = null;
	}

	/** Alias for the inherited 1-by-2 map: ratio descriptor ID -> condition IDs. */
	private TIntObjectHashMap<TIntSet> getConditionsByRatioDescriptorsTableMap() {
		return super.get_1By2Map();
	}

	/** Alias for the inherited 2-by-1 map: condition ID -> ratio descriptor IDs. */
	private TIntObjectHashMap<TIntSet> getRatioDescriptorsByConditionsTableMap() {
		return super.get_2By1Map();
	}
}
|
/*
* Copyright (C) 2004-2016 L2J DataPack
*
* This file is part of L2J DataPack.
*
* L2J DataPack is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* L2J DataPack is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package instances.ChambersOfDelusion;
import com.l2jserver.gameserver.model.Location;
/**
* Chamber of Delusion South.
* @author GKR
*/
/**
 * Chamber of Delusion South.
 * @author GKR
 */
public final class ChamberOfDelusionSouth extends Chamber
{
	// NPC ids
	private static final int GATEKEEPER_ENTRANCE = 32660;
	private static final int GATEKEEPER_ROOM_FIRST = 32674;
	private static final int GATEKEEPER_ROOM_LAST = 32678;
	private static final int RAID_BOSS_AENKINEL = 25692;
	private static final int TREASURE_BOX = 18838;
	// Teleport targets for the instance rooms; the last entry is the raid room.
	private static final Location[] ROOM_LOCATIONS = new Location[]
	{
		new Location(-122368, -207820, -6720),
		new Location(-122368, -206940, -6720),
		new Location(-122368, -209116, -6720),
		new Location(-121456, -207356, -6720),
		new Location(-121440, -209004, -6720), // Raid room
	};
	// Client-side instance id and the XML template describing this instance.
	private static final int INSTANCE_ID = 129;
	private static final String TEMPLATE_FILE = "ChamberOfDelusionSouth.xml";
	
	public ChamberOfDelusionSouth()
	{
		super(ChamberOfDelusionSouth.class.getSimpleName(), "instances", INSTANCE_ID, TEMPLATE_FILE, GATEKEEPER_ENTRANCE, GATEKEEPER_ROOM_FIRST, GATEKEEPER_ROOM_LAST, RAID_BOSS_AENKINEL, TREASURE_BOX);
		ROOM_ENTER_POINTS = ROOM_LOCATIONS;
	}
}
|
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server.domain.oauth;
import java.util.Map;
import com.thoughtworks.go.server.oauth.OauthDataSource;
/**
* @understands authorization of oauth client to access service on users behalf
*/
/**
 * @understands authorization of oauth client to access service on users behalf
 */
public class OauthAuthorization extends OauthDomainEntity<OauthDataSource.OauthAuthorizationDTO> {
    private String userId;
    private OauthClient oauthClient;
    private String code;
    private long expiresAt;

    public OauthAuthorization(String userId, OauthClient oauthClient, String code, long expiresAt) {
        this();
        this.userId = userId;
        this.oauthClient = oauthClient;
        this.code = code;
        this.expiresAt = expiresAt;
    }

    // no-arg constructor required by Hibernate; not for application use
    private OauthAuthorization() {
    }

    /** Builds an authorization from a raw attribute map (as supplied by the oauth layer). */
    public OauthAuthorization(Map attributes, OauthClient oauthClient) {
        this((String) attributes.get("user_id"), oauthClient, (String) attributes.get("code"), (Long) attributes.get("expires_at"));
        setIdIfAvailable(attributes);
    }

    /** Copies this entity's persistent state into a transport DTO. */
    public OauthDataSource.OauthAuthorizationDTO getDTO() {
        final OauthDataSource.OauthAuthorizationDTO authorization = new OauthDataSource.OauthAuthorizationDTO();
        authorization.setId(getId());
        authorization.setOauthClientId(String.valueOf(oauthClient.getId()));
        authorization.setUserId(userId);
        authorization.setCode(code);
        authorization.setExpiresAt(expiresAt);
        return authorization;
    }
}
|
package com.shark.util.util;
import com.shark.util.util.OS.OSInfo;
import com.shark.util.util.OS.Platform;
public class SeparatorUtil {
    /**
     * Returns the path-list separator for the detected operating system:
     * ";" when {@code OSInfo} reports Windows, ":" for every other platform.
     */
    public static String getPathSeparatorByOS() {
        return OSInfo.getOSname() == Platform.Windows ? ";" : ":";
    }
}
|
/*
* Copyright 2010-2021 Australian Signals Directorate
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.gov.asd.tac.constellation.utilities.tooltip;
import au.gov.asd.tac.constellation.utilities.tooltip.TooltipProvider.TooltipDefinition;
import java.time.Duration;
import java.util.List;
import javafx.geometry.Point2D;
import javafx.scene.control.Hyperlink;
import javafx.scene.control.TextInputControl;
import javafx.scene.control.skin.TextAreaSkin;
import javafx.scene.control.skin.TextFieldSkin;
import javafx.scene.control.skin.TextInputControlSkin;
import javafx.scene.text.HitInfo;
import org.fxmisc.richtext.InlineCssTextArea;
import org.fxmisc.richtext.event.MouseOverTextEvent;
/**
* Handles tool tip rendering on TextInputControl objects.
*
* @author sirius
* @author sol695510
*/
/**
 * Handles tool tip rendering on TextInputControl objects.
 *
 * @author sirius
 * @author sol695510
 */
public class TooltipUtilities {

    /** Vertical offset applied below a hyperlink before showing its tooltip. */
    private static final double HYPERLINK_TOOLTIP_VERTICAL_GAP = 15.0;

    /**
     * Wires mouse-entered/moved/exited handlers onto the control so that tooltips
     * for the character under the mouse are shown in the given tooltip pane.
     */
    public static void activateTextInputControl(final TextInputControl textInputControl, final TooltipPane tooltipPane) {
        final int[] characterIndex = new int[1];
        final TooltipNode[] tooltipNode = new TooltipNode[1];
        textInputControl.setOnMouseEntered(event -> {
            if (tooltipPane.isEnabled()) {
                final TextInputControlSkin<?> skin = (TextInputControlSkin<?>) textInputControl.getSkin();
                characterIndex[0] = hitCharIndex(skin, event.getX(), event.getY());
                final List<TooltipProvider.TooltipDefinition> definitions = TooltipProvider.getTooltips(textInputControl.getText(), characterIndex[0]);
                textInputControl.requestFocus();
                selectActiveArea(textInputControl, definitions);
                if (!definitions.isEmpty()) {
                    tooltipNode[0] = createTooltipNode(definitions);
                    final Point2D location = textInputControl.localToScene(event.getX(), textInputControl.getHeight());
                    tooltipPane.showTooltip(tooltipNode[0], location.getX(), location.getY());
                }
            }
        });
        textInputControl.setOnMouseMoved(event -> {
            if (tooltipPane.isEnabled()) {
                final TextInputControlSkin<?> skin = (TextInputControlSkin<?>) textInputControl.getSkin();
                final int newIndex = hitCharIndex(skin, event.getX(), event.getY());
                // If the mouse is over a different character then get new tooltips
                if (newIndex != characterIndex[0]) {
                    characterIndex[0] = newIndex;
                    final List<TooltipProvider.TooltipDefinition> definitions = TooltipProvider.getTooltips(textInputControl.getText(), characterIndex[0]);
                    selectActiveArea(textInputControl, definitions);
                    tooltipNode[0] = definitions.isEmpty() ? null : createTooltipNode(definitions);
                }
                // If we have a tooltip then reposition under mouse
                if (tooltipNode[0] == null) {
                    tooltipPane.hideTooltip();
                } else {
                    final Point2D location = textInputControl.localToScene(event.getX(), textInputControl.getHeight());
                    tooltipPane.showTooltip(tooltipNode[0], location.getX(), location.getY());
                }
            }
        });
        textInputControl.setOnMouseExited(event -> {
            if (tooltipPane.isEnabled()) {
                tooltipPane.hideTooltip();
            }
        });
    }

    /**
     * Wires mouse-entered/exited handlers onto the hyperlink so that all tooltips
     * for its text are shown slightly below it.
     */
    public static void activateTextInputControl(final Hyperlink hyperlink, final TooltipPane tooltipPane) {
        final TooltipNode[] tooltipNode = new TooltipNode[1];
        hyperlink.setOnMouseEntered(event -> {
            if (tooltipPane.isEnabled()) {
                final List<TooltipProvider.TooltipDefinition> definitions = TooltipProvider.getAllTooltips(hyperlink.getText());
                hyperlink.requestFocus();
                if (!definitions.isEmpty()) {
                    tooltipNode[0] = createTooltipNode(definitions);
                    final Point2D location = hyperlink.localToScene(event.getX(), event.getY() + hyperlink.getHeight() + HYPERLINK_TOOLTIP_VERTICAL_GAP);
                    tooltipPane.showTooltip(tooltipNode[0], location.getX(), location.getY());
                }
            }
        });
        hyperlink.setOnMouseExited(event -> {
            if (tooltipPane.isEnabled()) {
                tooltipPane.hideTooltip();
            }
        });
    }

    /**
     * Wires mouse-over-text handlers onto the RichTextFX area so that tooltips for
     * the character under the mouse are shown after a short hover delay.
     */
    public static void activateTextInputControl(final InlineCssTextArea textArea, final TooltipPane tooltipPane) {
        final int[] characterIndex = new int[1];
        final TooltipNode[] tooltipNode = new TooltipNode[1];
        textArea.setMouseOverTextDelay(Duration.ofMillis(100));
        textArea.addEventHandler(MouseOverTextEvent.MOUSE_OVER_TEXT_BEGIN, event -> {
            if (tooltipPane.isEnabled()) {
                characterIndex[0] = event.getCharacterIndex();
                final List<TooltipProvider.TooltipDefinition> definitions = TooltipProvider.getTooltips(textArea.getText(), characterIndex[0]);
                textArea.requestFocus();
                selectActiveArea(textArea, definitions);
                if (!definitions.isEmpty()) {
                    tooltipNode[0] = createTooltipNode(definitions);
                    final Point2D location = event.getScreenPosition();
                    tooltipPane.showTooltip(tooltipNode[0], location.getX(), location.getY());
                }
            }
        });
        textArea.addEventHandler(MouseOverTextEvent.MOUSE_OVER_TEXT_END, event -> {
            if (tooltipPane.isEnabled()) {
                tooltipPane.hideTooltip();
            }
        });
    }

    /**
     * Returns the character index under (x, y) for either a text-area or a
     * text-field skin; extracted so the hit-test logic exists in one place.
     */
    private static int hitCharIndex(final TextInputControlSkin<?> skin, final double x, final double y) {
        final HitInfo info = (skin instanceof TextAreaSkin)
                ? ((TextAreaSkin) skin).getIndex(x, y)
                : ((TextFieldSkin) skin).getIndex(x, y);
        return info.getCharIndex();
    }

    /** Builds a TooltipNode populated with the given definitions. */
    private static TooltipNode createTooltipNode(final List<TooltipProvider.TooltipDefinition> definitions) {
        final TooltipNode node = new TooltipNode();
        node.setTooltips(definitions);
        return node;
    }

    /**
     * Computes the {start, finish} span covered by the given definitions, taking the
     * minimum non-negative start and the maximum non-negative finish, or returns
     * null when no definition supplies both bounds.
     */
    private static int[] activeSpan(final List<TooltipProvider.TooltipDefinition> definitions) {
        int s = Integer.MAX_VALUE;
        int e = Integer.MIN_VALUE;
        for (final TooltipDefinition definition : definitions) {
            if (definition.getStart() >= 0 && definition.getStart() < s) {
                s = definition.getStart();
            }
            if (definition.getFinish() >= 0 && definition.getFinish() > e) {
                e = definition.getFinish();
            }
        }
        return (s != Integer.MAX_VALUE && e != Integer.MIN_VALUE) ? new int[]{s, e} : null;
    }

    /** Selects the text range covered by the definitions, if any, in the control. */
    private static void selectActiveArea(final TextInputControl control, final List<TooltipProvider.TooltipDefinition> definitions) {
        final int[] span = activeSpan(definitions);
        if (span != null) {
            control.selectRange(span[0], span[1]);
        }
    }

    /** Selects the text range covered by the definitions, if any, in the rich-text area. */
    private static void selectActiveArea(final InlineCssTextArea textArea, final List<TooltipProvider.TooltipDefinition> definitions) {
        final int[] span = activeSpan(definitions);
        if (span != null) {
            textArea.selectRange(span[0], span[1]);
        }
    }
}
|
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.observable;
import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.internal.fuseable.FuseToObservable;
import io.reactivex.plugins.RxJavaPlugins;
/**
 * Subscribes to the source ObservableSource, drops every onNext value, and
 * completes (or errors) the downstream CompletableObserver when the source
 * terminates. Implements {@link FuseToObservable} so that a later
 * {@code toObservable()} can be fused back into {@code ObservableIgnoreElements}
 * instead of a generic conversion.
 */
public final class ObservableIgnoreElementsCompletable<T> extends Completable implements FuseToObservable<T> {
// upstream source whose elements are ignored
final ObservableSource<T> source;
public ObservableIgnoreElementsCompletable(ObservableSource<T> source) {
this.source = source;
}
@Override
public void subscribeActual(final CompletableObserver t) {
source.subscribe(new IgnoreObservable<T>(t));
}
@Override
public Observable<T> fuseToObservable() {
// route through RxJavaPlugins so assembly hooks still apply to the fused operator
return RxJavaPlugins.onAssembly(new ObservableIgnoreElements<T>(source));
}
/** Observer that forwards only the terminal events (and disposal) downstream. */
static final class IgnoreObservable<T> implements Observer<T>, Disposable {
// downstream observer receiving onComplete/onError
final CompletableObserver actual;
// upstream disposable, captured in onSubscribe and used for dispose/isDisposed
Disposable d;
IgnoreObservable(CompletableObserver t) {
this.actual = t;
}
@Override
public void onSubscribe(Disposable s) {
this.d = s;
// hand this wrapper downstream so cancellation propagates upstream
actual.onSubscribe(this);
}
@Override
public void onNext(T v) {
// deliberately ignored
}
@Override
public void onError(Throwable e) {
actual.onError(e);
}
@Override
public void onComplete() {
actual.onComplete();
}
@Override
public void dispose() {
d.dispose();
}
@Override
public boolean isDisposed() {
return d.isDisposed();
}
}
}
|
package fr.novaria.skygrid.api;
import fr.novaria.skygrid.api.block.BlockGroup;
import fr.novaria.skygrid.api.block.RandomBlockGroup;
import fr.novaria.skygrid.api.chest.ChestItem;
import fr.novaria.skygrid.api.chest.ChestItems;
import fr.novaria.skygrid.api.chest.ChestQuantity;
import fr.novaria.skygrid.api.chest.RandomChestItems;
import fr.novaria.skygrid.api.creature.CreatureGroup;
import fr.novaria.skygrid.api.creature.EntityTypeCreatureGroup;
import fr.novaria.skygrid.api.creature.RandomCreatureGroup;
import fr.novaria.utils.MinecraftUtils;
import fr.novaria.utils.base.LocationFunction;
import fr.novaria.utils.collect.RandomCollection;
import org.bukkit.Material;
import org.bukkit.entity.EntityType;
import org.bukkit.inventory.ItemStack;
import java.util.*;
public class SkyGridWorld {
public static final int BLOCK_SPACE = 4;
public static final int DEFAULT_WORLD_HEIGHT = 128;
public static final ItemStack[] EMPTY_ITEM_STACK_ARRAY = new ItemStack[0];
private int height;
private final RandomBlockGroup blockGroups = new RandomBlockGroup();
private final RandomChestItems chestItems = new RandomChestItems();
private final RandomCollection<ChestQuantity> chestQuantities = new RandomCollection<>();
private final RandomCreatureGroup creatureGroups = new RandomCreatureGroup();
public SkyGridWorld() {
this(DEFAULT_WORLD_HEIGHT);
}
public SkyGridWorld(int height) {
this.height = height;
}
public int getHeight() {
return height;
}
public void setHeight(int height) {
this.height = height;
}
public int getHighestBlockY() {
final int heightIndex = height - 1;
final int rest = heightIndex % BLOCK_SPACE;
return heightIndex - rest;
}
public Material getRandomBlock(Random random) {
return blockGroups.getRandomBlock(random);
}
public SkyGridWorld addBlockGroup(Material material, double weight) {
this.blockGroups.addBlockGroup(material, weight);
return this;
}
public SkyGridWorld addBlockGroup(BlockGroup blockGroup, double weight) {
this.blockGroups.addBlockGroup(blockGroup, weight);
return this;
}
public SkyGridWorld addBlockGroups(Iterator<? extends Map.Entry<? extends BlockGroup, ? extends Number>> blockGroups) {
this.blockGroups.addBlockGroups(blockGroups);
return this;
}
public ChestItem getRandomChestItem(Random random) {
final ChestItems chestItems = this.chestItems.getRandomChestItem(random);
return chestItems == null ? null : chestItems.getRandomChestItem(random);
}
public SkyGridWorld addChestItems(Material material, int count, double weight) {
this.chestItems.addChestItem(material, count, weight);
return this;
}
public SkyGridWorld addChestItems(ChestItems chestItems, double weight) {
this.chestItems.addChestItem(chestItems, weight);
return this;
}
public SkyGridWorld addChestItems(Iterator<? extends Map.Entry<? extends ChestItems, ? extends Number>> chestItems) {
this.chestItems.addChestItems(chestItems);
return this;
}
public int getRandomChestQuantity(Random random) {
final ChestQuantity chestQuantity = this.chestQuantities.get(random);
return chestQuantity == null ? 0 : chestQuantity.getRandomCount(random);
}
public SkyGridWorld addChestQuantities(Iterator<? extends Map.Entry<? extends ChestQuantity, ? extends Number>> quantities) {
this.chestQuantities.addAll(quantities);
return this;
}
public SkyGridWorld addChestQuantity(int quantity, double weight) {
this.chestQuantities.add(new ChestQuantity(quantity), weight);
return this;
}
public EntityType getRandomCreature(Random random) {
return creatureGroups.getRandomCreature(random);
}
public SkyGridWorld addCreatureGroup(EntityType creature, double weight) {
return addCreatureGroup(new EntityTypeCreatureGroup(creature), weight);
}
public SkyGridWorld addCreatureGroup(CreatureGroup creatureGroup, double weight) {
this.creatureGroups.addCreatureGroup(creatureGroup, weight);
return this;
}
public SkyGridWorld addCreatureGroups(Iterator<? extends Map.Entry<? extends CreatureGroup, ? extends Number>> creatureGroups) {
this.creatureGroups.addCreatureGroups(creatureGroups);
return this;
}
public void chunkIteration(LocationFunction function) {
final int highestBlockY = getHighestBlockY();
chunkIteration(BLOCK_SPACE, highestBlockY, function);
}
public void chunkIteration(int step, int maxY, LocationFunction function) {
for (int y = 0; y <= maxY; y += step) {
for (int x = 0; x < MinecraftUtils.CHUNK_SIZE; x += step) {
for (int z = 0; z < MinecraftUtils.CHUNK_SIZE; z += step) {
function.apply(x, y, z);
}
}
}
}
public ItemStack[] buildItemStacks(Random random) {
final int quantity = getRandomChestQuantity(random);
if (quantity == 0) {
return EMPTY_ITEM_STACK_ARRAY;
}
final List<ItemStack> itemStacks = new ArrayList<>(quantity);
for (int i = 0; i < quantity; i++) {
final ChestItem chestItem = getRandomChestItem(random);
if (chestItem == null) {
continue;
}
final Material material = chestItem.getRandomBlock(random);
if (material == null) {
continue;
}
final int count = chestItem.getRandomCount(random);
itemStacks.add(new ItemStack(material, count));
}
return itemStacks.isEmpty() ? EMPTY_ITEM_STACK_ARRAY : itemStacks.toArray(new ItemStack[0]);
}
@Override
public int hashCode() {
return Objects.hash(height, blockGroups, chestItems, chestQuantities, creatureGroups);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof SkyGridWorld)) {
return false;
}
final SkyGridWorld that = (SkyGridWorld) obj;
return height == that.height &&
blockGroups.equals(that.blockGroups) &&
chestItems.equals(that.chestItems) &&
chestQuantities.equals(that.chestQuantities) &&
creatureGroups.equals(that.creatureGroups);
}
@Override
public String toString() {
return super.toString();
}
}
|
package com.oracle.truffle.bpf.nodes.util;
//All opcode constants taken from bpf-rpython
//All opcode constants taken from bpf-rpython
/**
 * eBPF instruction opcode constants. An opcode byte is composed of an
 * instruction class (low 3 bits) combined with source, size, mode, and
 * operation bits, mirroring the eBPF instruction-set encoding.
 */
public final class EBPFOpcodes {
	// instruction classes (low 3 bits of the opcode byte)
	public static final byte EBPF_CLS_LD = 0x00;
	public static final byte EBPF_CLS_LDX = 0x01;
	public static final byte EBPF_CLS_ST = 0x02;
	public static final byte EBPF_CLS_STX = 0x03;
	public static final byte EBPF_CLS_ALU = 0x04;
	public static final byte EBPF_CLS_JMP = 0x05;
	public static final byte EBPF_CLS_ALU64 = 0x07;
	// source operand: immediate vs. register
	public static final byte EBPF_SRC_IMM = 0x00;
	public static final byte EBPF_SRC_REG = 0x08;
	// load/store access sizes: word, half-word, byte, double-word
	public static final byte EBPF_SIZE_W = 0x00;
	public static final byte EBPF_SIZE_H = 0x08;
	public static final byte EBPF_SIZE_B = 0x10;
	public static final byte EBPF_SIZE_DW = 0x18;
	// addressing modes
	public static final byte EBPF_MODE_IMM = 0x00;
	public static final byte EBPF_MODE_MEM = 0x60;
	// 32-bit ALU operations
	public static final byte EBPF_OP_ADD_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x00);
	public static final byte EBPF_OP_ADD_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x00);
	public static final byte EBPF_OP_SUB_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x10);
	public static final byte EBPF_OP_SUB_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x10);
	public static final byte EBPF_OP_MUL_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x20);
	public static final byte EBPF_OP_MUL_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x20);
	public static final byte EBPF_OP_DIV_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x30);
	public static final byte EBPF_OP_DIV_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x30);
	public static final byte EBPF_OP_OR_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x40);
	public static final byte EBPF_OP_OR_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x40);
	public static final byte EBPF_OP_AND_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x50);
	public static final byte EBPF_OP_AND_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x50);
	public static final byte EBPF_OP_LSH_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x60);
	public static final byte EBPF_OP_LSH_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x60);
	public static final byte EBPF_OP_RSH_IMM = (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x70);
	public static final byte EBPF_OP_RSH_REG = (EBPF_CLS_ALU | EBPF_SRC_REG | 0x70);
	// explicit (byte) casts below: these values exceed 0x7f, so the int result
	// of the | expression must be narrowed manually
	public static final byte EBPF_OP_NEG = (byte) (EBPF_CLS_ALU | 0x80);
	public static final byte EBPF_OP_MOD_IMM = (byte) (EBPF_CLS_ALU | EBPF_SRC_IMM | 0x90);
	public static final byte EBPF_OP_MOD_REG = (byte) (EBPF_CLS_ALU | EBPF_SRC_REG | 0x90);
	public static final byte EBPF_OP_XOR_IMM = (byte) (EBPF_CLS_ALU | EBPF_SRC_IMM | 0xa0);
	public static final byte EBPF_OP_XOR_REG = (byte) (EBPF_CLS_ALU | EBPF_SRC_REG | 0xa0);
	public static final byte EBPF_OP_MOV_IMM = (byte) (EBPF_CLS_ALU | EBPF_SRC_IMM | 0xb0);
	public static final byte EBPF_OP_MOV_REG = (byte) (EBPF_CLS_ALU | EBPF_SRC_REG | 0xb0);
	public static final byte EBPF_OP_ARSH_IMM = (byte) (EBPF_CLS_ALU | EBPF_SRC_IMM | 0xc0);
	public static final byte EBPF_OP_ARSH_REG = (byte) (EBPF_CLS_ALU | EBPF_SRC_REG | 0xc0);
	// byte-order conversion instructions
	public static final byte EBPF_OP_LE = (byte) (EBPF_CLS_ALU | EBPF_SRC_IMM | 0xd0);
	public static final byte EBPF_OP_BE = (byte) (EBPF_CLS_ALU | EBPF_SRC_REG | 0xd0);
	// 64-bit ALU operations (same operation bits, ALU64 class)
	public static final byte EBPF_OP_ADD64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x00);
	public static final byte EBPF_OP_ADD64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x00);
	public static final byte EBPF_OP_SUB64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x10);
	public static final byte EBPF_OP_SUB64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x10);
	public static final byte EBPF_OP_MUL64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x20);
	public static final byte EBPF_OP_MUL64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x20);
	public static final byte EBPF_OP_DIV64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x30);
	public static final byte EBPF_OP_DIV64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x30);
	public static final byte EBPF_OP_OR64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x40);
	public static final byte EBPF_OP_OR64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x40);
	public static final byte EBPF_OP_AND64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x50);
	public static final byte EBPF_OP_AND64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x50);
	public static final byte EBPF_OP_LSH64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x60);
	public static final byte EBPF_OP_LSH64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x60);
	public static final byte EBPF_OP_RSH64_IMM = (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x70);
	public static final byte EBPF_OP_RSH64_REG = (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x70);
	public static final byte EBPF_OP_NEG64 = (byte) (EBPF_CLS_ALU64 | 0x80);
	public static final byte EBPF_OP_MOD64_IMM = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0x90);
	public static final byte EBPF_OP_MOD64_REG = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0x90);
	public static final byte EBPF_OP_XOR64_IMM = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0xa0);
	public static final byte EBPF_OP_XOR64_REG = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0xa0);
	public static final byte EBPF_OP_MOV64_IMM = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0xb0);
	public static final byte EBPF_OP_MOV64_REG = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0xb0);
	public static final byte EBPF_OP_ARSH64_IMM = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_IMM | 0xc0);
	public static final byte EBPF_OP_ARSH64_REG = (byte) (EBPF_CLS_ALU64 | EBPF_SRC_REG | 0xc0);
	// memory loads (register-indirect)
	public static final byte EBPF_OP_LDXW = (EBPF_CLS_LDX | EBPF_MODE_MEM | EBPF_SIZE_W);
	public static final byte EBPF_OP_LDXH = (EBPF_CLS_LDX | EBPF_MODE_MEM | EBPF_SIZE_H);
	public static final byte EBPF_OP_LDXB = (EBPF_CLS_LDX | EBPF_MODE_MEM | EBPF_SIZE_B);
	public static final byte EBPF_OP_LDXDW = (EBPF_CLS_LDX | EBPF_MODE_MEM | EBPF_SIZE_DW);
	// memory stores from immediate
	public static final byte EBPF_OP_STW = (EBPF_CLS_ST | EBPF_MODE_MEM | EBPF_SIZE_W);
	public static final byte EBPF_OP_STH = (EBPF_CLS_ST | EBPF_MODE_MEM | EBPF_SIZE_H);
	public static final byte EBPF_OP_STB = (EBPF_CLS_ST | EBPF_MODE_MEM | EBPF_SIZE_B);
	public static final byte EBPF_OP_STDW = (EBPF_CLS_ST | EBPF_MODE_MEM | EBPF_SIZE_DW);
	// memory stores from register
	public static final byte EBPF_OP_STXW = (EBPF_CLS_STX | EBPF_MODE_MEM | EBPF_SIZE_W);
	public static final byte EBPF_OP_STXH = (EBPF_CLS_STX | EBPF_MODE_MEM | EBPF_SIZE_H);
	public static final byte EBPF_OP_STXB = (EBPF_CLS_STX | EBPF_MODE_MEM | EBPF_SIZE_B);
	public static final byte EBPF_OP_STXDW = (EBPF_CLS_STX | EBPF_MODE_MEM | EBPF_SIZE_DW);
	// 64-bit immediate load (occupies two instruction slots)
	public static final byte EBPF_OP_LDDW = (EBPF_CLS_LD | EBPF_MODE_IMM | EBPF_SIZE_DW);
	// jump instructions
	public static final byte EBPF_OP_JA = (EBPF_CLS_JMP | 0x00);
	public static final byte EBPF_OP_JEQ_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x10);
	public static final byte EBPF_OP_JEQ_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x10);
	public static final byte EBPF_OP_JGT_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x20);
	public static final byte EBPF_OP_JGT_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x20);
	public static final byte EBPF_OP_JGE_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x30);
	public static final byte EBPF_OP_JGE_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x30);
	public static final byte EBPF_OP_JSET_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x40);
	public static final byte EBPF_OP_JSET_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x40);
	public static final byte EBPF_OP_JNE_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x50);
	public static final byte EBPF_OP_JNE_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x50);
	public static final byte EBPF_OP_JSGT_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x60);
	public static final byte EBPF_OP_JSGT_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x60);
	public static final byte EBPF_OP_JSGE_IMM = (EBPF_CLS_JMP | EBPF_SRC_IMM | 0x70);
	public static final byte EBPF_OP_JSGE_REG = (EBPF_CLS_JMP | EBPF_SRC_REG | 0x70);
	public static final byte EBPF_OP_CALL = (byte) (EBPF_CLS_JMP | 0x80);
	public static final byte EBPF_OP_EXIT = (byte) (EBPF_CLS_JMP | 0x90);
	public static final byte EBPF_OP_JLT_IMM = (byte) (EBPF_CLS_JMP | EBPF_SRC_IMM | 0xa0);
	public static final byte EBPF_OP_JLT_REG = (byte) (EBPF_CLS_JMP | EBPF_SRC_REG | 0xa0);
	public static final byte EBPF_OP_JLE_IMM = (byte) (EBPF_CLS_JMP | EBPF_SRC_IMM | 0xb0);
	public static final byte EBPF_OP_JLE_REG = (byte) (EBPF_CLS_JMP | EBPF_SRC_REG | 0xb0);
	public static final byte EBPF_OP_JSLT_IMM = (byte) (EBPF_CLS_JMP | EBPF_SRC_IMM | 0xc0);
	public static final byte EBPF_OP_JSLT_REG = (byte) (EBPF_CLS_JMP | EBPF_SRC_REG | 0xc0);
	public static final byte EBPF_OP_JSLE_IMM = (byte) (EBPF_CLS_JMP | EBPF_SRC_IMM | 0xd0);
	public static final byte EBPF_OP_JSLE_REG = (byte) (EBPF_CLS_JMP | EBPF_SRC_REG | 0xd0);
	// masks for extracting the class and the ALU operation from an opcode
	public static final byte EBPF_CLS_MASK = 0x07;
	public static final byte EBPF_ALU_OP_MASK = (byte) 0xf0;

	// constants holder; never instantiated
	private EBPFOpcodes() {
	}
}
|
package software.plusminus.data.service.entity.test;
import lombok.Data;
import javax.persistence.Embedded;
import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass;
import javax.persistence.PrimaryKeyJoinColumn;
// Base mapped superclass shared by multi-entity test fixtures: a to-one
// association plus an embedded value object.
// NOTE(review): Lombok @Data generates equals/hashCode/toString over all
// fields, including the @ManyToOne association — confirm this is intended
// for entities (can trigger lazy loading / recursion in some mappings).
@Data
@MappedSuperclass
public abstract class AbstractMultiEntity {
// association joined via the primary key rather than a separate FK column
@ManyToOne
@PrimaryKeyJoinColumn
private MultiEntity left;
// embedded value object stored in this entity's own table
@Embedded
private EmbeddableClass embeddable;
}
|
package org.sunbird.integration.test.user.skills;
import com.consol.citrus.annotations.CitrusTest;
import com.consol.citrus.testng.CitrusParameters;
import javax.ws.rs.core.MediaType;
import org.springframework.http.HttpStatus;
import org.sunbird.integration.test.common.BaseCitrusTestRunner;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class ReadUserSkillTest extends BaseCitrusTestRunner {
public static final String TEST_NAME_READ_USER_SKILL_FAILURE_WITHOUT_ACCESS_TOKEN =
"testReadUserSkillFailureWithoutAccessToken";
public static final String TEST_NAME_READ_USER_SKILL_FAILURE_WITH_INVALID_ENDORSED_USER_ID =
"testReadUserSkillFailureWithInvalidEndorsedUserId";
public static final String TEST_NAME_READ_USER_SKILL_FAILURE_WITHOUT_ENDORSED_USER_ID =
"testReadUserSkillFailureWithoutEndorsedUserId";
public static final String TEMPLATE_DIR = "templates/user/skill/read";
private String getReadUserSkillUrl() {
return getLmsApiUriPath("/api/user/v1/skill/read", "/v1/user/skill/read");
}
@DataProvider(name = "readUserSkillFailureDataProvider")
public Object[][] readUserSkillFailureDataProvider() {
return new Object[][] {
new Object[] {
TEST_NAME_READ_USER_SKILL_FAILURE_WITHOUT_ACCESS_TOKEN, false, HttpStatus.UNAUTHORIZED
},
new Object[] {
TEST_NAME_READ_USER_SKILL_FAILURE_WITH_INVALID_ENDORSED_USER_ID,
true,
HttpStatus.BAD_REQUEST
},
new Object[] {
TEST_NAME_READ_USER_SKILL_FAILURE_WITHOUT_ENDORSED_USER_ID, true, HttpStatus.BAD_REQUEST
},
};
}
@Test(dataProvider = "readUserSkillFailureDataProvider")
@CitrusParameters({"testName", "isAuthRequired", "httpStatusCode"})
@CitrusTest
public void testReadUserSkillFailure(
String testName, boolean isAuthRequired, HttpStatus httpStatusCode) {
getTestCase().setName(testName);
getAuthToken(this, isAuthRequired);
getTestCase().setName(testName);
performPostTest(
this,
TEMPLATE_DIR,
testName,
getReadUserSkillUrl(),
REQUEST_JSON,
MediaType.APPLICATION_JSON,
isAuthRequired,
httpStatusCode,
RESPONSE_JSON);
}
}
|
package com.lchtime.safetyexpress.ui.circle;
import android.content.Context;
import android.content.Intent;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.bumptech.glide.Glide;
import com.hyphenate.easeui.bean.ContactBean;
import com.lchtime.safetyexpress.R;
import com.lchtime.safetyexpress.ui.chat.hx.activity.UserProfileActivity;
import com.lchtime.safetyexpress.ui.chat.hx.adapter.AddFriendsCommendAdapter;
import com.lchtime.safetyexpress.ui.chat.hx.bean.UserBean;
import java.util.List;
import static android.R.attr.type;
import static com.lchtime.safetyexpress.ui.chat.hx.adapter.AddFriendsCommendAdapter.FIND_FRIENDS;
/**
* Created by ${Hongcha36} on 2017/11/28.
*/
public class MyFriendAdapter extends RecyclerView.Adapter<AddFriendsCommendAdapter.MyHolder> {
private Context mContext;
private List<ContactBean> mFriends;
public MyFriendAdapter(Context mContext, List<ContactBean> mFriends) {
this.mContext = mContext;
this.mFriends = mFriends;
}
@Override
public AddFriendsCommendAdapter.MyHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(mContext).inflate(R.layout.em_row_add_friend_item , null);
AddFriendsCommendAdapter.MyHolder holder = new AddFriendsCommendAdapter.MyHolder(view);
return holder;
}
@Override
public void onBindViewHolder(AddFriendsCommendAdapter.MyHolder holder, int position) {
final ContactBean bean = mFriends.get(position);
Glide.with(mContext)
.load(bean.ud_photo_fileid)
.placeholder(R.drawable.qun_list)
.error(R.drawable.qun_list)
.into(holder.mAvatar);
holder.mName.setText(bean.ud_nickname);
holder.mMessage.setText(bean.user);
//隐藏添加好友
holder.mIndicator.setVisibility(View.GONE);
holder.itemView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(mContext, UserProfileActivity.class);
intent.putExtra("username", bean.hx_account);
mContext.startActivity(intent);
}
});
}
@Override
public int getItemCount() {
return mFriends == null ? 0 : mFriends.size();
}
}
|
package com.zc.server.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.zc.server.pojo.SalaryAdjust;
/**
* <p>
* Mapper 接口
* </p>
*
* @author MoYu
* @since 2021-04-10
*/
public interface SalaryAdjustMapper extends BaseMapper<SalaryAdjust> {
}
|
package leilao;
public class Lance {
private Usuario usuario;
private double valor;
public Lance(Usuario usuario, double valor) {
this.usuario = usuario;
this.valor = valor;
}
public Usuario getUsuario() {
return usuario;
}
public double getValor() {
return valor;
}
}
|
//Copyright (C) 2014 TU Dortmund
//This file is part of LearnLib, http://www.learnlib.de/.
//
//LearnLib is free software; you can redistribute it and/or
//modify it under the terms of the GNU Lesser General Public
//License version 3.0 as published by the Free Software Foundation.
//
//LearnLib is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
//Lesser General Public License for more details.
//
//You should have received a copy of the GNU Lesser General Public
//License along with LearnLib; if not, see
//http://www.gnu.de/documents/lgpl.en.html.
package de.learnlib.rajava.automata.expressions;
import de.learnlib.rajava.automata.domain.DataValue;
/**
* a != b
*
*/
public class BinaryInEquality implements Relation {
@Override
public int arity() {
return 2;
}
@Override
public boolean contains(DataValue[] tuple) {
return !tuple[0].equals(tuple[1]);
}
@Override
public String toString() {
return "!=";
}
}
|
/*
* Copyright (C) 2013 4th Line GmbH, Switzerland
*
* The contents of this file are subject to the terms of either the GNU
* Lesser General Public License Version 2 or later ("LGPL") or the
* Common Development and Distribution License Version 1 or later
* ("CDDL") (collectively, the "License"). You may not use this file
* except in compliance with the License. See LICENSE.txt for more
* information.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
package org.fourthline.cling.workbench.spi;
import javax.swing.JPanel;
import javax.swing.ImageIcon;
import javax.swing.Icon;
import java.awt.Dimension;
import java.awt.Graphics;
public class DitheredBackgroundPanel extends JPanel {
protected Icon ditherBackground =
new ImageIcon(DitheredBackgroundPanel.class.getResource("img/ditherbackground.png"));
public void paintComponent(Graphics g) {
Dimension dim = getSize();
int x, y;
if (isOpaque()) {
super.paintComponent(g);
}
for (y = 0; y < dim.height; y += ditherBackground.getIconHeight()) {
for (x = 0; x < dim.width; x += ditherBackground.getIconWidth()) {
ditherBackground.paintIcon(this, g, x, y);
}
}
}
}
|
/**
* Allows the constructors to be edited.
* @author Alan Jeffrey
* @version v1.0 1998/06/03
*/
public class PremonTargetConstructors
extends PremonTarget
{
private PremonJJTCon oriConstructors;
private PremonCon oriConstructorsDesugared;
/**
* Create a new target.
* @param oriConstructors the raw context to edit.
* @param oriConstructorsDesugared the desugared context to edit.
*/
public PremonTargetConstructors (PremonJJTCon oriConstructors, PremonCon oriConstructorsDesugared) {
this.oriConstructors = oriConstructors;
this.oriConstructorsDesugared = oriConstructorsDesugared;
text = oriConstructors;
parsed = oriConstructors;
desugared = oriConstructorsDesugared;
name = "constructors";
}
public void desugar () throws TypeError {
desugared = ((PremonJJTCon)parsed).desugarPrimitives ();
}
public void parse (String s) throws ParseException {
text = new PrintableString (s);
parsed = new PremonJJTParser (s).parseCon ();
}
public void reset () {
text = oriConstructors;
parsed = oriConstructors;
desugared = oriConstructorsDesugared;
}
/**
* The desugared constructors.
* @return the desugared constructors.
*/
public PremonCon constructors () {
return (PremonCon)desugared;
}
}
|
package com.ahmetc.yazimkurallari.Database;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
public class DatabaseHelper extends SQLiteOpenHelper {
public DatabaseHelper(Context context) {
super(context, "yazimkurallari.sqlite",null, 1);
}
@Override
public void onCreate(SQLiteDatabase db) {
db.execSQL("CREATE TABLE IF NOT EXISTS \"kategoriler\" (\n" +
"\t\"kategori_id\"\tINTEGER PRIMARY KEY AUTOINCREMENT,\n" +
"\t\"kategori_ad\"\tTEXT\n" +
");");
db.execSQL("CREATE TABLE IF NOT EXISTS \"sorular\" (\n" +
"\t\"soru_id\"\tINTEGER PRIMARY KEY AUTOINCREMENT,\n" +
"\t\"soru_dogru\"\tTEXT,\n" +
"\t\"soru_yanlis\"\tTEXT,\n" +
"\t\"soru_yanlisyapilan\"\tINTEGER,\n" +
"\t\"soru_dogruyapilan\"\tINTEGER,\n" +
"\t\"soru_isaret\"\tINTEGER,\n" +
"\t\"kategori_id\"\tINTEGER,\n" +
"\tFOREIGN KEY(\"kategori_id\") REFERENCES \"kategoriler\"\n" +
");");
db.execSQL("CREATE TABLE IF NOT EXISTS \"kullanici\" (\n" +
"\t\"dogru_sayisi\"\tINTEGER,\n" +
"\t\"yanlis_sayisi\"\tINTEGER\n" +
");");
db.execSQL("CREATE TABLE IF NOT EXISTS \"ozel_sorular\" (\n" +
"\t\"soru_id\"\tINTEGER PRIMARY KEY AUTOINCREMENT,\n" +
"\t\"soru_dogru\"\tTEXT,\n" +
"\t\"soru_yanlis\"\tTEXT,\n" +
"\t\"soru_yanlisyapilan\"\tINTEGER,\n" +
"\t\"soru_dogruyapilan\"\tINTEGER,\n" +
"\t\"soru_isaret\"\tINTEGER,\n" +
"\t\"kategori_id\"\tINTEGER,\n" +
"\tFOREIGN KEY(\"kategori_id\") REFERENCES \"kategoriler\"\n" +
");");
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
db.execSQL("DROP TABLE IF EXISTS sorular");
db.execSQL("DROP TABLE IF EXISTS kategoriler");
db.execSQL("DROP TABLE IF EXISTS kullanici");
db.execSQL("DROP TABLE IF EXISTS ozel_sorular");
onCreate(db);
}
}
|
package com.example.penyyouquandemo.activity;
import android.Manifest;
import android.content.Intent;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.Toast;
import androidx.annotation.NonNull;
import com.example.penyyouquandemo.R;
import com.example.penyyouquandemo.model.Moment;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import cn.bingoogolapple.photopicker.activity.BGAPPToolbarActivity;
import cn.bingoogolapple.photopicker.activity.BGAPhotoPickerActivity;
import cn.bingoogolapple.photopicker.activity.BGAPhotoPickerPreviewActivity;
import cn.bingoogolapple.photopicker.widget.BGASortableNinePhotoLayout;
import pub.devrel.easypermissions.AfterPermissionGranted;
import pub.devrel.easypermissions.EasyPermissions;
/**
* 你自己项目里「可以不继承 BGAPPToolbarActivity」,我在这里继承 BGAPPToolbarActivity 只是为了方便写 Demo
*/
public class MomentAddActivity extends BGAPPToolbarActivity implements EasyPermissions.PermissionCallbacks, BGASortableNinePhotoLayout.Delegate {
private static final int PRC_PHOTO_PICKER = 1;
private static final int RC_CHOOSE_PHOTO = 1;
private static final int RC_PHOTO_PREVIEW = 2;
private static final String EXTRA_MOMENT = "EXTRA_MOMENT";
// ==================================== 测试图片选择器 START ====================================
/**
* 是否是单选「测试接口用的」
*/
private CheckBox mSingleChoiceCb;
/**
* 是否具有拍照功能「测试接口用的」
*/
private CheckBox mTakePhotoCb;
// ==================================== 测试图片选择器 END ====================================
// ==================================== 测试拖拽排序九宫格图片控件 START ====================================
/**
* 是否可编辑
*/
private CheckBox mEditableCb;
/**
* 是否显示九图控件的加号按钮「测试接口用的」
*/
private CheckBox mPlusCb;
/**
* 是否开启拖拽排序功能「测试接口用的」
*/
private CheckBox mSortableCb;
/**
* 拖拽排序九宫格控件
*/
private BGASortableNinePhotoLayout mPhotosSnpl;
// ==================================== 测试拖拽排序九宫格图片控件 END ====================================
private EditText mContentEt;
public static Moment getMoment(Intent intent) {
return intent.getParcelableExtra(EXTRA_MOMENT);
}
@Override
protected void initView(Bundle savedInstanceState) {
setContentView(R.layout.activity_moment_add);
mSingleChoiceCb = findViewById(R.id.cb_moment_add_single_choice);
mTakePhotoCb = findViewById(R.id.cb_moment_add_take_photo);
mEditableCb = findViewById(R.id.cb_moment_add_editable);
mPlusCb = findViewById(R.id.cb_moment_add_plus);
mSortableCb = findViewById(R.id.cb_moment_add_sortable);
mContentEt = findViewById(R.id.et_moment_add_content);
mPhotosSnpl = findViewById(R.id.snpl_moment_add_photos);
}
@Override
protected void setListener() {
mSingleChoiceCb.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
if (checked) {
mPhotosSnpl.setData(null);
mPhotosSnpl.setMaxItemCount(1);
} else {
mPhotosSnpl.setMaxItemCount(9);
}
}
});
mEditableCb.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
mPhotosSnpl.setEditable(checked);
}
});
mPlusCb.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
mPhotosSnpl.setPlusEnable(checked);
}
});
mSortableCb.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
mPhotosSnpl.setSortable(checked);
}
});
// 设置拖拽排序控件的代理
mPhotosSnpl.setDelegate(this);
}
@Override
protected void processLogic(Bundle savedInstanceState) {
setTitle("添加朋友圈");
mEditableCb.setChecked(mPhotosSnpl.isEditable());
mPlusCb.setChecked(mPhotosSnpl.isPlusEnable());
mSortableCb.setChecked(mPhotosSnpl.isSortable());
}
public void onClick(View v) {
if (v.getId() == R.id.tv_moment_add_choice_photo) {
choicePhotoWrapper();
} else if (v.getId() == R.id.tv_moment_add_publish) {
String content = mContentEt.getText().toString().trim();
if (content.length() == 0 && mPhotosSnpl.getItemCount() == 0) {
Toast.makeText(this, "必须填写这一刻的想法或选择照片!", Toast.LENGTH_SHORT).show();
return;
}
Intent intent = new Intent();
intent.putExtra(EXTRA_MOMENT, new Moment(mContentEt.getText().toString().trim(), mPhotosSnpl.getData()));
setResult(RESULT_OK, intent);
finish();
}
}
@Override
public void onClickAddNinePhotoItem(BGASortableNinePhotoLayout sortableNinePhotoLayout, View view, int position, ArrayList<String> models) {
choicePhotoWrapper();
}
@Override
public void onClickDeleteNinePhotoItem(BGASortableNinePhotoLayout sortableNinePhotoLayout, View view, int position, String model, ArrayList<String> models) {
mPhotosSnpl.removeItem(position);
}
@Override
public void onClickNinePhotoItem(BGASortableNinePhotoLayout sortableNinePhotoLayout, View view, int position, String model, ArrayList<String> models) {
Intent photoPickerPreviewIntent = new BGAPhotoPickerPreviewActivity.IntentBuilder(this)
.previewPhotos(models) // 当前预览的图片路径集合
.selectedPhotos(models) // 当前已选中的图片路径集合
.maxChooseCount(mPhotosSnpl.getMaxItemCount()) // 图片选择张数的最大值
.currentPosition(position) // 当前预览图片的索引
.isFromTakePhoto(false) // 是否是拍完照后跳转过来
.build();
startActivityForResult(photoPickerPreviewIntent, RC_PHOTO_PREVIEW);
}
@Override
public void onNinePhotoItemExchanged(BGASortableNinePhotoLayout sortableNinePhotoLayout, int fromPosition, int toPosition, ArrayList<String> models) {
Toast.makeText(this, "排序发生变化", Toast.LENGTH_SHORT).show();
}
@AfterPermissionGranted(PRC_PHOTO_PICKER)
private void choicePhotoWrapper() {
String[] perms = {Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.CAMERA};
if (EasyPermissions.hasPermissions(this, perms)) {
// 拍照后照片的存放目录,改成你自己拍照后要存放照片的目录。如果不传递该参数的话就没有拍照功能
File takePhotoDir = new File(Environment.getExternalStorageDirectory(), "BGAPhotoPickerTakePhoto");
Intent photoPickerIntent = new BGAPhotoPickerActivity.IntentBuilder(this)
.cameraFileDir(mTakePhotoCb.isChecked() ? takePhotoDir : null) // 拍照后照片的存放目录,改成你自己拍照后要存放照片的目录。如果不传递该参数的话则不开启图库里的拍照功能
.maxChooseCount(mPhotosSnpl.getMaxItemCount() - mPhotosSnpl.getItemCount()) // 图片选择张数的最大值
.selectedPhotos(null) // 当前已选中的图片路径集合
.pauseOnScroll(false) // 滚动列表时是否暂停加载图片
.build();
startActivityForResult(photoPickerIntent, RC_CHOOSE_PHOTO);
} else {
EasyPermissions.requestPermissions(this, "图片选择需要以下权限:\n\n1.访问设备上的照片\n\n2.拍照", PRC_PHOTO_PICKER, perms);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this);
}
@Override
public void onPermissionsGranted(int requestCode, List<String> perms) {
}
@Override
public void onPermissionsDenied(int requestCode, List<String> perms) {
if (requestCode == PRC_PHOTO_PICKER) {
Toast.makeText(this, "您拒绝了「图片选择」所需要的相关权限!", Toast.LENGTH_SHORT).show();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK && requestCode == RC_CHOOSE_PHOTO) {
if (mSingleChoiceCb.isChecked()) {
mPhotosSnpl.setData(BGAPhotoPickerActivity.getSelectedPhotos(data));
} else {
mPhotosSnpl.addMoreData(BGAPhotoPickerActivity.getSelectedPhotos(data));
}
} else if (requestCode == RC_PHOTO_PREVIEW) {
mPhotosSnpl.setData(BGAPhotoPickerPreviewActivity.getSelectedPhotos(data));
}
}
}
|
package com.nandbox.bots.api.test;
import com.nandbox.bots.api.Nandbox;
import com.nandbox.bots.api.Nandbox.Api;
import com.nandbox.bots.api.NandboxClient;
import com.nandbox.bots.api.data.Chat;
import com.nandbox.bots.api.data.User;
import com.nandbox.bots.api.inmessages.BlackList;
import com.nandbox.bots.api.inmessages.ChatAdministrators;
import com.nandbox.bots.api.inmessages.ChatMember;
import com.nandbox.bots.api.inmessages.ChatMenuCallback;
import com.nandbox.bots.api.inmessages.IncomingMessage;
import com.nandbox.bots.api.inmessages.InlineMessageCallback;
import com.nandbox.bots.api.inmessages.InlineSearch;
import com.nandbox.bots.api.inmessages.MessageAck;
import com.nandbox.bots.api.inmessages.PermanentUrl;
import com.nandbox.bots.api.inmessages.WhiteList;
import net.minidev.json.JSONObject;
public class EchoTextMessage {
public static final String TOKEN = "<PUT your token here >";
public static void main(String[] args) throws Exception {
NandboxClient client = NandboxClient.get();
client.connect(TOKEN, new Nandbox.Callback() {
Nandbox.Api api = null;
@Override
public void onConnect(Api api) {
System.out.println("Authenticated");
this.api = api;
}
@Override
public void onReceive(IncomingMessage incomingMsg) {
if (incomingMsg.isTextMsg()) {
String chatId = incomingMsg.getChat().getId(); // get your chat Id
String text = incomingMsg.getText(); // get your text message
api.sendText(chatId, text); // Sending message back as an Echo
}
}
@Override
public void onReceive(JSONObject obj) {
}
@Override
public void onClose() {
}
@Override
public void onError() {
}
@Override
public void onChatMenuCallBack(ChatMenuCallback chatMenuCallback) {
}
@Override
public void onMessagAckCallback(MessageAck msgAck) {
}
@Override
public void onUserJoinedBot(User user) {
}
@Override
public void onChatMember(ChatMember chatMember) {
}
@Override
public void onChatAdministrators(ChatAdministrators chatAdministrators) {
}
@Override
public void userStartedBot(User user) {
}
@Override
public void onMyProfile(User user) {
}
@Override
public void onUserDetails(User user) {
}
@Override
public void userStoppedBot(User user) {
}
@Override
public void userLeftBot(User user) {
}
@Override
public void onInlineMessageCallback(InlineMessageCallback inlineMsgCallback) {
// TODO Auto-generated method stub
}
@Override
public void permanentUrl(PermanentUrl permenantUrl) {
// TODO Auto-generated method stub
}
@Override
public void onChatDetails(Chat chat) {
// TODO Auto-generated method stub
}
@Override
public void onInlineSearh(InlineSearch inlineSearch) {
// TODO Auto-generated method stub
}
@Override
public void onBlackList(BlackList blackList) {
// TODO Auto-generated method stub
}
@Override
public void onWhiteList(WhiteList blackList) {
// TODO Auto-generated method stub
}
@Override
public void onScheduleMessage(IncomingMessage incomingScheduleMsg) {
}
});
}
}
|
package com.example.tests;
import java.util.regex.Pattern;
import java.util.concurrent.TimeUnit;
import org.testng.annotations.*;
import static org.testng.Assert.*;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.support.ui.Select;
public class UntitledTestCase {
private WebDriver driver;
private String baseUrl;
private boolean acceptNextAlert = true;
private StringBuffer verificationErrors = new StringBuffer();
@BeforeClass(alwaysRun = true)
public void setUp() throws Exception {
driver = new FirefoxDriver();
baseUrl = "https://www.google.com/";
driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
}
@Test
public void testUntitledTestCase() throws Exception {
driver.get("http://localhost/addressbook/");
driver.findElement(By.linkText("add new")).click();
driver.findElement(By.name("user")).click();
driver.findElement(By.name("user")).clear();
driver.findElement(By.name("user")).sendKeys("admin");
driver.findElement(By.name("pass")).click();
driver.findElement(By.name("pass")).clear();
driver.findElement(By.name("pass")).sendKeys("secret");
driver.findElement(By.xpath("//input[@value='Login']")).click();
driver.findElement(By.name("firstname")).click();
driver.findElement(By.name("firstname")).clear();
driver.findElement(By.name("firstname")).sendKeys("Bob");
driver.findElement(By.name("middlename")).click();
driver.findElement(By.name("middlename")).clear();
driver.findElement(By.name("middlename")).sendKeys("P");
driver.findElement(By.name("lastname")).click();
driver.findElement(By.name("lastname")).clear();
driver.findElement(By.name("lastname")).sendKeys("Jonson");
driver.findElement(By.name("email")).click();
driver.findElement(By.name("email")).clear();
driver.findElement(By.name("email")).sendKeys("test@test.ru");
driver.findElement(By.xpath("(//input[@name='submit'])[2]")).click();
driver.findElement(By.linkText("home page")).click();
driver.findElement(By.linkText("Logout")).click();
}
@AfterClass(alwaysRun = true)
public void tearDown() throws Exception {
driver.quit();
String verificationErrorString = verificationErrors.toString();
if (!"".equals(verificationErrorString)) {
fail(verificationErrorString);
}
}
private boolean isElementPresent(By by) {
try {
driver.findElement(by);
return true;
} catch (NoSuchElementException e) {
return false;
}
}
private boolean isAlertPresent() {
try {
driver.switchTo().alert();
return true;
} catch (NoAlertPresentException e) {
return false;
}
}
private String closeAlertAndGetItsText() {
try {
Alert alert = driver.switchTo().alert();
String alertText = alert.getText();
if (acceptNextAlert) {
alert.accept();
} else {
alert.dismiss();
}
return alertText;
} finally {
acceptNextAlert = true;
}
}
}
|
package org.bouncycastle.tls;
import java.io.OutputStream;
public interface TlsCompression
{
OutputStream compress(OutputStream output);
OutputStream decompress(OutputStream output);
}
|
package com.info6250.packages.user;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import com.info6250.packages.entities.User_Address;
public class BoxItUserAddress {
private long id;
@NotNull(message = "is required")
@Pattern(regexp = "^[A-Za-z]{3,50}$", message="Must only be alphabets and the name length can be between 3 and 50 characters")
private String userName;
@NotNull(message = "is required")
@Pattern(regexp = "^[a-zA-Z0-9 ]+$", message="Only alphanumeric values accepted")
private String address;
@NotNull(message = "is required")
@Pattern(regexp = "^^\\d{5}$", message="Must be five digit")
private String zipCode;
@NotNull(message = "is required")
@Pattern(regexp = "^^\\d{10}$", message="Must be ten digit")
private String contactInfo;
@NotNull(message = "is required")
private long user_id;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getZipCode() {
return zipCode;
}
public void setZipCode(String zipCode) {
this.zipCode = zipCode;
}
public long getUser_id() {
return user_id;
}
public void setUser_id(long user_id) {
this.user_id = user_id;
}
public String getContactInfo() {
return contactInfo;
}
public void setContactInfo(String contactInfo) {
this.contactInfo = contactInfo;
}
public void convert(User_Address address) {
this.user_id = address.getUser_id();
this.id = address.getId();
this.address = address.getAddress();
this.zipCode = address.getZipCode();
this.contactInfo = address.getContactInfo();
this.userName = address.getUserName();
}
@Override
public String toString() {
return "BoxItUserAddress [id=" + id + ", userName=" + userName + ", address=" + address + ", zipCode=" + zipCode
+ ", contactInfo=" + contactInfo + ", user_id=" + user_id + ", getId()=" + getId() + ", getUserName()="
+ getUserName() + ", getAddress()=" + getAddress() + ", getZipCode()=" + getZipCode()
+ ", getUser_id()=" + getUser_id() + ", getContactInfo()=" + getContactInfo();
}
}
|
/*
* Copyright (C) 2016 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okhttp3.internal.http;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.HttpRetryException;
import java.net.ProtocolException;
import java.net.Proxy;
import java.net.SocketTimeoutException;
import java.security.cert.CertificateException;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSocketFactory;
import okhttp3.Address;
import okhttp3.CertificatePinner;
import okhttp3.Connection;
import okhttp3.HttpUrl;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okhttp3.Route;
import okhttp3.internal.connection.RouteException;
import okhttp3.internal.connection.StreamAllocation;
import okhttp3.internal.http2.ConnectionShutdownException;
import static java.net.HttpURLConnection.HTTP_CLIENT_TIMEOUT;
import static java.net.HttpURLConnection.HTTP_MOVED_PERM;
import static java.net.HttpURLConnection.HTTP_MOVED_TEMP;
import static java.net.HttpURLConnection.HTTP_MULT_CHOICE;
import static java.net.HttpURLConnection.HTTP_PROXY_AUTH;
import static java.net.HttpURLConnection.HTTP_SEE_OTHER;
import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED;
import static okhttp3.internal.Util.closeQuietly;
import static okhttp3.internal.http.StatusLine.HTTP_PERM_REDIRECT;
import static okhttp3.internal.http.StatusLine.HTTP_TEMP_REDIRECT;
/**
* This interceptor recovers from failures and follows redirects as necessary. It may throw an
* {@link IOException} if the call was canceled.
*/
public final class RetryAndFollowUpInterceptor implements Interceptor {
/**
* How many redirects and auth challenges should we attempt? Chrome follows 21 redirects; Firefox,
* curl, and wget follow 20; Safari follows 16; and HTTP/1.0 recommends 5.
*/
private static final int MAX_FOLLOW_UPS = 20;
private final OkHttpClient client;
private final boolean forWebSocket;
private StreamAllocation streamAllocation;
private Object callStackTrace;
private volatile boolean canceled;
public RetryAndFollowUpInterceptor(OkHttpClient client, boolean forWebSocket) {
this.client = client;
this.forWebSocket = forWebSocket;
}
/**
* Immediately closes the socket connection if it's currently held. Use this to interrupt an
* in-flight request from any thread. It's the caller's responsibility to close the request body
* and response body streams; otherwise resources may be leaked.
*
* <p>This method is safe to be called concurrently, but provides limited guarantees. If a
* transport layer connection has been established (such as a HTTP/2 stream) that is terminated.
* Otherwise if a socket connection is being established, that is terminated.
*/
public void cancel() {
canceled = true;
StreamAllocation streamAllocation = this.streamAllocation;
if (streamAllocation != null) streamAllocation.cancel();
}
public boolean isCanceled() {
return canceled;
}
public void setCallStackTrace(Object callStackTrace) {
this.callStackTrace = callStackTrace;
}
public StreamAllocation streamAllocation() {
return streamAllocation;
}
@Override public Response intercept(Chain chain) throws IOException {
Request request = chain.request();
streamAllocation = new StreamAllocation(
client.connectionPool(), createAddress(request.url()), callStackTrace);
int followUpCount = 0;
Response priorResponse = null;
while (true) {
if (canceled) {
streamAllocation.release();
throw new IOException("Canceled");
}
Response response = null;
boolean releaseConnection = true;
try {
response = ((RealInterceptorChain) chain).proceed(request, streamAllocation, null, null);
releaseConnection = false;
} catch (RouteException e) {
// The attempt to connect via a route failed. The request will not have been sent.
if (!recover(e.getLastConnectException(), false, request)) {
throw e.getLastConnectException();
}
releaseConnection = false;
continue;
} catch (IOException e) {
// An attempt to communicate with a server failed. The request may have been sent.
boolean requestSendStarted = !(e instanceof ConnectionShutdownException);
if (!recover(e, requestSendStarted, request)) throw e;
releaseConnection = false;
continue;
} finally {
// We're throwing an unchecked exception. Release any resources.
if (releaseConnection) {
streamAllocation.streamFailed(null);
streamAllocation.release();
}
}
// Attach the prior response if it exists. Such responses never have a body.
if (priorResponse != null) {
response = response.newBuilder()
.priorResponse(priorResponse.newBuilder()
.body(null)
.build())
.build();
}
Request followUp = followUpRequest(response);
if (followUp == null) {
if (!forWebSocket) {
streamAllocation.release();
}
return response;
}
closeQuietly(response.body());
if (++followUpCount > MAX_FOLLOW_UPS) {
streamAllocation.release();
throw new ProtocolException("Too many follow-up requests: " + followUpCount);
}
if (followUp.body() instanceof UnrepeatableRequestBody) {
streamAllocation.release();
throw new HttpRetryException("Cannot retry streamed HTTP body", response.code());
}
if (!sameConnection(response, followUp.url())) {
streamAllocation.release();
streamAllocation = new StreamAllocation(
client.connectionPool(), createAddress(followUp.url()), callStackTrace);
} else if (streamAllocation.codec() != null) {
throw new IllegalStateException("Closing the body of " + response
+ " didn't close its backing stream. Bad interceptor?");
}
request = followUp;
priorResponse = response;
}
}
private Address createAddress(HttpUrl url) {
SSLSocketFactory sslSocketFactory = null;
HostnameVerifier hostnameVerifier = null;
CertificatePinner certificatePinner = null;
if (url.isHttps()) {
sslSocketFactory = client.sslSocketFactory();
hostnameVerifier = client.hostnameVerifier();
certificatePinner = client.certificatePinner();
}
return new Address(url.host(), url.port(), client.dns(), client.socketFactory(),
sslSocketFactory, hostnameVerifier, certificatePinner, client.proxyAuthenticator(),
client.proxy(), client.protocols(), client.connectionSpecs(), client.proxySelector());
}
/**
 * Report and attempt to recover from a failure to communicate with a server. Returns true if
 * {@code e} is recoverable, or false if the failure is permanent. Requests with a body can only
 * be recovered if the body is buffered or if the failure occurred before the request has been
 * sent.
 */
private boolean recover(IOException e, boolean requestSendStarted, Request userRequest) {
  // Always record the failure first so the allocation can avoid the bad route next time.
  streamAllocation.streamFailed(e);

  // Retry only when all of the following hold, checked in the same order as before:
  // the client permits retries, the body can be replayed, the failure itself is
  // recoverable, and there is still another route to try.
  return client.retryOnConnectionFailure()
      && !(requestSendStarted && userRequest.body() instanceof UnrepeatableRequestBody)
      && isRecoverable(e, requestSendStarted)
      && streamAllocation.hasMoreRoutes();
}
/**
 * Returns true when {@code e} is the kind of I/O failure that may succeed if the
 * request is retried on a different route; false when retrying cannot help.
 */
private boolean isRecoverable(IOException e, boolean requestSendStarted) {
  // Protocol violations won't be fixed by retrying.
  if (e instanceof ProtocolException) return false;

  // A connect timeout may succeed on the next route; any other interruption
  // (including a timeout after the request started) is final.
  if (e instanceof InterruptedIOException) {
    return e instanceof SocketTimeoutException && !requestSendStarted;
  }

  // Client-side TLS failures are unlikely to be fixed by a different route.
  if (e instanceof SSLHandshakeException && e.getCause() instanceof CertificateException) {
    // The X509TrustManager rejected the certificate chain: do not retry.
    return false;
  }
  if (e instanceof SSLPeerUnverifiedException) {
    // e.g. a certificate pinning error.
    return false;
  }

  // Other IOExceptions (such as a failure to reach a proxy) may well succeed on a
  // different route, so retry.
  return true;
}
/**
 * Figures out the HTTP request to make in response to receiving {@code userResponse}. This will
 * either add authentication headers, follow redirects or handle a client request timeout. If a
 * follow-up is either unnecessary or not applicable, this returns null.
 *
 * @throws IOException if an authenticator fails while computing credentials
 */
private Request followUpRequest(Response userResponse) throws IOException {
  if (userResponse == null) throw new IllegalStateException();
  Connection connection = streamAllocation.connection();
  // The route is only known once a connection was actually established.
  Route route = connection != null
      ? connection.route()
      : null;
  int responseCode = userResponse.code();
  final String method = userResponse.request().method();
  switch (responseCode) {
    case HTTP_PROXY_AUTH:
      Proxy selectedProxy = route != null
          ? route.proxy()
          : client.proxy();
      if (selectedProxy.type() != Proxy.Type.HTTP) {
        throw new ProtocolException("Received HTTP_PROXY_AUTH (407) code while not using proxy");
      }
      return client.proxyAuthenticator().authenticate(route, userResponse);
    case HTTP_UNAUTHORIZED:
      return client.authenticator().authenticate(route, userResponse);
    case HTTP_PERM_REDIRECT:
    case HTTP_TEMP_REDIRECT:
      // "If the 307 or 308 status code is received in response to a request other than GET
      // or HEAD, the user agent MUST NOT automatically redirect the request"
      if (!method.equals("GET") && !method.equals("HEAD")) {
        return null;
      }
      // fall-through: GET/HEAD 307/308 are handled like the 3xx codes below.
    case HTTP_MULT_CHOICE:
    case HTTP_MOVED_PERM:
    case HTTP_MOVED_TEMP:
    case HTTP_SEE_OTHER:
      // Does the client allow redirects?
      if (!client.followRedirects()) return null;
      String location = userResponse.header("Location");
      if (location == null) return null;
      HttpUrl url = userResponse.request().url().resolve(location);
      // Don't follow redirects to unsupported protocols (resolve() returned null).
      if (url == null) return null;
      // If configured, don't follow redirects between SSL and non-SSL.
      boolean sameScheme = url.scheme().equals(userResponse.request().url().scheme());
      if (!sameScheme && !client.followSslRedirects()) return null;
      // Most redirects don't include a request body.
      Request.Builder requestBuilder = userResponse.request().newBuilder();
      if (HttpMethod.permitsRequestBody(method)) {
        final boolean maintainBody = HttpMethod.redirectsWithBody(method);
        if (HttpMethod.redirectsToGet(method)) {
          requestBuilder.method("GET", null);
        } else {
          RequestBody requestBody = maintainBody ? userResponse.request().body() : null;
          requestBuilder.method(method, requestBody);
        }
        // When the body is dropped, the body-describing headers must go too.
        if (!maintainBody) {
          requestBuilder.removeHeader("Transfer-Encoding");
          requestBuilder.removeHeader("Content-Length");
          requestBuilder.removeHeader("Content-Type");
        }
      }
      // When redirecting across hosts, drop all authentication headers. This
      // is potentially annoying to the application layer since they have no
      // way to retain them.
      if (!sameConnection(userResponse, url)) {
        requestBuilder.removeHeader("Authorization");
      }
      return requestBuilder.url(url).build();
    case HTTP_CLIENT_TIMEOUT:
      // 408's are rare in practice, but some servers like HAProxy use this response code. The
      // spec says that we may repeat the request without modifications. Modern browsers also
      // repeat the request (even non-idempotent ones.)
      if (userResponse.request().body() instanceof UnrepeatableRequestBody) {
        return null;
      }
      return userResponse.request();
    default:
      return null;
  }
}
/**
 * Returns true if an HTTP request for {@code followUp} can reuse the connection used by this
 * engine: the follow-up must target the same host, port, and scheme as the original request.
 */
private boolean sameConnection(Response response, HttpUrl followUp) {
  HttpUrl url = response.request().url();
  if (!url.host().equals(followUp.host())) return false;
  if (url.port() != followUp.port()) return false;
  return url.scheme().equals(followUp.scheme());
}
}
|
/*
* Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotwireless.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.iotwireless.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DisassociateAwsAccountFromPartnerAccountResult JSON Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisassociateAwsAccountFromPartnerAccountResultJsonUnmarshaller implements
        Unmarshaller<DisassociateAwsAccountFromPartnerAccountResult, JsonUnmarshallerContext> {

    /**
     * Unmarshalls the (payload-free) result: no fields are read from the JSON context,
     * so a fresh empty result object is returned directly.
     */
    public DisassociateAwsAccountFromPartnerAccountResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        return new DisassociateAwsAccountFromPartnerAccountResult();
    }

    private static DisassociateAwsAccountFromPartnerAccountResultJsonUnmarshaller instance;

    /** Lazily creates and returns the shared unmarshaller instance. */
    public static DisassociateAwsAccountFromPartnerAccountResultJsonUnmarshaller getInstance() {
        if (instance == null) {
            instance = new DisassociateAwsAccountFromPartnerAccountResultJsonUnmarshaller();
        }
        return instance;
    }
}
|
/*
* MIT License
*
* Copyright (c) 2022 MASES s.r.l.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**************************************************************************************
* <auto-generated>
* This code was generated from a template using JCOReflector
*
* Manual changes to this file may cause unexpected behavior in your application.
* Manual changes to this file will be overwritten if the code is regenerated.
* </auto-generated>
*************************************************************************************/
package microsoft.entityframeworkcore.query;
import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;
import java.util.ArrayList;
// Import section
import system.linq.expressions.ExpressionVisitor;
/**
 * The base .NET class managing Microsoft.EntityFrameworkCore.Query.SqlExpressionVisitor, Microsoft.EntityFrameworkCore.Relational, Version=6.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60.
 * <p>
 *
 * See: <a href="https://docs.microsoft.com/en-us/dotnet/api/Microsoft.EntityFrameworkCore.Query.SqlExpressionVisitor" target="_top">https://docs.microsoft.com/en-us/dotnet/api/Microsoft.EntityFrameworkCore.Query.SqlExpressionVisitor</a>
 */
public class SqlExpressionVisitor extends ExpressionVisitor {
    /**
     * Fully assembly qualified name: Microsoft.EntityFrameworkCore.Relational, Version=6.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60
     */
    public static final String assemblyFullName = "Microsoft.EntityFrameworkCore.Relational, Version=6.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60";
    /**
     * Assembly name: Microsoft.EntityFrameworkCore.Relational
     */
    public static final String assemblyShortName = "Microsoft.EntityFrameworkCore.Relational";
    /**
     * Qualified class name: Microsoft.EntityFrameworkCore.Query.SqlExpressionVisitor
     */
    public static final String className = "Microsoft.EntityFrameworkCore.Query.SqlExpressionVisitor";
    // Bridge into the .NET runtime instance hosting the assembly named above.
    static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
    /**
     * The type managed from JCOBridge. See {@link JCType}
     */
    public static JCType classType = createType();
    static JCEnum enumInstance = null;
    // The wrapped .NET object; null until assigned by the constructor or setJCOInstance.
    JCObject classInstance = null;
    // Resolves the .NET type for this wrapper via the bridge; logs and returns null on failure.
    static JCType createType() {
        try {
            String classToCreate = className + ", "
                    + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Creating %s", classToCreate);
            JCType typeCreated = bridge.GetType(classToCreate);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Created: %s",
                        (typeCreated != null) ? typeCreated.toString() : "Returned null value");
            return typeCreated;
        } catch (JCException e) {
            JCOReflector.writeLog(e);
            return null;
        }
    }
    // Registers an additional .NET assembly reference, translating native bridge errors.
    void addReference(String ref) throws Throwable {
        try {
            bridge.AddReference(ref);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }
    /**
     * Internal constructor. Use with caution
     */
    public SqlExpressionVisitor(java.lang.Object instance) throws Throwable {
        super(instance);
        if (instance instanceof JCObject) {
            classInstance = (JCObject) instance;
        } else
            throw new Exception("Cannot manage object, it is not a JCObject");
    }
    // --- IJCOBridgeReflected accessors ---
    public String getJCOAssemblyName() {
        return assemblyFullName;
    }
    public String getJCOClassName() {
        return className;
    }
    public String getJCOObjectName() {
        return className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
    }
    public java.lang.Object getJCOInstance() {
        return classInstance;
    }
    public void setJCOInstance(JCObject instance) {
        classInstance = instance;
        super.setJCOInstance(classInstance);
    }
    public JCType getJCOType() {
        return classType;
    }
    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link SqlExpressionVisitor}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link SqlExpressionVisitor} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static SqlExpressionVisitor cast(IJCOBridgeReflected from) throws Throwable {
        NetType.AssertCast(classType, from);
        return new SqlExpressionVisitor(from.getJCOInstance());
    }
    // Constructors section
    public SqlExpressionVisitor() throws Throwable {
    }
    // Methods section
    // Properties section
    // Instance Events section
}
|
/**
* Copyright (C) 2013 – 2017 SLUB Dresden & Avantgarde Labs GmbH (<code@dswarm.org>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dswarm.converter.schema.test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import javax.xml.stream.XMLStreamException;
import com.google.common.io.Files;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmlunit.builder.DiffBuilder;
import org.xmlunit.builder.Input;
import org.xmlunit.diff.Diff;
import org.dswarm.converter.GuicedTest;
import org.dswarm.converter.schema.SolrXMLDataSourceConfigGenerator;
import org.dswarm.persistence.model.schema.Schema;
import org.dswarm.persistence.model.schema.utils.SchemaUtils;
import org.dswarm.persistence.service.schema.SchemaService;
import org.dswarm.persistence.util.DMPPersistenceUtil;
/**
 * @author tgaengler
 */
public class SolrXMLDataSourceConfigGeneratorTest extends GuicedTest {

    private static final Logger LOG = LoggerFactory.getLogger(SolrXMLDataSourceConfigGeneratorTest.class);

    @Test
    public void testSolrXMLDataSourceConfigGenerator() throws IOException, XMLStreamException {
        internalTestSolrXMLDataSourceConfigGenerator("data-config.xml", Optional.of("http://purl.org/ontology/bibo/Document"), Optional.empty(),
                "expected-data-config.xml");
    }

    @Test
    public void testSolrXMLDataSourceConfigGenerator2() throws IOException, XMLStreamException {
        internalTestSolrXMLDataSourceConfigGenerator("data-config2.xml", Optional.empty(),
                Optional.of("http://data.slub-dresden.de/schemas/Schema-5664ba0e-ccb3-4b71-8823-13281490de30/RecordTypes"),
                "expected-data-config2.xml");
    }

    /**
     * Generates a Solr XML data source config for the finc Solr schema into a temp file and
     * compares it against the expected resource using XML similarity (whitespace-insensitive).
     *
     * @param tempDataConfigFileName    name whose prefix/suffix seed the temp file name
     * @param optionalRecordTag         optional record tag passed to the generator
     * @param optionalRootAttributePath optional root attribute path passed to the generator
     * @param expectedDataConfigFileName classpath resource holding the expected XML
     */
    private void internalTestSolrXMLDataSourceConfigGenerator(final String tempDataConfigFileName, final Optional<String> optionalRecordTag,
            final Optional<String> optionalRootAttributePath,
            final String expectedDataConfigFileName) throws IOException, XMLStreamException {
        final SchemaService schemaService = GuicedTest.injector.getInstance(SchemaService.class);
        final Schema fincSolrSchema = schemaService.getObject(SchemaUtils.FINC_SOLR_SCHEMA_UUID);
        final String[] fileNameParts = tempDataConfigFileName.split("\\.");
        final File file = File.createTempFile(fileNameParts[0], fileNameParts[1]);
        // FIX: the original never closed the FileOutputStream (resource leak; buffered bytes
        // could also stay unflushed before the read-back below). try-with-resources closes it.
        try (final OutputStream fop = new FileOutputStream(file)) {
            SolrXMLDataSourceConfigGenerator
                    .generateSolrXMLDataSourceConfig(fincSolrSchema, optionalRecordTag, optionalRootAttributePath,
                            fop);
        }
        final String expectedDataConfig = DMPPersistenceUtil.getResourceAsString(expectedDataConfigFileName);
        Assert.assertNotNull(expectedDataConfig);
        final String actualDataConfig = Files.toString(file, StandardCharsets.UTF_8);
        Assert.assertNotNull(actualDataConfig);
        // do comparison: check for XML similarity
        final Diff xmlDiff = DiffBuilder.compare(Input.fromString(expectedDataConfig))
                .withTest(Input.fromString(actualDataConfig)).ignoreWhitespace().checkForSimilar().build();
        Assert.assertFalse(xmlDiff.hasDifferences());
    }
}
|
package com.force.five.app.domain;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Column;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.io.Serializable;
/**
 * An authority (a security role) used by Spring Security.
 */
@Entity
@Table(name = "jhi_authority")
public class Authority implements Serializable {

    // The authority's name is also its primary key (max 50 characters).
    @NotNull
    @Size(min = 0, max = 50)
    @Id
    @Column(length = 50)
    private String name;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Two authorities are equal when they are the same class and carry the same name. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final Authority that = (Authority) o;
        return name == null ? that.name == null : name.equals(that.name);
    }

    @Override
    public int hashCode() {
        return name == null ? 0 : name.hashCode();
    }

    @Override
    public String toString() {
        return "Authority{" + "name='" + name + '\'' + "}";
    }
}
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.stats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
/**
 * A request to get indices level stats. Allow to enable different stats to be returned.
 * <p>
 * By default, all statistics are enabled.
 * <p>
 * All the stats to be returned can be cleared using {@link #clear()}, at which point, specific
 * stats can be enabled.
 */
public class IndicesStatsRequest extends BroadcastRequest<IndicesStatsRequest> {
    // Bit-flag holder describing which stats sections to return; serialized with the request.
    private CommonStatsFlags flags = new CommonStatsFlags();
    /**
     * Sets all flags to return all stats.
     */
    public IndicesStatsRequest all() {
        flags.all();
        return this;
    }
    /**
     * Clears all stats.
     */
    public IndicesStatsRequest clear() {
        flags.clear();
        return this;
    }
    /**
     * Document types to return stats for. Mainly affects {@link #indexing(boolean)} when
     * enabled, returning specific indexing stats for those types.
     */
    public IndicesStatsRequest types(String... types) {
        flags.types(types);
        return this;
    }
    /**
     * Document types to return stats for. Mainly affects {@link #indexing(boolean)} when
     * enabled, returning specific indexing stats for those types.
     */
    public String[] types() {
        return this.flags.types();
    }
    /**
     * Sets specific search group stats to retrieve the stats for. Mainly affects search
     * when enabled.
     */
    public IndicesStatsRequest groups(String... groups) {
        flags.groups(groups);
        return this;
    }
    /** Returns the search groups configured via {@link #groups(String...)}. */
    public String[] groups() {
        return this.flags.groups();
    }
    // Each setter below toggles one stats section on the shared flags and returns
    // {@code this} for chaining; the matching no-arg getter reports whether that
    // section is currently enabled.
    public IndicesStatsRequest docs(boolean docs) {
        flags.set(Flag.Docs, docs);
        return this;
    }
    public boolean docs() {
        return flags.isSet(Flag.Docs);
    }
    public IndicesStatsRequest store(boolean store) {
        flags.set(Flag.Store, store);
        return this;
    }
    public boolean store() {
        return flags.isSet(Flag.Store);
    }
    public IndicesStatsRequest indexing(boolean indexing) {
        flags.set(Flag.Indexing, indexing);
        return this;
    }
    public boolean indexing() {
        return flags.isSet(Flag.Indexing);
    }
    public IndicesStatsRequest get(boolean get) {
        flags.set(Flag.Get, get);
        return this;
    }
    public boolean get() {
        return flags.isSet(Flag.Get);
    }
    public IndicesStatsRequest search(boolean search) {
        flags.set(Flag.Search, search);
        return this;
    }
    public boolean search() {
        return flags.isSet(Flag.Search);
    }
    public IndicesStatsRequest merge(boolean merge) {
        flags.set(Flag.Merge, merge);
        return this;
    }
    public boolean merge() {
        return flags.isSet(Flag.Merge);
    }
    public IndicesStatsRequest refresh(boolean refresh) {
        flags.set(Flag.Refresh, refresh);
        return this;
    }
    public boolean refresh() {
        return flags.isSet(Flag.Refresh);
    }
    public IndicesStatsRequest flush(boolean flush) {
        flags.set(Flag.Flush, flush);
        return this;
    }
    public boolean flush() {
        return flags.isSet(Flag.Flush);
    }
    public IndicesStatsRequest warmer(boolean warmer) {
        flags.set(Flag.Warmer, warmer);
        return this;
    }
    public boolean warmer() {
        return flags.isSet(Flag.Warmer);
    }
    public IndicesStatsRequest queryCache(boolean queryCache) {
        flags.set(Flag.QueryCache, queryCache);
        return this;
    }
    public boolean queryCache() {
        return flags.isSet(Flag.QueryCache);
    }
    public IndicesStatsRequest fieldData(boolean fieldData) {
        flags.set(Flag.FieldData, fieldData);
        return this;
    }
    public boolean fieldData() {
        return flags.isSet(Flag.FieldData);
    }
    public IndicesStatsRequest percolate(boolean percolate) {
        flags.set(Flag.Percolate, percolate);
        return this;
    }
    public boolean percolate() {
        return flags.isSet(Flag.Percolate);
    }
    public IndicesStatsRequest segments(boolean segments) {
        flags.set(Flag.Segments, segments);
        return this;
    }
    public boolean segments() {
        return flags.isSet(Flag.Segments);
    }
    /** Restricts field-data stats to the given field names. */
    public IndicesStatsRequest fieldDataFields(String... fieldDataFields) {
        flags.fieldDataFields(fieldDataFields);
        return this;
    }
    public String[] fieldDataFields() {
        return flags.fieldDataFields();
    }
    public IndicesStatsRequest completion(boolean completion) {
        flags.set(Flag.Completion, completion);
        return this;
    }
    public boolean completion() {
        return flags.isSet(Flag.Completion);
    }
    /** Restricts completion stats to the given field names. */
    public IndicesStatsRequest completionFields(String... completionDataFields) {
        flags.completionDataFields(completionDataFields);
        return this;
    }
    public String[] completionFields() {
        return flags.completionDataFields();
    }
    public IndicesStatsRequest translog(boolean translog) {
        flags.set(Flag.Translog, translog);
        return this;
    }
    public boolean translog() {
        return flags.isSet(Flag.Translog);
    }
    public IndicesStatsRequest suggest(boolean suggest) {
        flags.set(Flag.Suggest, suggest);
        return this;
    }
    public boolean suggest() {
        return flags.isSet(Flag.Suggest);
    }
    public IndicesStatsRequest requestCache(boolean requestCache) {
        flags.set(Flag.RequestCache, requestCache);
        return this;
    }
    public boolean requestCache() {
        return flags.isSet(Flag.RequestCache);
    }
    public IndicesStatsRequest recovery(boolean recovery) {
        flags.set(Flag.Recovery, recovery);
        return this;
    }
    public boolean recovery() {
        return flags.isSet(Flag.Recovery);
    }
    /** Serializes the base request, then the stats flags. */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        flags.writeTo(out);
    }
    /** Deserializes the base request, then replaces the stats flags from the stream. */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        flags = CommonStatsFlags.readCommonStatsFlags(in);
    }
}
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender;
import com.navercorp.pinpoint.common.util.ThreadMXBeanUtils;
import com.navercorp.pinpoint.profiler.sender.BufferedUdpDataSender;
import junit.framework.Assert;
import org.junit.Test;
public class BufferedUdpDataSenderTest {

    @Test
    public void testSendPacket() throws Exception {
        // Intentionally empty placeholder: packet sending itself is not exercised here.
    }

    /**
     * Verifies that the sender's flush thread is alive after construction and gone after stop().
     */
    @Test
    public void testStop_StopFlushThread() throws Exception {
        final BufferedUdpDataSender sender = new BufferedUdpDataSender("localhost", 9999, "testUdpSender", 100);
        final String flushThreadName = sender.getFlushThreadName();
        try {
            // The flush thread must be running while the sender is live.
            Assert.assertTrue(ThreadMXBeanUtils.findThreadName(flushThreadName));
        } finally {
            // FIX: stop the sender even when the assertion above fails, so the flush
            // thread cannot leak into later tests (resolves the original
            // "?? finally { send.stop() }" note).
            sender.stop();
        }
        // After stop() the flush thread must be gone.
        Assert.assertFalse(ThreadMXBeanUtils.findThreadName(flushThreadName));
    }
}
|
package objectModels.gui;
import com.shaft.gui.element.ElementActions;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
 * Page object for the Heroku "File Upload" demo page.
 */
public class HerokuFileUploadPage {

    // Locators for the page's interactive elements.
    public By chooseFile = By.id("file-upload");
    public By uploadButton = By.id("file-submit");
    public By uploadedFiles = By.id("uploaded-files");

    private final WebDriver driver;

    public HerokuFileUploadPage(WebDriver driver) {
        this.driver = driver;
    }

    /**
     * Types the file's absolute path into the upload input and submits the form.
     */
    public void uploadFile(String absolutePathOfFile) {
        ElementActions.typeFileLocationForUpload(driver, chooseFile, absolutePathOfFile);
        clickUploadButton();
    }

    /** Clicks the upload/submit button. */
    public void clickUploadButton() {
        driver.findElement(uploadButton).click();
    }

    /** Returns the text shown in the uploaded-files element. */
    public String getFileName() {
        return ElementActions.getText(driver, uploadedFiles);
    }
}
|
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.server.reactive;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import javax.servlet.AsyncContext;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.catalina.connector.CoyoteInputStream;
import org.apache.catalina.connector.CoyoteOutputStream;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.util.Assert;
/**
 * {@link ServletHttpHandlerAdapter} extension that uses Tomcat APIs for reading
 * from the request and writing to the response with {@link ByteBuffer}.
 *
 * @author Violeta Georgieva
 * @since 5.0
 * @see org.springframework.web.server.adapter.AbstractReactiveWebInitializer
 */
public class TomcatHttpHandlerAdapter extends ServletHttpHandlerAdapter {

    public TomcatHttpHandlerAdapter(HttpHandler httpHandler) {
        super(httpHandler);
    }

    @Override
    protected ServerHttpRequest createRequest(HttpServletRequest request, AsyncContext asyncContext)
            throws IOException, URISyntaxException {
        // servletPath must be configured before any request can be adapted.
        Assert.notNull(getServletPath(), "servletPath is not initialized.");
        return new TomcatServerHttpRequest(request, asyncContext, getServletPath(),
                getDataBufferFactory(), getBufferSize());
    }

    @Override
    protected ServerHttpResponse createResponse(HttpServletResponse response, AsyncContext cxt)
            throws IOException {
        return new TomcatServerHttpResponse(response, cxt, getDataBufferFactory(), getBufferSize());
    }

    /** Request variant that reads via Tomcat's {@link CoyoteInputStream} directly into a ByteBuffer. */
    private final class TomcatServerHttpRequest extends ServletServerHttpRequest {

        public TomcatServerHttpRequest(HttpServletRequest request, AsyncContext context,
                String servletPath, DataBufferFactory factory, int bufferSize)
                throws IOException, URISyntaxException {
            super(request, context, servletPath, factory, bufferSize);
        }

        @Override
        protected DataBuffer readFromInputStream() throws IOException {
            // "release" stays true on every path that does NOT hand the buffer to the
            // caller, so the finally block frees it exactly when it is not returned.
            boolean release = true;
            int capacity = getBufferSize();
            DataBuffer dataBuffer = getDataBufferFactory().allocateBuffer(capacity);
            try {
                ByteBuffer byteBuffer = dataBuffer.asByteBuffer(0, capacity);
                ServletRequest request = getNativeRequest();
                // Tomcat-specific: CoyoteInputStream can read straight into a ByteBuffer.
                int read = ((CoyoteInputStream) request.getInputStream()).read(byteBuffer);
                if (logger.isTraceEnabled()) {
                    logger.trace("read:" + read);
                }
                if (read > 0) {
                    dataBuffer.writePosition(read);
                    release = false; // ownership transfers to the caller
                    return dataBuffer;
                }
                else if (read == -1) {
                    // End of stream: signal with the shared EOF marker buffer.
                    return EOF_BUFFER;
                }
                else {
                    // read == 0: no data available right now.
                    return null;
                }
            }
            finally {
                if (release) {
                    DataBufferUtils.release(dataBuffer);
                }
            }
        }
    }

    /** Response variant that writes via Tomcat's {@link CoyoteOutputStream} from a ByteBuffer. */
    private static final class TomcatServerHttpResponse extends ServletServerHttpResponse {

        public TomcatServerHttpResponse(HttpServletResponse response, AsyncContext context,
                DataBufferFactory factory, int bufferSize) throws IOException {
            super(response, context, factory, bufferSize);
        }

        @Override
        protected int writeToOutputStream(DataBuffer dataBuffer) throws IOException {
            ByteBuffer input = dataBuffer.asByteBuffer();
            // Capture the byte count before the write consumes the buffer's remaining bytes.
            int len = input.remaining();
            ServletResponse response = getNativeResponse();
            ((CoyoteOutputStream) response.getOutputStream()).write(input);
            return len;
        }
    }
}
|
package de.tuberlin.cit.sdn.opendaylight.hydrogen.model.statistic;
import de.tuberlin.cit.sdn.opendaylight.hydrogen.model.node.Node;
import java.util.List;
/**
 * DTO pairing an OpenDaylight node with the per-port statistics reported for it.
 */
public class PortNodeStatistic {
    // The node these statistics belong to.
    public Node node;
    // One statistics entry per node connector (port) — presumably filled by the
    // statistics service; confirm against the caller.
    public List<PortNodeConnectorStatistic> portStatistic;
}
|
package com.cardsui.example;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import com.fima.cardsui.objects.Card;
public class MyCard extends Card {

    public MyCard(String title) {
        super(title);
    }

    /** Inflates the example card layout and fills in this card's title. */
    @Override
    public View getCardContent(Context context) {
        final View content = LayoutInflater.from(context).inflate(R.layout.card_ex, null);
        final TextView titleView = (TextView) content.findViewById(R.id.title);
        titleView.setText(title);
        return content;
    }
}
|
/**
*
* :-::-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-+:-+:-+:-+:-++:-:+:-:+:-:+:-:
*
* This file is part of CHiLOⓇ - http://www.cccties.org/en/activities/chilo/
* CHiLOⓇ is a next-generation learning system utilizing ebooks, aiming
* at dissemination of open education.
* Copyright 2015 NPO CCC-TIES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* :-::-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-:+:-+:-+:-+:-+:-++:-:+:-:+:-:+:-:
*
*/
package epub3maker;
import java.util.HashMap;
import java.util.Map;
import org.apache.velocity.VelocityContext;
/**
 * Holds the template variables for one content unit and converts them into a
 * Velocity context on demand.
 */
public class Content {

    // Backing store for the template variables.
    private Map<String, Object> content;

    public Content()
    {
        content = new HashMap<String, Object>();
    }

    /** Stores (or overwrites) one template variable. */
    public void put(String key, Object value)
    {
        content.put(key, value);
    }

    /**
     * Builds a fresh VelocityContext containing every non-null entry.
     */
    public VelocityContext getVelocityContext() {
        final VelocityContext context = new VelocityContext();
        for (final String key : content.keySet()) {
            final Object value = content.get(key);
            if (value != null) {
                context.put(key, value);
            }
        }
        return context;
    }
}
|
package com.example.leagueplanner.services;
import java.time.DayOfWeek;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAdjuster;
import java.time.temporal.TemporalAdjusters;
import java.time.temporal.TemporalAmount;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import com.example.leagueplanner.domain.Game;
import com.example.leagueplanner.domain.League;
import com.example.leagueplanner.domain.Plan;
import com.example.leagueplanner.domain.Team;
import org.paukov.combinatorics3.Generator;
import org.springframework.stereotype.Service;
/**
 * Builds a season plan for a league: each unordered pair of teams meets twice
 * (home/away swapped), with games on successive Saturdays starting from
 * {@link #LEAGUE_START} and a three-week break between the direct and the
 * reversed half of the season.
 */
@Service
public class PlannerService {

    /** Fixed kickoff reference for the league's first match day. */
    public static final ZonedDateTime LEAGUE_START =
            ZonedDateTime.parse("2019-10-03T17:00:00.00+02:00[Europe/Berlin]");

    /**
     * Calculates a randomized schedule for the given league.
     *
     * @param league league whose teams are paired up; its team list is not modified
     * @return plan containing one game per pairing and direction
     */
    public Plan calculate(League league) {
        // Copy before shuffling: Collections.shuffle mutates its argument, and the
        // previous code reordered the league's own team list as a side effect.
        final List<Team> shuffledTeams = new ArrayList<>(league.getTeams());
        Collections.shuffle(shuffledTeams);

        // All unordered pairs of teams; each pair plays twice (direct + reversed).
        final List<List<Team>> rounds =
                Generator.combination(shuffledTeams)
                        .simple(2)
                        .stream()
                        .collect(Collectors.toList());

        final Plan plan = new Plan();
        final TemporalAdjuster nextSaturday = TemporalAdjusters.next(DayOfWeek.SATURDAY);
        ZonedDateTime start = LEAGUE_START;

        // Direct rounds: one game per Saturday.
        for (final List<Team> round : rounds) {
            start = start.with(nextSaturday);
            final Team first = round.get(0);
            final Team second = round.get(1);
            plan.addGame(new Game(first, second, start));
        }

        // Three-week break before the reversed half of the season.
        start = start.plus(3, ChronoUnit.WEEKS);

        // Reversed rounds: same pairings with home/away swapped. The first reversed
        // game keeps the post-break date; subsequent games advance Saturday by Saturday.
        for (final List<Team> round : rounds) {
            final Team first = round.get(1);
            final Team second = round.get(0);
            plan.addGame(new Game(first, second, start));
            start = start.with(nextSaturday);
        }

        return plan;
    }
}
|
/*
* The MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.influxdb.client.internal;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nonnull;
import com.influxdb.client.InfluxDBClientOptions;
import com.influxdb.utils.Arguments;
import okhttp3.Call;
import okhttp3.Credentials;
import okhttp3.HttpUrl;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
/**
* @author Jakub Bednar (bednar@github) (12/10/2018 12:39)
*/
/**
 * OkHttp interceptor that attaches InfluxDB credentials to outgoing requests:
 * a "Token" Authorization header for the TOKEN scheme, or a session cookie
 * (established via /api/v2/signin) for the SESSION scheme.
 */
class AuthenticateInterceptor implements Interceptor {

    // NOTE(review): logger is named after InfluxDBClientImpl rather than this
    // class — confirm intentional; otherwise use AuthenticateInterceptor.class.
    private static final Logger LOG = Logger.getLogger(InfluxDBClientImpl.class.getName());

    // Endpoints that must stay reachable without credentials.
    private static final List<String> NO_AUTH_ROUTE = Arrays.asList("/api/v2/signin", "/api/v2/signout",
            "/api/v2/setup");

    private final InfluxDBClientOptions influxDBClientOptions;

    // Client used for the signin/signout side requests; supplied via initToken.
    private OkHttpClient okHttpClient;

    // Cookie header value captured from the signin response; null until signed in.
    // NOTE(review): read/written without synchronization — confirm single-threaded
    // use, or make access thread-safe.
    private char[] sessionCookies;

    // Once true, no further credentials are attached (see signout()).
    private final AtomicBoolean signout = new AtomicBoolean(false);

    AuthenticateInterceptor(@Nonnull final InfluxDBClientOptions influxDBClientOptions) {
        Arguments.checkNotNull(influxDBClientOptions, "InfluxDBClientOptions");
        this.influxDBClientOptions = influxDBClientOptions;
    }

    /**
     * Decorates the request with credentials according to the configured auth
     * scheme. No-auth routes and post-signout requests pass through untouched.
     */
    @Override
    @Nonnull
    public Response intercept(@Nonnull final Chain chain) throws IOException {

        Request request = chain.request();

        final String requestPath = request.url().encodedPath();

        // Is no authentication path?
        if (NO_AUTH_ROUTE.stream().anyMatch(requestPath::endsWith)) {
            return chain.proceed(request);
        }

        if (signout.get()) {
            LOG.log(Level.WARNING, "Authorization interception failed. Already signed out.");
            // Still make the request in order to maintain backward compatibility.
            return chain.proceed(request);
        }

        if (InfluxDBClientOptions.AuthScheme.TOKEN.equals(influxDBClientOptions.getAuthScheme())) {
            request = request.newBuilder()
                    .header("Authorization", "Token " + string(influxDBClientOptions.getToken()))
                    .build();
        } else if (InfluxDBClientOptions.AuthScheme.SESSION.equals(influxDBClientOptions.getAuthScheme())) {
            // Lazily signs in on first authenticated request for the SESSION scheme.
            initToken(this.okHttpClient);
            if (sessionCookies != null) {
                request = request.newBuilder()
                        .header("Cookie", string(sessionCookies))
                        .build();
            }
        }

        return chain.proceed(request);
    }

    /**
     * Init the Session token if is {@link InfluxDBClientOptions.AuthScheme#SESSION} used.
     * No-op for other schemes, after signout, or once a session cookie already exists.
     *
     * @param okHttpClient the client for signin and signout requests
     */
    void initToken(@Nonnull final OkHttpClient okHttpClient) {
        Arguments.checkNotNull(okHttpClient, "okHttpClient");
        this.okHttpClient = okHttpClient;
        if (!InfluxDBClientOptions.AuthScheme.SESSION.equals(influxDBClientOptions.getAuthScheme()) || signout.get()) {
            return;
        }
        if (sessionCookies == null) {
            String credentials = Credentials
                    .basic(influxDBClientOptions.getUsername(), string(influxDBClientOptions.getPassword()));
            // NOTE(review): with OkHttp 4's create(String content, MediaType type) this
            // sends the literal text "application/json" as the request body with no
            // Content-Type header. If an empty JSON body was intended, the arguments
            // look swapped — verify against the OkHttp version in use.
            Request authRequest = new Request.Builder()
                    .url(buildPath("api/v2/signin"))
                    .addHeader("Authorization", credentials)
                    .post(RequestBody.create("application/json", null))
                    .build();
            try (Response authResponse = this.okHttpClient.newCall(authRequest).execute()) {
                // Only the first Set-Cookie header of the response is captured.
                String cookieHeader = authResponse.headers().get("Set-Cookie");
                if (cookieHeader != null) {
                    sessionCookies = cookieHeader.toCharArray();
                }
            } catch (IOException e) {
                // Best-effort: failure is logged and the request proceeds without a cookie.
                LOG.log(Level.WARNING, "Cannot retrieve the Session token!", e);
            }
        }
    }

    /**
     * Expire the current session.
     *
     * @throws IOException if the request could not be executed due to cancellation, a connectivity problem or timeout
     * @see Call#execute()
     */
    void signout() throws IOException {
        if (!InfluxDBClientOptions.AuthScheme.SESSION.equals(influxDBClientOptions.getAuthScheme()) || signout.get()) {
            // The flag is set even for non-session schemes so later calls are no-ops.
            signout.set(true);
            return;
        }
        Request.Builder authRequest = new Request.Builder()
                .url(buildPath("api/v2/signout"))
                .post(RequestBody.create("application/json", null));
        if (sessionCookies != null) {
            authRequest.addHeader("Cookie", string(sessionCookies));
        }
        // Flip the flag and drop the cookie before the network call so the
        // interceptor stops attaching credentials immediately.
        signout.set(true);
        sessionCookies = null;
        Response response = okHttpClient.newCall(authRequest.build()).execute();
        response.close();
    }

    /**
     * Resolves the given relative path against the configured base URL.
     *
     * @param buildPath relative, pre-encoded path segments
     * @return the absolute URL as a string
     */
    @Nonnull
    String buildPath(final String buildPath) {
        Arguments.checkNotNull(buildPath, "buildPath");
        return HttpUrl
                .parse(influxDBClientOptions.getUrl())
                .newBuilder()
                .addEncodedPathSegments(buildPath)
                .build()
                .toString();
    }

    /** Converts a char[]-held secret into a String for use in header values. */
    @Nonnull
    private String string(final char[] password) {
        return String.valueOf(password);
    }
}
|
package org.multiverse.stms.gamma.transactions.fat;
/**
 * Runs the shared soft-reset test suite against the variable-length fat
 * transaction implementation.
 */
public class FatVariableLengthGammaTxn_softResetTest extends FatGammaTxn_softResetTest<FatVariableLengthGammaTxn> {

    /** Supplies a fresh variable-length fat transaction bound to the suite's stm. */
    @Override
    public FatVariableLengthGammaTxn newTransaction() {
        final FatVariableLengthGammaTxn txn = new FatVariableLengthGammaTxn(stm);
        return txn;
    }
}
|
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.actor.typed.javadsl;
import akka.actor.typed.ActorRef;
import akka.actor.typed.Behavior;
import akka.testkit.AkkaSpec;
import akka.actor.testkit.typed.javadsl.TestKitJunitResource;
import akka.actor.testkit.typed.javadsl.TestProbe;
import akka.util.Timeout;
import org.junit.ClassRule;
import org.junit.Test;
import org.scalatest.junit.JUnitSuite;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
/**
 * Verifies that {@code ActorContext.ask} delivers the adapted response back
 * to the asking actor as a regular message.
 */
public class ActorContextAskTest extends JUnitSuite {

    @ClassRule
    public static final TestKitJunitResource testKit = new TestKitJunitResource(AkkaSpec.testConf());

    /** Request message carrying the reply-to address. */
    static class Ping {
        final ActorRef<Pong> respondTo;

        public Ping(ActorRef<Pong> respondTo) {
            this.respondTo = respondTo;
        }
    }

    /** Empty reply message. */
    static class Pong { }

    @Test
    public void provideASafeAsk() {
        // Responder: answers every Ping with a Pong to the embedded reply address.
        final Behavior<Ping> pingPongBehavior =
                Behaviors.receive((ActorContext<Ping> ctx, Ping msg) -> {
                    msg.respondTo.tell(new Pong());
                    return Behaviors.same();
                });
        final ActorRef<Ping> pingPong = testKit.spawn(pingPongBehavior);

        final TestProbe<Object> probe = testKit.createTestProbe();

        // Asker: issues the ask on startup, then forwards whatever adapted
        // result (Pong or failure) arrives to the probe.
        final Behavior<Object> snitch = Behaviors.setup((ActorContext<Object> ctx) -> {
            ctx.ask(Pong.class,
                    pingPong,
                    Duration.ofSeconds(3),
                    Ping::new,
                    (pong, failure) -> pong != null ? pong : failure);
            return Behaviors.receiveMessage((Object forwarded) -> {
                probe.ref().tell(forwarded);
                return Behaviors.same();
            });
        });

        testKit.spawn(snitch);
        probe.expectMessageClass(Pong.class);
    }
}
|
package com.hq.simpleblog.entity;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * Log entity recording one handled request: acting user, origin, request and
 * response payloads, and any error data.
 *
 * @author HQ
 * @since 2020/4/13 8:50 PM
 **/
@Data
public class Log implements Serializable {

    // Pin the serialized form: without an explicit UID any recompile can change
    // the auto-computed value and break deserialization of stored instances.
    private static final long serialVersionUID = 1L;

    /** Primary key of the log record. */
    private Long logId;

    /** Id of the acting user. */
    private Long userId;

    /** Email of the acting user. */
    private String userEmail;

    /** Client IP address. */
    private String ip;

    /** Request URI. */
    private String requestUri;

    /** HTTP request method. */
    private String requestMethod;

    /** Serialized request parameters. */
    private String requestParams;

    /** Serialized response payload. */
    private String responseParams;

    /** Error details captured when the request failed. */
    private String errorData;

    // Deletion flag — presumably a soft-delete marker; confirm against the mapper.
    private Boolean deleted;

    /** Creation timestamp. */
    private Date createDate;

    /** Last update timestamp. */
    private Date updateDate;
}
|
/**
* Copyright (c) ObjectFabric Inc. All rights reserved.
*
* This file is part of ObjectFabric (objectfabric.com).
*
* ObjectFabric is licensed under the Apache License, Version 2.0, the terms
* of which may be found at http://www.apache.org/licenses/LICENSE-2.0.html.
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
package of4gwt;
import of4gwt.misc.PlatformAdapter;
//==============================================================================
//
// THIS FILE HAS BEEN GENERATED BY OBJECTFABRIC
//
//==============================================================================
/**
 * Sparse array to avoid allocating a new array the same length as the TArray each time an
 * update is done to it. Storage is two-level: the first level is indexed by the folded
 * bits slot, the second by (index &amp; BIT_INDEX_MASK), so only written chunks allocate.
 * NOTE: generated by ObjectFabric — keep logic changes out of this file.
 */
class TArrayVersionTObject extends TIndexedNVersion {

    // Two-level sparse store; null until the first non-null write.
    private TObject[][] _values;

    TType[] _genericParameters = new TType[] { TObject.TYPE };

    private static final boolean CAN_BE_TOBJECT = true;

    public TArrayVersionTObject(TObject.Version shared, int length) {
        super(shared, length);

        if (shared == null) {
            // Preallocate (C.f. TIndexedNVersion._writes)
            int arrayLength = getBits().length;
            _values = new TObject[arrayLength][];
        }
    }

    public final TObject[][] getValues() {
        return _values;
    }

    public final void setValues(TObject[][] value) {
        _values = value;
    }

    @Override
    public final TType[] getGenericParameters() {
        return _genericParameters;
    }

    /** Returns the element at index, or null if it was never written in this version. */
    public final TObject get(int index) {
        if (_values != null) {
            // Folded index < 0 means no bits entry (hence no chunk) for this index.
            int foldedIndex = of4gwt.misc.Bits.getFoldedIntIndexFromIndex(getBits(), index);

            if (foldedIndex >= 0) {
                TObject[] current = _values[foldedIndex];

                if (current != null)
                    return current[index & of4gwt.misc.Bits.BIT_INDEX_MASK];
            }
        }

        return null;
    }

    @Override
    public final Object getAsObject(int index) {
        return get(index);
    }

    /** Stores value at index, lazily allocating both storage levels on first non-null write. */
    public final void set(int index, TObject value) {
        if (_values == null) {
            if (value != null) {
                int arrayLength = getBits().length;
                _values = new TObject[arrayLength][];
            }
        } else {
            if (of4gwt.misc.Debug.ENABLED)
                of4gwt.misc.Debug.assertion(_values.length == getBits().length);
        }

        if (_values != null) {
            int folded = of4gwt.misc.Bits.getFoldedIntIndexFromIndex(getBits(), index);

            if (_values[folded] == null) {
                if (value != null) {
                    // One chunk covers BITS_PER_UNIT consecutive indices.
                    int arrayLength = of4gwt.misc.Bits.BITS_PER_UNIT;
                    _values[folded] = new TObject[arrayLength];
                }
            }

            if (_values[folded] != null)
                _values[folded][index & of4gwt.misc.Bits.BIT_INDEX_MASK] = value;
        }
    }

    @SuppressWarnings("cast")
    @Override
    public final void setAsObject(int index, Object value) {
        set(index, (TObject) value);
    }

    /** Re-slots the chunk arrays after the bits table has been rehashed/reindexed. */
    @Override
    public final void reindexed(of4gwt.misc.Bits.Entry[] old) {
        if (_values != null) {
            TObject[][] oldValues = _values;
            int arrayLength = getBits().length;
            _values = new TObject[arrayLength][];

            for (int i = old.length - 1; i >= 0; i--) {
                if (old[i] != null) {
                    int intIndex = old[i].IntIndex;
                    int folded = of4gwt.misc.Bits.getFoldedIntIndexFromIntIndex(getBits(), intIndex);

                    if (of4gwt.misc.Debug.ENABLED)
                        of4gwt.misc.Debug.assertion(_values[folded] == null);

                    _values[folded] = oldValues[i];
                }
            }
        }
    }

    /** Version-merge entry point: delegates to super, then merges the sparse values. */
    @Override
    public TObject.Version merge(TObject.Version target, TObject.Version next, int flags) {
        TArrayVersionTObject source = (TArrayVersionTObject) next;
        TArrayVersionTObject merged = (TArrayVersionTObject) super.merge(target, next, flags);
        merged.merge(source, flags);
        return merged;
    }

    @SuppressWarnings("cast")
    private final void merge(TArrayVersionTObject source, int flags) {
        // Fast path: if this version has no values yet (and is not the shared
        // TObject case) and both bits tables are the same size, adopt or copy
        // the source arrays wholesale instead of merging slot by slot.
        boolean skip1 = !(CAN_BE_TOBJECT && isShared()) && _values == null;

        if (skip1)
            if (getBits() != null && source.getBits() != null)
                if (getBits().length != source.getBits().length)
                    skip1 = false;

        if (skip1) {
            if (source._values != null) {
                if ((flags & MERGE_FLAG_COPY_ARRAYS) != 0) {
                    int arrayLength = source._values.length;
                    _values = new TObject[arrayLength][];
                    PlatformAdapter.arraycopy(source._values, 0, _values, 0, _values.length);
                } else
                    _values = source._values;
            }
        } else {
            of4gwt.misc.Bits.Entry[] writes = source.getBits();

            if (writes != null) {
                for (int i = writes.length - 1; i >= 0; i--) {
                    if (writes[i] != null && writes[i].Value != 0) {
                        if (_values == null) {
                            int arrayLength = getBits().length;
                            _values = new TObject[arrayLength][];
                        }

                        // NOTE(review): source chunks are read as source._values[i],
                        // i.e. parallel to source's own bits slots — assumed generated
                        // invariant; confirm against TIndexedNVersion before touching.
                        int folded = of4gwt.misc.Bits.getFoldedIntIndexFromIntIndex(getBits(), writes[i].IntIndex);

                        if (CAN_BE_TOBJECT && isShared()) {
                            if (_values[folded] == null) {
                                int arrayLength = of4gwt.misc.Bits.BITS_PER_UNIT;
                                _values[folded] = new TObject[arrayLength];
                            }

                            Object[] m = (Object[]) (Object) _values[folded];
                            Object[] s = (Object[]) (Object) (source._values != null ? source._values[i] : null);
                            mergeObjects(m, writes[i], s);
                        } else {
                            boolean skip2 = false;

                            if ((flags & MERGE_FLAG_COPY_ARRAY_ELEMENTS) == 0) {
                                skip2 = _values[folded] == null;

                                if (!skip2) // All overwritten
                                    skip2 = writes[i].Value == -1 && source._values != null && source._values[i] != null;
                            } else if (_values[folded] == null) {
                                int arrayLength = of4gwt.misc.Bits.BITS_PER_UNIT;
                                _values[folded] = new TObject[arrayLength];
                            }

                            if (skip2)
                                _values[folded] = source._values != null ? source._values[i] : null;
                            else
                                merge(_values[folded], writes[i], source._values != null ? source._values[i] : null);
                        }
                    }
                }
            }
        }

        if (of4gwt.misc.Debug.ENABLED)
            checkInvariants();
    }

    /** Element-wise merge of one chunk: every written bit overwrites the target slot. */
    private final void merge(TObject[] merged, of4gwt.misc.Bits.Entry writes, TObject[] source) {
        for (int i = of4gwt.misc.Bits.BITS_PER_UNIT - 1; i >= 0; i--)
            if (of4gwt.misc.Bits.get(writes.Value, i))
                merged[i] = source != null ? source[i] : null;
    }

    //

    /** Serializes the element at index, honoring the writer's interruption protocol. */
    @Override
    public final void writeWrite(Writer writer, int index) {
        if (writer.interrupted())
            writer.resume();

        writer.writeTObject(get(index));

        if (writer.interrupted()) {
            writer.interrupt(null);
            return;
        }
    }

    /** Deserializes one element into index, honoring the reader's interruption protocol. */
    @Override
    public final void readWrite(Reader reader, int index) {
        if (reader.interrupted())
            reader.resume();

        TObject object = reader.readTObject();

        if (reader.interrupted()) {
            reader.interrupt(null);
            return;
        }

        set(index, object);
    }

    //

    @Override
    public TObject.Version createVersion() {
        return new TArrayVersionTObject(this, length());
    }

    @Override
    public int getClassId() {
        if (of4gwt.misc.Debug.ENABLED)
            of4gwt.misc.Debug.assertion(length() >= 0);

        // Array versions use negative class ids derived from their length.
        return -length() - 1;
    }

    //

    /** Debug-only: checks values/bits parallelism and element kind per sharing state. */
    @SuppressWarnings("cast")
    @Override
    public void checkInvariants_() {
        super.checkInvariants_();

        if (getValues() != null) {
            of4gwt.misc.Debug.assertion(getValues().length == getBits().length);

            for (int i = 0; i < getValues().length; i++) {
                if (getValues()[i] != null) {
                    for (int t = 0; t < getValues()[i].length; t++) {
                        TObject value = getValues()[i][t];

                        if (value != null) {
                            of4gwt.misc.Debug.assertion(of4gwt.misc.Bits.get(getBits()[i].Value, t));

                            if (isShared())
                                of4gwt.misc.Debug.assertion(!(((Object) value) instanceof UserTObject));
                            else
                                of4gwt.misc.Debug.assertion(!(((Object) value) instanceof TObject.Version));
                        }
                    }
                }
            }
        }
    }
}
// End (for .NET)
|
/*
* Copyright 2014 Vitaly Litvak (vitavaque@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package su.litvak.chromecast.api.v2;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
/**
 * Utility for loading test fixture resources from the classpath.
 */
final class FixtureHelper {

    private FixtureHelper() {
        // static utility; no instances
    }

    /**
     * Reads a classpath resource fully into a string. Line terminators are
     * normalized to {@code "\n"} and a trailing newline is always appended,
     * matching the previous line-by-line behavior.
     *
     * @param res resource path, resolved against this class
     * @return the resource contents decoded as UTF-8
     * @throws FileNotFoundException if the resource does not exist (the old
     *         code failed with an unhelpful NullPointerException here)
     * @throws IOException if the resource cannot be read
     */
    static String fixtureAsString(final String res) throws IOException {
        final InputStream is = FixtureHelper.class.getResourceAsStream(res);
        if (is == null) {
            throw new FileNotFoundException("Fixture resource not found: " + res);
        }
        final StringBuilder sb = new StringBuilder();
        // try-with-resources closes the stream even when reading fails; the
        // charset is pinned to UTF-8 instead of the platform default.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(is, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
        }
        return sb.toString();
    }
}
|
package no.nav.security.token.support.core.exceptions;
/**
 * Signals that an issuer's configuration could not be resolved or is invalid.
 */
public class IssuerConfigurationException extends RuntimeException {

    /**
     * @param message description of the configuration problem
     * @param cause   underlying failure, may be null
     */
    public IssuerConfigurationException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * @param message description of the configuration problem
     */
    public IssuerConfigurationException(String message) {
        // Delegate with a null cause, exactly as before (note: this differs from
        // super(message), which would leave the cause open for a later initCause).
        this(message, null);
    }
}
|
package springboot.dao;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Component;
import springboot.modal.vo.OptionVo;
import springboot.modal.vo.OptionVoExample;
import java.util.List;
/**
 * MyBatis mapper for {@code OptionVo} rows, following the standard
 * Example/criteria CRUD contract generated by MyBatis Generator.
 */
@Component
@Mapper
public interface OptionVoMapper {

    /** Counts the rows matching the example criteria. */
    long countByExample(OptionVoExample example);

    /** Deletes all rows matching the example criteria; returns the row count. */
    int deleteByExample(OptionVoExample example);

    /** Deletes the row with the given primary key (name); returns the row count. */
    int deleteByPrimaryKey(String name);

    /** Inserts a full record (every column written); returns the row count. */
    int insert(OptionVo record);

    /** Inserts only the non-null fields of the record; returns the row count. */
    int insertSelective(OptionVo record);

    /** Returns all rows matching the example criteria. */
    List<OptionVo> selectByExample(OptionVoExample example);

    /** Returns the row with the given primary key (name), or null if absent. */
    OptionVo selectByPrimaryKey(String name);

    /** Updates non-null fields of matching rows; returns the row count. */
    int updateByExampleSelective(@Param("record") OptionVo record, @Param("example") OptionVoExample example);

    /** Updates all fields of matching rows; returns the row count. */
    int updateByExample(@Param("record") OptionVo record, @Param("example") OptionVoExample example);

    /** Updates non-null fields of the row with the record's key; returns the row count. */
    int updateByPrimaryKeySelective(OptionVo record);

    /** Updates all fields of the row with the record's key; returns the row count. */
    int updateByPrimaryKey(OptionVo record);

    /**
     * Batch-saves the given options.
     *
     * @param optionVos list of options to insert
     * @return number of rows saved
     */
    int insertOptions(List<OptionVo> optionVos);
}
|
package com.app.dsr.simpplrassignment.network.model;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
/**
 * Parcelable privacy wrapper mapped from the JSON key {@code "public"}.
 */
public class PlaylistFollowPrivacy implements Parcelable {

    /** Whether the follow is public; bound to the JSON field "public". May be null. */
    @SerializedName("public")
    public Boolean is_public;

    public PlaylistFollowPrivacy() {
    }

    /** Rebuilds the instance from a parcel; read order must mirror writeToParcel. */
    protected PlaylistFollowPrivacy(Parcel in) {
        is_public = (Boolean) in.readValue(Boolean.class.getClassLoader());
    }

    @Override
    public int describeContents() {
        // No special contents (e.g. file descriptors) in the marshalled form.
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeValue(is_public);
    }

    public static final Creator<PlaylistFollowPrivacy> CREATOR = new Creator<PlaylistFollowPrivacy>() {
        @Override
        public PlaylistFollowPrivacy createFromParcel(Parcel source) {
            return new PlaylistFollowPrivacy(source);
        }

        @Override
        public PlaylistFollowPrivacy[] newArray(int size) {
            return new PlaylistFollowPrivacy[size];
        }
    };
}
|
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.initialization;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import io.druid.guice.ExtensionsConfig;
import io.druid.guice.GuiceInjectors;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.annotations.Self;
import io.druid.java.util.common.ISE;
import io.druid.server.DruidNode;
import io.druid.server.initialization.ServerConfig;
import org.junit.Assert;
import org.junit.FixMethodOrder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runners.MethodSorters;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class InitializationTest
{
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
@Test
public void test01InitialModulesEmpty() throws Exception
{
Initialization.clearLoadedImplementations();
Assert.assertEquals(
"Initial set of loaded modules must be empty",
0,
Initialization.getLoadedImplementations(DruidModule.class).size()
);
}
@Test
public void test02MakeStartupInjector() throws Exception
{
Injector startupInjector = GuiceInjectors.makeStartupInjector();
Assert.assertNotNull(startupInjector);
Assert.assertNotNull(startupInjector.getInstance(ObjectMapper.class));
}
@Test
public void test03ClassLoaderExtensionsLoading()
{
Injector startupInjector = GuiceInjectors.makeStartupInjector();
Function<DruidModule, String> fnClassName = new Function<DruidModule, String>()
{
@Nullable
@Override
public String apply(@Nullable DruidModule input)
{
return input.getClass().getCanonicalName();
}
};
Assert.assertFalse(
"modules does not contain TestDruidModule",
Collections2.transform(Initialization.getLoadedImplementations(DruidModule.class), fnClassName)
.contains("io.druid.initialization.InitializationTest.TestDruidModule")
);
Collection<DruidModule> modules = Initialization.getFromExtensions(
startupInjector.getInstance(ExtensionsConfig.class),
DruidModule.class
);
Assert.assertTrue(
"modules contains TestDruidModule",
Collections2.transform(modules, fnClassName)
.contains("io.druid.initialization.InitializationTest.TestDruidModule")
);
}
@Test
public void test04DuplicateClassLoaderExtensions() throws Exception
{
final File extensionDir = temporaryFolder.newFolder();
Initialization.getLoadersMap().put(extensionDir, (URLClassLoader) Initialization.class.getClassLoader());
Collection<DruidModule> modules = Initialization.getFromExtensions(new ExtensionsConfig(), DruidModule.class);
Set<String> loadedModuleNames = Sets.newHashSet();
for (DruidModule module : modules) {
Assert.assertFalse("Duplicate extensions are loaded", loadedModuleNames.contains(module.getClass().getName()));
loadedModuleNames.add(module.getClass().getName());
}
Initialization.getLoadersMap().clear();
}
@Test
public void test05MakeInjectorWithModules() throws Exception
{
Injector startupInjector = GuiceInjectors.makeStartupInjector();
Injector injector = Initialization.makeInjectorWithModules(
startupInjector, ImmutableList.<com.google.inject.Module>of(
new com.google.inject.Module()
{
@Override
public void configure(Binder binder)
{
JsonConfigProvider.bindInstance(
binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig())
);
}
}
)
);
Assert.assertNotNull(injector);
}
@Test
public void test06GetClassLoaderForExtension() throws IOException
{
final File some_extension_dir = temporaryFolder.newFolder();
final File a_jar = new File(some_extension_dir, "a.jar");
final File b_jar = new File(some_extension_dir, "b.jar");
final File c_jar = new File(some_extension_dir, "c.jar");
a_jar.createNewFile();
b_jar.createNewFile();
c_jar.createNewFile();
final URLClassLoader loader = Initialization.getClassLoaderForExtension(some_extension_dir);
final URL[] expectedURLs = new URL[]{a_jar.toURI().toURL(), b_jar.toURI().toURL(), c_jar.toURI().toURL()};
final URL[] actualURLs = loader.getURLs();
Arrays.sort(
actualURLs, new Comparator<URL>()
{
@Override
public int compare(URL o1, URL o2)
{
return o1.getPath().compareTo(o2.getPath());
}
}
);
Assert.assertArrayEquals(expectedURLs, actualURLs);
}
@Test
public void testGetLoadedModules()
{
Collection<DruidModule> modules = Initialization.getLoadedImplementations(DruidModule.class);
HashSet<DruidModule> moduleSet = new HashSet<>(modules);
Collection<DruidModule> loadedModules = Initialization.getLoadedImplementations(DruidModule.class);
Assert.assertEquals("Set from loaded modules #1 should be same!", modules.size(), loadedModules.size());
Assert.assertEquals("Set from loaded modules #1 should be same!", moduleSet, new HashSet<>(loadedModules));
Collection<DruidModule> loadedModules2 = Initialization.getLoadedImplementations(DruidModule.class);
Assert.assertEquals("Set from loaded modules #2 should be same!", modules.size(), loadedModules2.size());
Assert.assertEquals("Set from loaded modules #2 should be same!", moduleSet, new HashSet<>(loadedModules2));
}
@Test
public void testGetExtensionFilesToLoad_non_exist_extensions_dir() throws IOException
{
final File tmpDir = temporaryFolder.newFolder();
Assert.assertTrue("could not create missing folder", !tmpDir.exists() || tmpDir.delete());
Assert.assertArrayEquals(
"Non-exist root extensionsDir should return an empty array of File",
new File[]{},
Initialization.getExtensionFilesToLoad(new ExtensionsConfig()
{
@Override
public String getDirectory()
{
return tmpDir.getAbsolutePath();
}
})
);
}
@Test(expected = ISE.class)
public void testGetExtensionFilesToLoad_wrong_type_extensions_dir() throws IOException
{
final File extensionsDir = temporaryFolder.newFile();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public String getDirectory()
{
return extensionsDir.getAbsolutePath();
}
};
Initialization.getExtensionFilesToLoad(config);
}
@Test
public void testGetExtensionFilesToLoad_empty_extensions_dir() throws IOException
{
final File extensionsDir = temporaryFolder.newFolder();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public String getDirectory()
{
return extensionsDir.getAbsolutePath();
}
};
Assert.assertArrayEquals(
"Empty root extensionsDir should return an empty array of File",
new File[]{},
Initialization.getExtensionFilesToLoad(config)
);
}
/**
* If druid.extension.load is not specified, Initialization.getExtensionFilesToLoad is supposed to return all the
* extension folders under root extensions directory.
*/
@Test
public void testGetExtensionFilesToLoad_null_load_list() throws IOException
{
final File extensionsDir = temporaryFolder.newFolder();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public String getDirectory()
{
return extensionsDir.getAbsolutePath();
}
};
final File mysql_metadata_storage = new File(extensionsDir, "mysql-metadata-storage");
final File druid_kafka_eight = new File(extensionsDir, "druid-kafka-eight");
mysql_metadata_storage.mkdir();
druid_kafka_eight.mkdir();
final File[] expectedFileList = new File[]{druid_kafka_eight, mysql_metadata_storage};
final File[] actualFileList = Initialization.getExtensionFilesToLoad(config);
Arrays.sort(actualFileList);
Assert.assertArrayEquals(expectedFileList, actualFileList);
}
/**
* druid.extension.load is specified, Initialization.getExtensionFilesToLoad is supposed to return all the extension
* folders appeared in the load list.
*/
@Test
public void testGetExtensionFilesToLoad_with_load_list() throws IOException
{
final File extensionsDir = temporaryFolder.newFolder();
final File absolutePathExtension = temporaryFolder.newFolder();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public List<String> getLoadList()
{
return Arrays.asList("mysql-metadata-storage", "druid-kafka-eight", absolutePathExtension.getAbsolutePath());
}
@Override
public String getDirectory()
{
return extensionsDir.getAbsolutePath();
}
};
final File mysql_metadata_storage = new File(extensionsDir, "mysql-metadata-storage");
final File druid_kafka_eight = new File(extensionsDir, "druid-kafka-eight");
final File random_extension = new File(extensionsDir, "random-extensions");
mysql_metadata_storage.mkdir();
druid_kafka_eight.mkdir();
random_extension.mkdir();
final File[] expectedFileList = new File[]{mysql_metadata_storage, druid_kafka_eight, absolutePathExtension};
final File[] actualFileList = Initialization.getExtensionFilesToLoad(config);
Assert.assertArrayEquals(expectedFileList, actualFileList);
}
/**
* druid.extension.load is specified, but contains an extension that is not prepared under root extension directory.
* Initialization.getExtensionFilesToLoad is supposed to throw ISE.
*/
@Test(expected = ISE.class)
public void testGetExtensionFilesToLoad_with_non_exist_item_in_load_list() throws IOException
{
final File extensionsDir = temporaryFolder.newFolder();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public List<String> getLoadList()
{
return Arrays.asList("mysql-metadata-storage", "druid-kafka-eight");
}
@Override
public String getDirectory()
{
return extensionsDir.getAbsolutePath();
}
};
final File druid_kafka_eight = new File(extensionsDir, "druid-kafka-eight");
final File random_extension = new File(extensionsDir, "random-extensions");
druid_kafka_eight.mkdir();
random_extension.mkdir();
Initialization.getExtensionFilesToLoad(config);
}
@Test(expected = ISE.class)
public void testGetHadoopDependencyFilesToLoad_wrong_type_root_hadoop_depenencies_dir() throws IOException
{
final File rootHadoopDependenciesDir = temporaryFolder.newFile();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public String getHadoopDependenciesDir()
{
return rootHadoopDependenciesDir.getAbsolutePath();
}
};
Initialization.getHadoopDependencyFilesToLoad(ImmutableList.<String>of(), config);
}
@Test(expected = ISE.class)
public void testGetHadoopDependencyFilesToLoad_non_exist_version_dir() throws IOException
{
final File rootHadoopDependenciesDir = temporaryFolder.newFolder();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public String getHadoopDependenciesDir()
{
return rootHadoopDependenciesDir.getAbsolutePath();
}
};
final File hadoopClient = new File(rootHadoopDependenciesDir, "hadoop-client");
hadoopClient.mkdir();
Initialization.getHadoopDependencyFilesToLoad(ImmutableList.of("org.apache.hadoop:hadoop-client:2.3.0"), config);
}
@Test
public void testGetHadoopDependencyFilesToLoad_with_hadoop_coordinates() throws IOException
{
final File rootHadoopDependenciesDir = temporaryFolder.newFolder();
final ExtensionsConfig config = new ExtensionsConfig()
{
@Override
public String getHadoopDependenciesDir()
{
return rootHadoopDependenciesDir.getAbsolutePath();
}
};
final File hadoopClient = new File(rootHadoopDependenciesDir, "hadoop-client");
final File versionDir = new File(hadoopClient, "2.3.0");
hadoopClient.mkdir();
versionDir.mkdir();
final File[] expectedFileList = new File[]{versionDir};
final File[] actualFileList = Initialization.getHadoopDependencyFilesToLoad(
ImmutableList.of(
"org.apache.hadoop:hadoop-client:2.3.0"
), config
);
Assert.assertArrayEquals(expectedFileList, actualFileList);
}
@Test
public void testGetURLsForClasspath() throws Exception
{
  File tmpDir1 = temporaryFolder.newFolder();
  File tmpDir2 = temporaryFolder.newFolder();
  File tmpDir3 = temporaryFolder.newFolder();
  // tmpDir1 and tmpDir2 are referenced with a "*" wildcard, so only their jar
  // entries should be expanded; the .txt files must be ignored. tmpDir3 is
  // referenced directly (no wildcard) and should appear as-is.
  File tmpDir1a = new File(tmpDir1, "a.jar");
  tmpDir1a.createNewFile();
  File tmpDir1b = new File(tmpDir1, "b.jar");
  tmpDir1b.createNewFile();
  new File(tmpDir1, "note1.txt").createNewFile();
  File tmpDir2c = new File(tmpDir2, "c.jar");
  tmpDir2c.createNewFile();
  File tmpDir2d = new File(tmpDir2, "d.jar");
  tmpDir2d.createNewFile();
  // Upper-case extension: expected in the output below, i.e. the wildcard
  // match is case-insensitive on the ".jar" suffix.
  File tmpDir2e = new File(tmpDir2, "e.JAR");
  tmpDir2e.createNewFile();
  new File(tmpDir2, "note2.txt").createNewFile();
  String cp = tmpDir1.getAbsolutePath() + File.separator + "*"
      + File.pathSeparator
      + tmpDir3.getAbsolutePath()
      + File.pathSeparator
      + tmpDir2.getAbsolutePath() + File.separator + "*";
  // getURLsForClasspath uses listFiles which does NOT guarantee any ordering for the name strings.
  // Hence the per-directory groups are compared as sets, while the relative
  // order of the classpath entries themselves (dir1 jars, dir3, dir2 jars)
  // is asserted positionally.
  List<URL> urLsForClasspath = Initialization.getURLsForClasspath(cp);
  Assert.assertEquals(Sets.newHashSet(tmpDir1a.toURI().toURL(), tmpDir1b.toURI().toURL()),
      Sets.newHashSet(urLsForClasspath.subList(0, 2)));
  Assert.assertEquals(tmpDir3.toURI().toURL(), urLsForClasspath.get(2));
  Assert.assertEquals(Sets.newHashSet(tmpDir2c.toURI().toURL(), tmpDir2d.toURI().toURL(), tmpDir2e.toURI().toURL()),
      Sets.newHashSet(urLsForClasspath.subList(3, 6)));
}
@Test
public void testExtensionsWithSameDirName() throws Exception
{
  // Two extensions that share the same directory *name* but live under
  // different parents must each get their own class loader containing only
  // their own jars.
  final String extensionName = "some_extension";
  final File tmpDir1 = temporaryFolder.newFolder();
  final File tmpDir2 = temporaryFolder.newFolder();
  final File extension1 = new File(tmpDir1, extensionName);
  final File extension2 = new File(tmpDir2, extensionName);
  Assert.assertTrue(extension1.mkdir());
  Assert.assertTrue(extension2.mkdir());
  final File jar1 = new File(extension1, "jar1.jar");
  final File jar2 = new File(extension2, "jar2.jar");
  Assert.assertTrue(jar1.createNewFile());
  Assert.assertTrue(jar2.createNewFile());
  final ClassLoader classLoader1 = Initialization.getClassLoaderForExtension(extension1);
  final ClassLoader classLoader2 = Initialization.getClassLoaderForExtension(extension2);
  // File.toURL() is deprecated because it does not escape illegal URL
  // characters; go through toURI().toURL() instead, which is also how
  // URLClassLoader URLs are conventionally built.
  Assert.assertArrayEquals(new URL[]{jar1.toURI().toURL()}, ((URLClassLoader) classLoader1).getURLs());
  Assert.assertArrayEquals(new URL[]{jar2.toURI().toURL()}, ((URLClassLoader) classLoader2).getURLs());
}
/**
 * Minimal no-op {@code DruidModule} test fixture: contributes no Jackson
 * modules and installs no Guice bindings.
 */
public static class TestDruidModule implements DruidModule
{
  @Override
  public List<? extends Module> getJacksonModules()
  {
    return ImmutableList.of();
  }
  @Override
  public void configure(Binder binder)
  {
    // Do nothing
  }
}
}
|
package tech.rpairo.energyseries.retrofit.response;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import tech.rpairo.energyseries.gson.GsonKeys;
import tech.rpairo.energyseries.model.Serie;
/**
 * Gson response wrapper: deserializes the array stored under the JSON key
 * declared in {@code GsonKeys.RESULTS} into a list of {@link Serie} models.
 *
 * Created by Raul on 24/6/16.
 */
public class ResponseSeries {
//region Variables
// Populated by Gson from the key named by GsonKeys.RESULTS.
@SerializedName(GsonKeys.RESULTS)
private ArrayList<Serie> series;
//endregion
//region Getters & Setters
/** @return the deserialized series list (may be null if the key was absent in the payload). */
public ArrayList<Serie> getSeries() {
    return this.series;
}
//endregion
}
|
package awe.idea.com.batch.controller;
import awe.idea.com.batch.service.BatchScheduleJobService;
import awe.idea.com.common.annotation.SysLog;
import awe.idea.com.common.utils.PageUtils;
import awe.idea.com.common.utils.Query;
import awe.idea.com.common.utils.R;
import awe.idea.com.common.validators.ValidatorUtils;
import awe.idea.com.service.entity.ScheduleJobEntity;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
 * Scheduled-job management endpoints (list/info/save/update/delete and
 * run/pause/resume operations), delegating to {@code BatchScheduleJobService}.
 *
 * @author chenshun
 * @email sunlightcs@gmail.com
 * @date Nov 28, 2016, 2:16:40 PM
 */
@RestController
@RequestMapping("/sys/schedulejob")
public class ScheduleJobController extends AbstractController {
    @Autowired
    private BatchScheduleJobService scheduleJobService;
    /**
     * Lists scheduled jobs, paginated according to the request parameters.
     */
    @RequestMapping("/list")
    @RequiresPermissions("schedulejob:list")
    public R list(@RequestParam Map<String, Object> params){
        // Query the page of jobs plus the total count for pagination metadata.
        Query query = new Query(params);
        List<ScheduleJobEntity> jobList = scheduleJobService.queryList(query);
        int total = scheduleJobService.queryTotal(query);
        PageUtils pageUtil = new PageUtils(jobList, total, query.getLimit(), query.getPage());
        return R.ok().put("page", pageUtil);
    }
    /**
     * Returns the details of a single scheduled job.
     */
    @RequestMapping("/info/{jobId}")
    @RequiresPermissions("schedulejob:info")
    public R info(@PathVariable("jobId") Long jobId){
        ScheduleJobEntity schedule = scheduleJobService.queryObject(jobId);
        return R.ok().put("schedule", schedule);
    }
    /**
     * Validates and saves a new scheduled job.
     */
    @SysLog("保存定时任务")
    @RequestMapping("/save")
    @RequiresPermissions("schedulejob:save")
    public R save(@RequestBody ScheduleJobEntity scheduleJob){
        ValidatorUtils.validateEntity(scheduleJob);
        scheduleJobService.save(scheduleJob);
        return R.ok();
    }
    /**
     * Validates and updates an existing scheduled job.
     */
    @SysLog("修改定时任务")
    @RequestMapping("/update")
    @RequiresPermissions("schedulejob:update")
    public R update(@RequestBody ScheduleJobEntity scheduleJob){
        ValidatorUtils.validateEntity(scheduleJob);
        scheduleJobService.update(scheduleJob);
        return R.ok();
    }
    /**
     * Deletes the given scheduled jobs in batch.
     */
    @SysLog("删除定时任务")
    @RequestMapping("/delete")
    @RequiresPermissions("schedulejob:delete")
    public R delete(@RequestBody Long[] jobIds){
        scheduleJobService.deleteBatch(jobIds);
        return R.ok();
    }
    /**
     * Triggers the given jobs to run immediately.
     */
    @SysLog("立即执行任务")
    @RequestMapping("/run")
    @RequiresPermissions("schedulejob:run")
    public R run(@RequestBody Long[] jobIds){
        scheduleJobService.run(jobIds);
        return R.ok();
    }
    /**
     * Pauses the given scheduled jobs.
     */
    @SysLog("暂停定时任务")
    @RequestMapping("/pause")
    @RequiresPermissions("schedulejob:pause")
    public R pause(@RequestBody Long[] jobIds){
        scheduleJobService.pause(jobIds);
        return R.ok();
    }
    /**
     * Resumes the given (paused) scheduled jobs.
     */
    @SysLog("恢复定时任务")
    @RequestMapping("/resume")
    @RequiresPermissions("schedulejob:resume")
    public R resume(@RequestBody Long[] jobIds){
        scheduleJobService.resume(jobIds);
        return R.ok();
    }
}
|
package pl.bmstefanski.website;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
public class WebConfiguration implements WebMvcConfigurer {
  /**
   * Opens every endpoint ("/**") to cross-origin requests from the local
   * development front end, with credentials (cookies/authorization headers)
   * allowed.
   */
  @Override
  public void addCorsMappings(CorsRegistry registry) {
    registry.addMapping("/**")
        .allowedOrigins("http://localhost:8080")
        .allowedMethods("GET", "POST", "HEAD", "OPTIONS", "PUT", "PATCH", "DELETE")
        .allowCredentials(true);
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.metadata.iso.acquisition;
import java.util.Date;
import java.util.Collection;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.opengis.metadata.Identifier;
import org.opengis.metadata.acquisition.Plan;
import org.opengis.metadata.acquisition.Priority;
import org.opengis.metadata.acquisition.RequestedDate;
import org.opengis.metadata.acquisition.Requirement;
import org.opengis.metadata.citation.Citation;
import org.opengis.metadata.citation.ResponsibleParty;
import org.apache.sis.metadata.iso.ISOMetadata;
import static org.apache.sis.internal.metadata.MetadataUtilities.toDate;
import static org.apache.sis.internal.metadata.MetadataUtilities.toMilliseconds;
/**
* Requirement to be satisfied by the planned data acquisition.
* The following properties are mandatory or conditional (i.e. mandatory under some circumstances)
* in a well-formed metadata according ISO 19115:
*
* <div class="preformat">{@code MI_Requirement}
* {@code ├─identifier………………………………………………………} Unique name, or code, for the requirement.
* {@code │ └─code……………………………………………………………} Alphanumeric value identifying an instance in the namespace.
* {@code ├─requestor…………………………………………………………} Origin of requirement.
* {@code │ ├─party…………………………………………………………} Information about the parties.
* {@code │ │ └─name…………………………………………………} Name of the party.
* {@code │ └─role……………………………………………………………} Function performed by the responsible party.
* {@code ├─recipient…………………………………………………………} Person(s), or body(ies), to receive results of requirement.
* {@code ├─priority……………………………………………………………} Relative ordered importance, or urgency, of the requirement.
* {@code ├─requestedDate………………………………………………} Required or preferred acquisition date and time.
* {@code │ ├─requestedDateOfCollection……} Preferred date and time of collection.
* {@code │ └─latestAcceptableDate…………………} Latest date and time collection must be completed.
* {@code └─expiryDate………………………………………………………} Date and time after which collection is no longer valid.</div>
*
* <p><b>Limitations:</b></p>
* <ul>
* <li>Instances of this class are not synchronized for multi-threading.
* Synchronization, if needed, is caller's responsibility.</li>
* <li>Serialized objects of this class are not guaranteed to be compatible with future Apache SIS releases.
* Serialization support is appropriate for short term storage or RMI between applications running the
* same version of Apache SIS. For long term storage, use {@link org.apache.sis.xml.XML} instead.</li>
* </ul>
*
* @author Cédric Briançon (Geomatys)
* @author Martin Desruisseaux (Geomatys)
* @version 1.0
* @since 0.3
* @module
*/
@XmlType(name = "MI_Requirement_Type", propOrder = {
    "citation",
    "identifier",
    "requestors",
    "recipients",
    "priority",
    "requestedDate",
    "expiryDate",
    "satisfiedPlans"
})
@XmlRootElement(name = "MI_Requirement")
public class DefaultRequirement extends ISOMetadata implements Requirement {
    /**
     * Serial number for inter-operability with different versions.
     */
    private static final long serialVersionUID = -4987984804974769238L;
    /**
     * Identification of reference or guidance material for the requirement.
     */
    private Citation citation;
    /**
     * Origin of requirement.
     */
    private Collection<ResponsibleParty> requestors;
    /**
     * Person(s), or body(ies), to receive results of requirement.
     */
    private Collection<ResponsibleParty> recipients;
    /**
     * Relative ordered importance, or urgency, of the requirement.
     */
    private Priority priority;
    /**
     * Required or preferred acquisition date and time.
     */
    private RequestedDate requestedDate;
    /**
     * Date and time after which collection is no longer valid,
     * or {@link Long#MIN_VALUE} if none.
     */
    private long expiryDate = Long.MIN_VALUE;
    /**
     * Plan that identifies solution to satisfy the requirement.
     */
    private Collection<Plan> satisfiedPlans;
    /**
     * Constructs an initially empty requirement.
     */
    public DefaultRequirement() {
    }
    /**
     * Constructs a new instance initialized with the values from the specified metadata object.
     * This is a <cite>shallow</cite> copy constructor, since the other metadata contained in the
     * given object are not recursively copied.
     *
     * @param object the metadata to copy values from, or {@code null} if none.
     *
     * @see #castOrCopy(Requirement)
     */
    public DefaultRequirement(final Requirement object) {
        super(object);
        if (object != null) {
            citation       = object.getCitation();
            identifiers    = singleton(object.getIdentifier(), Identifier.class);
            requestors     = copyCollection(object.getRequestors(), ResponsibleParty.class);
            recipients     = copyCollection(object.getRecipients(), ResponsibleParty.class);
            priority       = object.getPriority();
            requestedDate  = object.getRequestedDate();
            expiryDate     = toMilliseconds(object.getExpiryDate());
            satisfiedPlans = copyCollection(object.getSatisfiedPlans(), Plan.class);
        }
    }
    /**
     * Returns a SIS metadata implementation with the values of the given arbitrary implementation.
     * This method performs the first applicable action in the following choices:
     *
     * <ul>
     *   <li>If the given object is {@code null}, then this method returns {@code null}.</li>
     *   <li>Otherwise if the given object is already an instance of
     *       {@code DefaultRequirement}, then it is returned unchanged.</li>
     *   <li>Otherwise a new {@code DefaultRequirement} instance is created using the
     *       {@linkplain #DefaultRequirement(Requirement) copy constructor}
     *       and returned. Note that this is a <cite>shallow</cite> copy operation, since the other
     *       metadata contained in the given object are not recursively copied.</li>
     * </ul>
     *
     * @param object the object to get as a SIS implementation, or {@code null} if none.
     * @return a SIS implementation containing the values of the given object (may be the
     *         given object itself), or {@code null} if the argument was null.
     */
    public static DefaultRequirement castOrCopy(final Requirement object) {
        if (object == null || object instanceof DefaultRequirement) {
            return (DefaultRequirement) object;
        }
        return new DefaultRequirement(object);
    }
    /**
     * Returns the identification of reference or guidance material for the requirement.
     * {@code null} if unspecified.
     *
     * @return identification of reference or guidance material, or {@code null}.
     */
    @Override
    @XmlElement(name = "citation")
    public Citation getCitation() {
        return citation;
    }
    /**
     * Sets the identification of reference or guidance material for the requirement.
     *
     * @param newValue the new citation value.
     */
    public void setCitation(final Citation newValue) {
        checkWritePermission(citation);
        citation = newValue;
    }
    /**
     * Returns the unique name, or code, for the requirement.
     *
     * @return unique name or code, or {@code null}.
     */
    @Override
    @XmlElement(name = "identifier", required = true)
    public Identifier getIdentifier() {
        return super.getIdentifier();
    }
    /**
     * Sets the unique name, or code, for the requirement.
     *
     * @param newValue the new identifier value.
     */
    @Override
    public void setIdentifier(final Identifier newValue) {
        super.setIdentifier(newValue);
    }
    /**
     * Returns the origin of requirement.
     *
     * <div class="warning"><b>Upcoming API change — generalization</b><br>
     * As of ISO 19115:2014, {@code ResponsibleParty} is replaced by the {@code Responsibility} parent interface.
     * This change will be tentatively applied in GeoAPI 4.0.
     * </div>
     *
     * @return origin of requirement.
     */
    @Override
    @XmlElement(name = "requestor", required = true)
    public Collection<ResponsibleParty> getRequestors() {
        return requestors = nonNullCollection(requestors, ResponsibleParty.class);
    }
    /**
     * Sets the origin of requirement.
     *
     * <div class="warning"><b>Upcoming API change — generalization</b><br>
     * As of ISO 19115:2014, {@code ResponsibleParty} is replaced by the {@code Responsibility} parent interface.
     * This change will be tentatively applied in GeoAPI 4.0.
     * </div>
     *
     * @param newValues the new requestors values.
     */
    public void setRequestors(final Collection<? extends ResponsibleParty> newValues) {
        requestors = writeCollection(newValues, requestors, ResponsibleParty.class);
    }
    /**
     * Returns the person(s), or body(ies), to receive results of requirement.
     *
     * <div class="warning"><b>Upcoming API change — generalization</b><br>
     * As of ISO 19115:2014, {@code ResponsibleParty} is replaced by the {@code Responsibility} parent interface.
     * This change will be tentatively applied in GeoAPI 4.0.
     * </div>
     *
     * @return person(s), or body(ies), to receive results.
     */
    @Override
    @XmlElement(name = "recipient", required = true)
    public Collection<ResponsibleParty> getRecipients() {
        return recipients = nonNullCollection(recipients, ResponsibleParty.class);
    }
    /**
     * Sets the Person(s), or body(ies), to receive results of requirement.
     *
     * <div class="warning"><b>Upcoming API change — generalization</b><br>
     * As of ISO 19115:2014, {@code ResponsibleParty} is replaced by the {@code Responsibility} parent interface.
     * This change will be tentatively applied in GeoAPI 4.0.
     * </div>
     *
     * @param newValues the new recipients values.
     */
    public void setRecipients(final Collection<? extends ResponsibleParty> newValues) {
        recipients = writeCollection(newValues, recipients, ResponsibleParty.class);
    }
    /**
     * Returns the relative ordered importance, or urgency, of the requirement.
     *
     * @return relative ordered importance, or urgency, or {@code null}.
     */
    @Override
    @XmlElement(name = "priority", required = true)
    public Priority getPriority() {
        return priority;
    }
    /**
     * Sets the relative ordered importance, or urgency, of the requirement.
     *
     * @param newValue the new priority value.
     */
    public void setPriority(final Priority newValue) {
        checkWritePermission(priority);
        priority = newValue;
    }
    /**
     * Returns the required or preferred acquisition date and time.
     *
     * @return required or preferred acquisition date and time, or {@code null}.
     */
    @Override
    @XmlElement(name = "requestedDate", required = true)
    public RequestedDate getRequestedDate() {
        return requestedDate;
    }
    /**
     * Sets the required or preferred acquisition date and time.
     *
     * @param newValue the new requested date value.
     */
    public void setRequestedDate(final RequestedDate newValue) {
        checkWritePermission(requestedDate);
        requestedDate = newValue;
    }
    /**
     * Returns the date and time after which collection is no longer valid.
     *
     * @return date and time after which collection is no longer valid, or {@code null}.
     */
    @Override
    @XmlElement(name = "expiryDate", required = true)
    public Date getExpiryDate() {
        return toDate(expiryDate);
    }
    /**
     * Sets the date and time after which collection is no longer valid.
     *
     * @param newValue the new expiry date.
     */
    public void setExpiryDate(final Date newValue) {
        checkWritePermission(toDate(expiryDate));
        expiryDate = toMilliseconds(newValue);
    }
    /**
     * Returns the plan that identifies solution to satisfy the requirement.
     *
     * @return plan that identifies solution to satisfy the requirement.
     */
    @Override
    @XmlElement(name = "satisifiedPlan")     // Really spelled that way in XSD file.
    public Collection<Plan> getSatisfiedPlans() {
        return satisfiedPlans = nonNullCollection(satisfiedPlans, Plan.class);
    }
    /**
     * Sets the plan that identifies solution to satisfy the requirement.
     *
     * @param newValues the new satisfied plans values.
     */
    public void setSatisfiedPlans(final Collection<? extends Plan> newValues) {
        satisfiedPlans = writeCollection(newValues, satisfiedPlans, Plan.class);
    }
}
|
package jetbrains.mps.refactoringTest;
/*Generated by MPS */
import jetbrains.mps.MPSLaunch;
import jetbrains.mps.lang.test.runtime.BaseTransformationTest;
import org.junit.ClassRule;
import jetbrains.mps.lang.test.runtime.TestParametersCache;
import org.junit.Rule;
import jetbrains.mps.lang.test.runtime.RunWithCommand;
import org.junit.Test;
import jetbrains.mps.lang.test.runtime.BaseTestBody;
import jetbrains.mps.lang.test.runtime.TransformationTest;
import jetbrains.mps.baseLanguage.util.plugin.refactorings.InlineMethodRefactoring;
import java.util.List;
import org.jetbrains.mps.openapi.model.SNode;
import jetbrains.mps.internal.collections.runtime.ListSequence;
import java.util.ArrayList;
import org.junit.Assert;
import jetbrains.mps.lang.test.matcher.NodesMatcher;
/**
 * Generated by MPS — do not edit by hand; change the originating test model
 * instead. Applies the inline-method refactoring to a node from the test
 * model and compares the result against an expected node tree.
 */
@MPSLaunch
public class InlineMethodWithUnusedParameters_Test extends BaseTransformationTest {
  @ClassRule
  public static final TestParametersCache ourParamCache = new TestParametersCache(InlineMethodWithUnusedParameters_Test.class, "${mps_home}", "r:4dc6ffb5-4bbb-4773-b0b7-e52989ceb56f(jetbrains.mps.refactoringTest@tests)", false);
  @Rule
  public final RunWithCommand myWithCommandRule = new RunWithCommand(this);
  public InlineMethodWithUnusedParameters_Test() {
    super(ourParamCache);
  }
  @Test
  public void test_InlineMethodWithUnusedParameters() throws Throwable {
    new TestBody(this).test_InlineMethodWithUnusedParameters();
  }
  /*package*/ static class TestBody extends BaseTestBody {
    /*package*/ TestBody(TransformationTest owner) {
      super(owner);
    }
    public void test_InlineMethodWithUnusedParameters() throws Exception {
      // Load the model nodes referenced by the test — presumably the input and
      // expected trees; ids come from the generated model (TODO confirm).
      addNodeById("4412735672778826495");
      addNodeById("4412735672778826537");
      // Run the refactoring on the target node, then diff the resulting tree
      // against the expected one; any difference fails the test.
      InlineMethodRefactoring ref = new InlineMethodRefactoring(getNodeById("4412735672778826530"));
      ref.doRefactor();
      {
        List<SNode> nodesBefore = ListSequence.fromListAndArray(new ArrayList<SNode>(), getNodeById("4412735672778826503"));
        List<SNode> nodesAfter = ListSequence.fromListAndArray(new ArrayList<SNode>(), getNodeById("4412735672778826549"));
        Assert.assertTrue("The nodes '" + nodesBefore + "' and '" + nodesAfter + "' do not match!", new NodesMatcher(nodesBefore, nodesAfter).diff().isEmpty());
      }
    }
  }
}
|
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jms.listener;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.JMSException;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.util.backoff.BackOff;
import org.springframework.util.backoff.BackOffExecution;
import static org.junit.Assert.*;
import static org.mockito.BDDMockito.*;
/**
 * Tests for the connection-recovery back-off behavior of
 * {@code DefaultMessageListenerContainer}.
 *
 * @author Stephane Nicoll
 */
public class DefaultMessageListenerContainerTests {
	@Test
	public void applyBackOff() {
		// A BackOffExecution that immediately returns STOP: the container must
		// give up after the first failed refresh attempt and stop itself.
		BackOff mock = mock(BackOff.class);
		BackOffExecution execution = mock(BackOffExecution.class);
		given(execution.nextBackOff()).willReturn(BackOffExecution.STOP);
		given(mock.start()).willReturn(execution);
		DefaultMessageListenerContainer container = createContainer(mock, createFailingContainerFactory());
		container.start();
		assertEquals(true, container.isRunning());
		container.refreshConnectionUntilSuccessful();
		assertEquals(false, container.isRunning());
		verify(mock).start();
		verify(execution).nextBackOff();
	}
	@Test
	public void applyBackOffRetry() {
		// One retry (50 ms) then STOP: the container retries once, still fails,
		// and stops; nextBackOff must have been consulted twice.
		BackOff mock = mock(BackOff.class);
		BackOffExecution execution = mock(BackOffExecution.class);
		given(execution.nextBackOff()).willReturn(50L, BackOffExecution.STOP);
		given(mock.start()).willReturn(execution);
		DefaultMessageListenerContainer container = createContainer(mock, createFailingContainerFactory());
		container.start();
		container.refreshConnectionUntilSuccessful();
		assertEquals(false, container.isRunning());
		verify(mock).start();
		verify(execution, times(2)).nextBackOff();
	}
	@Test
	public void recoverResetBackOff() {
		// The factory fails once and then recovers; the container must keep
		// running and only one back-off step should have been consumed.
		BackOff mock = mock(BackOff.class);
		BackOffExecution execution = mock(BackOffExecution.class);
		given(execution.nextBackOff()).willReturn(50L, 50L, 50L); // 3 attempts max
		given(mock.start()).willReturn(execution);
		DefaultMessageListenerContainer container = createContainer(mock, createRecoverableContainerFactory(1));
		container.start();
		container.refreshConnectionUntilSuccessful();
		assertEquals(true, container.isRunning());
		verify(mock).start();
		verify(execution, times(1)).nextBackOff(); // only one attempt, as the second one led to a recovery
	}
	/**
	 * Builds a non-caching container wired to the given back-off policy and
	 * connection factory.
	 */
	@SuppressWarnings("unchecked")
	private DefaultMessageListenerContainer createContainer(BackOff backOff, ConnectionFactory connectionFactory) {
		Destination destination = new Destination() {};
		DefaultMessageListenerContainer container = new DefaultMessageListenerContainer();
		container.setConnectionFactory(connectionFactory);
		container.setCacheLevel(DefaultMessageListenerContainer.CACHE_NONE);
		container.setDestination(destination);
		container.setBackOff(backOff);
		return container;
	}
	/**
	 * A mock factory whose {@code createConnection()} always throws, so every
	 * refresh attempt fails.
	 */
	private ConnectionFactory createFailingContainerFactory() {
		try {
			ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
			given(connectionFactory.createConnection()).will(new Answer<Object>() {
				@Override
				public Object answer(InvocationOnMock invocation) throws Throwable {
					throw new JMSException("Test exception");
				}
			});
			return connectionFactory;
		}
		catch (JMSException e) {
			throw new IllegalStateException(); // can never happen: the mock's stubbing does not throw here
		}
	}
	/**
	 * A mock factory whose {@code createConnection()} throws for the first
	 * {@code failingAttempts} invocations and then succeeds.
	 */
	private ConnectionFactory createRecoverableContainerFactory(final int failingAttempts) {
		try {
			ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
			given(connectionFactory.createConnection()).will(new Answer<Object>() {
				int currentAttempts = 0;
				@Override
				public Object answer(InvocationOnMock invocation) throws Throwable {
					currentAttempts++;
					if (currentAttempts <= failingAttempts) {
						throw new JMSException("Test exception (attempt " + currentAttempts + ")");
					}
					else {
						return mock(Connection.class);
					}
				}
			});
			return connectionFactory;
		}
		catch (JMSException e) {
			throw new IllegalStateException(); // can never happen: the mock's stubbing does not throw here
		}
	}
}
|
/*
* Copyright 2021 Hazelcast Inc.
*
* Licensed under the Hazelcast Community License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://hazelcast.com/hazelcast-community-license
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.elastic.impl;
import com.hazelcast.function.FunctionEx;
import com.hazelcast.function.SupplierEx;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.search.SearchHit;
import javax.annotation.Nonnull;
import java.io.Serializable;
/**
 * Configuration for the Elastic source which is Serializable.
 *
 * Avoids passing multiple parameters via constructors along the chain
 * Builder -> ElasticSourcePMetaSupplier -> ElasticSourcePSupplier
 * -> ElasticSourceP -> ElasticScrollTraverser
 *
 * @param <T> type of the items produced by {@link #mapToItemFn()}
 */
public class ElasticSourceConfiguration<T> implements Serializable {
    private static final long serialVersionUID = 1L;

    private final SupplierEx<RestHighLevelClient> clientFn;
    private final SupplierEx<SearchRequest> searchRequestFn;
    private final FunctionEx<? super ActionRequest, RequestOptions> optionsFn;
    private final FunctionEx<? super SearchHit, T> mapToItemFn;
    private final boolean slicing;
    private final boolean coLocatedReading;
    private final String scrollKeepAlive;
    private final int retries;

    /**
     * @param clientFn         supplier of the Elasticsearch client (suppliers are used
     *                         because the client itself is not serializable)
     * @param searchRequestFn  supplier of the search request to execute
     * @param optionsFn        maps each request to the {@link RequestOptions} to use
     * @param mapToItemFn      maps each {@link SearchHit} to an emitted item
     * @param slicing          whether to use sliced scrolling
     * @param coLocatedReading whether to read from co-located shards
     * @param scrollKeepAlive  scroll keep-alive timeout (e.g. "1m")
     * @param retries          number of retries for failed requests
     */
    public ElasticSourceConfiguration(
            SupplierEx<RestHighLevelClient> clientFn,
            SupplierEx<SearchRequest> searchRequestFn,
            FunctionEx<? super ActionRequest, RequestOptions> optionsFn,
            FunctionEx<? super SearchHit, T> mapToItemFn,
            boolean slicing, boolean coLocatedReading,
            String scrollKeepAlive, int retries
    ) {
        this.clientFn = clientFn;
        this.searchRequestFn = searchRequestFn;
        this.optionsFn = optionsFn;
        this.mapToItemFn = mapToItemFn;
        this.slicing = slicing;
        this.coLocatedReading = coLocatedReading;
        this.scrollKeepAlive = scrollKeepAlive;
        this.retries = retries;
    }

    @Nonnull
    public SupplierEx<RestHighLevelClient> clientFn() {
        return clientFn;
    }

    @Nonnull
    public SupplierEx<SearchRequest> searchRequestFn() {
        return searchRequestFn;
    }

    @Nonnull
    public FunctionEx<? super SearchHit, T> mapToItemFn() {
        return mapToItemFn;
    }

    // @Nonnull added for consistency: this field is assigned exactly like the
    // other function fields whose accessors are already annotated.
    @Nonnull
    public FunctionEx<? super ActionRequest, RequestOptions> optionsFn() {
        return optionsFn;
    }

    public boolean isSlicingEnabled() {
        return slicing;
    }

    public boolean isCoLocatedReadingEnabled() {
        return coLocatedReading;
    }

    public String scrollKeepAlive() {
        return scrollKeepAlive;
    }

    public int retries() {
        return retries;
    }
}
|
package com.bumptech.glide.request.target;
import android.graphics.drawable.Drawable;
import android.support.annotation.Nullable;
import com.bumptech.glide.request.Request;
/**
* A base {@link Target} for loading {@link com.bumptech.glide.load.engine.Resource}s that provides
* basic or empty implementations for most methods.
*
* <p> For maximum efficiency, clear this target when you have finished using or displaying the
* {@link com.bumptech.glide.load.engine.Resource} loaded into it using
* {@link com.bumptech.glide.RequestManager#clear(Target)}.</p>
*
* <p> For loading {@link com.bumptech.glide.load.engine.Resource}s into {@link android.view.View}s,
* {@link com.bumptech.glide.request.target.ViewTarget} or
* {@link com.bumptech.glide.request.target.ImageViewTarget} are preferable.</p>
*
* @param <Z> The type of resource that will be received by this target.
*/
public abstract class BaseTarget<Z> implements Target<Z> {
  // The request currently associated with this target; may be null.
  private Request request;
  /** Stores the given request so it can be retrieved later via {@link #getRequest()}. */
  @Override
  public void setRequest(@Nullable Request request) {
    this.request = request;
  }
  /** @return the request set via {@link #setRequest(Request)}, or {@code null} if none. */
  @Override
  @Nullable
  public Request getRequest() {
    return request;
  }
  /** No-op by default; subclasses may react to the load being cleared. */
  @Override
  public void onLoadCleared(@Nullable Drawable placeholder) {
    // Do nothing.
  }
  /** No-op by default; subclasses may react to the load starting. */
  @Override
  public void onLoadStarted(@Nullable Drawable placeholder) {
    // Do nothing.
  }
  /** No-op by default; subclasses may react to the load failing. */
  @Override
  public void onLoadFailed(@Nullable Drawable errorDrawable) {
    // Do nothing.
  }
  /** No-op lifecycle callback. */
  @Override
  public void onStart() {
    // Do nothing.
  }
  /** No-op lifecycle callback. */
  @Override
  public void onStop() {
    // Do nothing.
  }
  /** No-op lifecycle callback. */
  @Override
  public void onDestroy() {
    // Do nothing.
  }
}
|
package eu.neosurance.sdk.tracer.location;
import android.location.Location;
import android.util.Log;
import org.json.JSONException;
import eu.neosurance.sdk.data.configuration.ConfigurationRepository;
import eu.neosurance.sdk.platform.location.LocationManager;
import eu.neosurance.sdk.tracer.Tracer;
/**
 * {@link Tracer} implementation that starts/stops location updates through a
 * {@link LocationManager}, gated by the remote configuration.
 */
public class LocationTracer implements Tracer {
    private static final String TAG = LocationTracer.class.getCanonicalName();
    private final LocationManager locationManager;
    private final ConfigurationRepository configurationRepository;
    // Flag managed externally via the getter/setter below; its meaning is not
    // visible from this class — presumably "device is stationary" (TODO confirm).
    private boolean stillLocation;
    public LocationTracer(LocationManager locationManager,
                          ConfigurationRepository configurationRepository) {
        this.locationManager = locationManager;
        this.configurationRepository = configurationRepository;
    }
    public LocationManager getLocationManager() {
        return locationManager;
    }
    /**
     * Starts location tracing if the location permission is granted and the
     * configuration has {@code position.enabled == 1}. JSON errors while
     * reading the configuration are logged and swallowed.
     */
    @Override
    public void trace() {
        Log.d(TAG, "traceLocation");
        try {
            if (locationManager.hasLocationPermission()) {
                if (configurationRepository.getConf() != null &&
                        configurationRepository.getConf().getJSONObject("position").getInt("enabled") == 1) {
                    locationManager.initLocation();
                    Log.d(TAG, "requestLocationUpdates");
                    // Stop any previous trace before requesting new updates —
                    // presumably to avoid duplicate listeners (TODO confirm).
                    locationManager.stopTraceLocation();
                    locationManager.requestLocationUpdates();
                }
            }
        } catch (JSONException e) {
            // Missing/malformed "position" section in the configuration.
            Log.e(TAG, "traceLocation", e);
        }
    }
    /** Stops any ongoing location updates. */
    @Override
    public void stopTrace() {
        this.locationManager.stopTraceLocation();
    }
    // Delegates last-known-location storage to the underlying LocationManager.
    public void setLastLocation(Location lastLocation) {
        this.getLocationManager().setLastLocation(lastLocation);
    }
    public Location getLastLocation() {
        return this.getLocationManager().getLastLocation();
    }
    public boolean getStillLocation() {
        return stillLocation;
    }
    public void setStillLocation(boolean stillLocation) {
        this.stillLocation = stillLocation;
    }
}
|
package org.hisp.dhis.user;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
 * The main interface for working with user settings. Implementations need to
 * get the current user from {@link CurrentUserService}.
 *
 * @author Torgeir Lorange Ostby
 * @version $Id: UserSettingService.java 2869 2007-02-20 14:26:09Z andegje $
 */
public interface UserSettingService
{
    String ID = UserSettingService.class.getName();

    // Well-known keys under which user settings are stored.
    final String AUTO_SAVE_DATA_ENTRY_FORM = "autoSaveDataEntryForm";
    final String KEY_CHARTS_IN_DASHBOARD = "keyChartsInDashboard";
    final String KEY_CURRENT_DATADICTIONARY = "currentDataDictionary";
    final String KEY_STYLE = "stylesheet";
    final String KEY_STYLE_DIRECTORY = "stylesheetDirectory";
    final String KEY_MESSAGE_EMAIL_NOTIFICATION = "keyMessageEmailNotification";
    final String KEY_MESSAGE_SMS_NOTIFICATION = "keyMessageSmsNotification";
    final String KEY_DB_LOCALE = "keyLocaleUserSetting";

    // Dashboard chart defaults and the allowed chart counts.
    final int DEFAULT_CHARTS_IN_DASHBOARD = 4;
    final List<Integer> DASHBOARD_CHARTS_TO_DISPLAY = Arrays.asList( 4, 6, 8 );

    /**
     * Saves the name/value pair as a user setting connected to the currently
     * logged in user.
     *
     * @param name the name/handle of the value.
     * @param value the value to store.
     * @throws NoCurrentUserException if there is no current user.
     */
    void saveUserSetting( String name, Serializable value );

    /**
     * Returns the value of the user setting specified by the given name.
     *
     * @param name the name of the user setting.
     * @return the value corresponding to the named user setting, or null if
     *         there is no match.
     * @throws NoCurrentUserException if there is no current user.
     */
    Serializable getUserSetting( String name );

    /**
     * Returns the value of the user setting specified by the given name. If
     * there is no current user or the user setting doesn't exist, the specified
     * default value is returned.
     *
     * @param name the name of the user setting.
     * @param defaultValue the value to return if there is no current user or no
     *        user setting corresponding to the given name.
     * @return the value corresponding to the named user setting, or the default
     *         value if there is no current user or matching user setting.
     */
    Serializable getUserSetting( String name, Serializable defaultValue );

    /**
     * Returns all user settings belonging to the current user.
     *
     * @return all user settings belonging to the current user.
     * @throws NoCurrentUserException if there is no current user.
     */
    Collection<UserSetting> getAllUserSettings();

    /**
     * Deletes the user setting with the given name.
     *
     * @param name the name of the user setting to delete.
     * @throws NoCurrentUserException if there is no current user.
     */
    void deleteUserSetting( String name );
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.api.debugger.jpda;
import junit.framework.Test;
import org.netbeans.api.debugger.ActionsManager;
import org.netbeans.api.debugger.DebuggerManager;
import org.netbeans.junit.NbTestCase;
/**
 * Tests JPDA stepping actions: step in, step out and step over.
 *
 * @author Maros Sandor, Jan Jancura
 */
public class StepTest extends NbTestCase {

    private final DebuggerManager dm = DebuggerManager.getDebuggerManager ();
    private final String sourceRoot = System.getProperty ("test.dir.src");

    /** Debuggee session; assigned only after a successful attach. */
    private JPDASupport support;

    public StepTest (String s) {
        super (s);
    }

    public static Test suite() {
        return JPDASupport.createTestSuite(StepTest.class);
    }

    /**
     * Stops at the first breakpoint in StepApp and verifies that five
     * successive step-overs each advance execution by exactly one line.
     */
    public void testStepOver () throws Exception {
        try {
            JPDASupport.removeAllBreakpoints ();
            Utils.BreakPositions bp = Utils.getBreakPositions(sourceRoot +
                    "org/netbeans/api/debugger/jpda/testapps/StepApp.java");
            LineBreakpoint lb = bp.getLineBreakpoints().get(0);
            dm.addBreakpoint (lb);
            support = JPDASupport.attach
                ("org.netbeans.api.debugger.jpda.testapps.StepApp");
            support.waitState (JPDADebugger.STATE_STOPPED);
            dm.removeBreakpoint (lb);
            int line = lb.getLineNumber();
            assertEquals (
                "Execution stopped in wrong class",
                support.getDebugger ().getCurrentCallStackFrame ().
                    getClassName (),
                "org.netbeans.api.debugger.jpda.testapps.StepApp"
            );
            assertEquals (
                "Execution stopped at wrong line",
                line,
                support.getDebugger ().getCurrentCallStackFrame ().
                    getLineNumber (null)
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                ++line
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                ++line
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                ++line
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                ++line
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                ++line
            );
            support.doContinue ();
            support.waitState (JPDADebugger.STATE_DISCONNECTED);
        } finally {
            // Guard: if attach() threw before 'support' was assigned, an
            // unconditional doFinish() would NPE and mask the real failure.
            if (support != null) {
                support.doFinish ();
            }
        }
    }

    /**
     * Verifies step-into behavior against positions tagged in StepApp
     * ("1into", "Into1".."Into3"), interleaved with step-overs.
     */
    public void testStepInto () throws Exception {
        try {
            JPDASupport.removeAllBreakpoints ();
            Utils.BreakPositions bp = Utils.getBreakPositions(sourceRoot +
                    "org/netbeans/api/debugger/jpda/testapps/StepApp.java");
            LineBreakpoint lb = bp.getLineBreakpoints().get(0);
            dm.addBreakpoint (lb);
            support = JPDASupport.attach
                ("org.netbeans.api.debugger.jpda.testapps.StepApp");
            support.waitState (JPDADebugger.STATE_STOPPED);
            dm.removeBreakpoint (lb);
            assertEquals (
                "Execution stopped in wrong class",
                support.getDebugger ().getCurrentCallStackFrame ().
                    getClassName (),
                "org.netbeans.api.debugger.jpda.testapps.StepApp"
            );
            assertEquals (
                "Execution stopped at wrong line",
                lb.getLineNumber(),
                support.getDebugger ().getCurrentCallStackFrame ().
                    getLineNumber (null)
            );
            stepCheck (
                ActionsManager.ACTION_STEP_INTO,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("1into")
            );
            // stepCheck (ActionsManager.ACTION_STEP_INTO, "java.lang.Object", -1);
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("1into") + 1
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                lb.getLineNumber()
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                lb.getLineNumber() + 1
            );
            stepCheck (
                ActionsManager.ACTION_STEP_INTO,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into1")
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into2")
            );
            stepCheck (
                ActionsManager.ACTION_STEP_INTO,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into3")
            );
            support.doContinue ();
            support.waitState (JPDADebugger.STATE_DISCONNECTED);
        } finally {
            // See testStepOver: avoid NPE when attach failed early.
            if (support != null) {
                support.doFinish ();
            }
        }
    }

    /**
     * Steps into nested calls and then verifies that step-out returns to the
     * expected caller lines ("Into2", then the line after the breakpoint).
     */
    public void testStepOut () throws Exception {
        try {
            JPDASupport.removeAllBreakpoints ();
            Utils.BreakPositions bp = Utils.getBreakPositions(sourceRoot +
                    "org/netbeans/api/debugger/jpda/testapps/StepApp.java");
            LineBreakpoint lb = bp.getLineBreakpoints().get(0);
            dm.addBreakpoint (lb);
            support = JPDASupport.attach
                ("org.netbeans.api.debugger.jpda.testapps.StepApp");
            support.waitState (JPDADebugger.STATE_STOPPED);
            dm.removeBreakpoint (lb);
            assertEquals (
                "Execution stopped in wrong class",
                support.getDebugger ().getCurrentCallStackFrame ().
                    getClassName (),
                "org.netbeans.api.debugger.jpda.testapps.StepApp"
            );
            assertEquals (
                "Execution stopped at wrong line",
                lb.getLineNumber(),
                support.getDebugger ().getCurrentCallStackFrame ().
                    getLineNumber (null)
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                lb.getLineNumber() + 1
            );
            stepCheck (
                ActionsManager.ACTION_STEP_INTO,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into1")
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OVER,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into2")
            );
            stepCheck (
                ActionsManager.ACTION_STEP_INTO,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into3")
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OUT,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                bp.getStopLine("Into2")
            );
            stepCheck (
                ActionsManager.ACTION_STEP_OUT,
                "org.netbeans.api.debugger.jpda.testapps.StepApp",
                lb.getLineNumber() + 1
            );
            support.doContinue ();
            support.waitState (JPDADebugger.STATE_DISCONNECTED);
        } finally {
            // See testStepOver: avoid NPE when attach failed early.
            if (support != null) {
                support.doFinish ();
            }
        }
    }

    /**
     * Checks that when a step lands on a breakpoint, evaluating a method in
     * the debuggee and continuing still completes the original step.
     */
    public void testStepAndContinueOnBP() throws Exception {
        String app = "org.netbeans.api.debugger.jpda.testapps.StepAndContinueApp";
        try {
            JPDASupport.removeAllBreakpoints();
            Utils.BreakPositions bp = Utils.getBreakPositions(sourceRoot + app.replace('.', '/') + ".java");
            LineBreakpoint lb = bp.getLineBreakpoints().get(0);
            dm.addBreakpoint(lb);
            support = JPDASupport.attach(app);
            support.waitState (JPDADebugger.STATE_STOPPED);
            dm.removeBreakpoint(lb);
            assertEquals (
                "Execution stopped in wrong class",
                support.getDebugger().getCurrentCallStackFrame().getClassName(),
                app
            );
            assertEquals (
                "Execution stopped at wrong line",
                lb.getLineNumber(),
                support.getDebugger().getCurrentCallStackFrame().getLineNumber(null)
            );
            // Second breakpoint stays installed so steps can land on it.
            lb = bp.getLineBreakpoints().get(1);
            dm.addBreakpoint(lb);
            support.stepOver();
            suspendedLineCheck(bp.getStopLine("Over1"));
            support.stepOver();
            breakpointCheckEvalCont(lb.getLineNumber());
            suspendedLineCheck(bp.getStopLine("Over2"));
            support.stepOver();
            breakpointCheckEvalCont(lb.getLineNumber());
            suspendedLineCheck(bp.getStopLine("Over3"));
            support.stepOver();
            suspendedLineCheck(bp.getStopLine("Over4"));
            support.stepOver();
            suspendedLineCheck(bp.getStopLine("Over5"));
            support.stepInto();
            suspendedLineCheck(bp.getStopLine("Into6"));
            support.stepOut();
            breakpointCheckEvalCont(lb.getLineNumber());
            support.stepOver();
            suspendedLineCheck(bp.getStopLine("Out7"));
            support.stepInto();
            suspendedLineCheck(bp.getStopLine("Into8"));
            support.stepOut();
            breakpointCheckEvalCont(lb.getLineNumber());
            suspendedLineCheck(bp.getStopLine("Out7"));
        } finally {
            // See testStepOver: avoid NPE when attach failed early.
            if (support != null) {
                support.doFinish ();
            }
        }
    }

    /**
     * Performs one step of the given type and asserts the debugger is now
     * suspended in the expected class at the expected line.
     */
    private void stepCheck (
        Object stepType,
        String clsExpected,
        int lineExpected
    ) {
        support.step (stepType);
        assertEquals(
            "Execution stopped in wrong class",
            clsExpected,
            support.getDebugger ().getCurrentCallStackFrame ().getClassName ()
        );
        assertEquals (
            "Execution stopped at wrong line",
            lineExpected,
            support.getDebugger ().getCurrentCallStackFrame ().
                getLineNumber (null)
        );
    }

    /**
     * Asserts we are stopped on the breakpoint line, invokes m1() in the
     * debuggee to exercise evaluation while a step is pending, then continues
     * and waits until the original step completes.
     */
    private void breakpointCheckEvalCont(int lineExpected) {
        suspendedLineCheck(lineExpected);
        // We invoke a method:
        try {
            assertEquals("10", support.getDebugger().evaluate("m1()").getValue());
        } catch (InvalidExpressionException ex) {
            throw new AssertionError(ex);
        }
        // Then we do Continue to finish the original step:
        support.doContinue();
        support.waitState(JPDADebugger.STATE_STOPPED);
    }

    /** Asserts the current call stack frame is suspended at the given line. */
    private void suspendedLineCheck(int lineExpected) {
        assertEquals (
            "Execution stopped at wrong line",
            lineExpected,
            support.getDebugger().getCurrentCallStackFrame().getLineNumber(null)
        );
    }
}
|
/*
* This file is part of Baritone.
*
* Baritone is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Baritone is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Baritone. If not, see <https://www.gnu.org/licenses/>.
*/
package baritone.api.command;
import baritone.api.command.argument.IArgConsumer;
import baritone.api.command.exception.CommandException;
import baritone.api.utils.Helper;
import java.util.List;
import java.util.stream.Stream;
/**
 * The base for a command.
 *
 * @author Brady
 * @since 10/7/2019
 */
public interface ICommand extends Helper {

    /**
     * Called when this command is executed.
     *
     * @param label the label the command was invoked with
     * @param args  consumer over the remaining arguments
     * @throws CommandException if the arguments are invalid or execution fails
     */
    void execute(String label, IArgConsumer args) throws CommandException;

    /**
     * Called when the command needs to tab complete. Return a Stream representing the entries to put in the completions
     * list.
     *
     * @param label the label the command was invoked with
     * @param args  consumer over the arguments typed so far
     * @return the candidate completions
     * @throws CommandException if the partial arguments cannot be interpreted
     */
    Stream<String> tabComplete(String label, IArgConsumer args) throws CommandException;

    /**
     * @return A <b>single-line</b> string containing a short description of this command's purpose.
     */
    String getShortDesc();

    /**
     * @return A list of lines that will be printed by the help command when the user wishes to view them.
     */
    List<String> getLongDesc();

    /**
     * @return A list of the names that can be accepted to have arguments passed to this command
     */
    List<String> getNames();

    /**
     * @return {@code true} if this command should be hidden from the help menu
     */
    default boolean hiddenFromHelp() {
        return false;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.jms.command;
import org.apache.karaf.shell.commands.Argument;
import org.apache.karaf.shell.commands.Command;
import org.apache.karaf.shell.commands.Option;
/**
 * Shell command that sends a text message to a JMS queue, using the broker
 * connection settings inherited from {@code JmsConnectionCommandSupport}.
 */
// Fixed truncated user-facing description ("Send a message to ").
@Command(scope = "jms", name = "send", description = "Send a message to a JMS queue")
public class SendCommand extends JmsConnectionCommandSupport {

    // NOTE(review): indices start at 1 — index 0 is presumably the connection
    // factory argument declared in the superclass; confirm there.
    @Argument(index = 1, name = "queue", description = "The JMS queue name", required = true, multiValued = false)
    String queue;

    @Argument(index = 2, name = "message", description = "The JMS message content", required = true, multiValued = false)
    String message;

    @Option(name = "-r", aliases = { "--replyTo" }, description = "Set the message ReplyTo", required = false, multiValued = false)
    String replyTo;

    /**
     * Sends {@link #message} to {@link #queue}; the optional reply-to
     * destination is applied when set.
     *
     * @return always {@code null} — the command prints nothing on success.
     */
    public Object doExecute() throws Exception {
        getJmsService().send(connectionFactory, queue, message, replyTo, username, password);
        return null;
    }
}
|
package com.web.blog.dto;
import java.time.LocalDate;
import java.time.LocalTime;
import org.springframework.format.annotation.DateTimeFormat;
/**
 * Transfer object describing one entry in a band's show list.
 * Plain mutable bean: every field is exposed via a getter/setter pair; the
 * {@code @DateTimeFormat} annotations drive Spring's request binding.
 */
public class BandShowlist {

    private String showId;
    @DateTimeFormat(pattern = "yyyy-MM-dd")
    private LocalDate date;
    private String title;
    @DateTimeFormat(pattern = "HH:mm")
    private LocalTime time;
    private String showContent;
    private String img;
    private String bandId;
    private String name;
    private String shows;

    /** No-args constructor for bean-mapping frameworks. */
    public BandShowlist() {
    }

    /** Convenience constructor populating every field at once. */
    public BandShowlist(String showId, LocalDate date, String title, LocalTime time, String showContent, String img,
            String bandId, String name, String shows) {
        this.showId = showId;
        this.date = date;
        this.title = title;
        this.time = time;
        this.showContent = showContent;
        this.img = img;
        this.bandId = bandId;
        this.name = name;
        this.shows = shows;
    }

    public String getShowId() {
        return this.showId;
    }

    public void setShowId(String showId) {
        this.showId = showId;
    }

    public LocalDate getDate() {
        return this.date;
    }

    public void setDate(LocalDate date) {
        this.date = date;
    }

    public String getTitle() {
        return this.title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public LocalTime getTime() {
        return this.time;
    }

    public void setTime(LocalTime time) {
        this.time = time;
    }

    public String getShowContent() {
        return this.showContent;
    }

    public void setShowContent(String showContent) {
        this.showContent = showContent;
    }

    public String getImg() {
        return this.img;
    }

    public void setImg(String img) {
        this.img = img;
    }

    public String getBandId() {
        return this.bandId;
    }

    public void setBandId(String bandId) {
        this.bandId = bandId;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getShows() {
        return this.shows;
    }

    public void setShows(String shows) {
        this.shows = shows;
    }
}
|
package org.benetech.servicenet.converter;
import java.io.File;
import java.io.InputStream;
import java.io.StringReader;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.StringUtils;
import org.benetech.servicenet.util.BsonUtils;
import org.benetech.servicenet.util.StreamUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
/**
 * Converts CSV payloads (stream or string) into JSON, one object per row
 * keyed by the CSV header line.
 */
public class CSVFileConverter extends AbstractDataConverter {

    private final Logger log = LoggerFactory.getLogger(CSVFileConverter.class);

    /** Column separator; defaults to a comma when none is configured. */
    private char delimiter = ',';

    public CSVFileConverter(String delimiter) {
        // Only the first character of a non-blank setting is honored.
        if (StringUtils.isNotBlank(delimiter)) {
            this.delimiter = delimiter.charAt(0);
        }
    }

    /**
     * Serializes the parsed records (one header-keyed map per row) into a
     * temporary JSON file.
     *
     * @throws IOException if the temporary file cannot be created or written.
     *         Previously this was swallowed and {@code null} returned, which
     *         made every caller fail later with an opaque NullPointerException.
     */
    private File convertRecords(List<CSVRecord> originalRecords) throws IOException {
        List<Object> convertedRecords = new ArrayList<>();
        for (CSVRecord record : originalRecords) {
            convertedRecords.add(record.toMap());
        }
        try {
            File tempFile = StreamUtils.temporaryFile();
            StreamUtils.writeJsonStream(tempFile, convertedRecords);
            return tempFile;
        } catch (IOException e) {
            // Log for diagnostics, then propagate so callers see the real cause.
            log.error(e.getMessage(), e);
            throw e;
        }
    }

    /**
     * Reads the whole stream as CSV and returns the JSON representation.
     * The stream is always closed, even when parsing fails.
     */
    public String convert(InputStream is) throws IOException {
        List<CSVRecord> originalRecords;
        // try-with-resources: the original closed the stream only on the
        // happy path, leaking it when parse() threw.
        try (InputStreamReader reader = new InputStreamReader(is)) {
            originalRecords = CSVFormat.DEFAULT.withHeader().withDelimiter(delimiter).parse(reader).getRecords();
        }
        File file = convertRecords(originalRecords);
        return Files.readString(file.toPath());
    }

    /** Parses the CSV string and writes its JSON form to a temporary file. */
    public File convertToFile(String csv) throws IOException {
        try (StringReader reader = new StringReader(csv)) {
            List<CSVRecord> originalRecords =
                    CSVFormat.DEFAULT.withHeader().withDelimiter(delimiter).parse(reader).getRecords();
            return convertRecords(originalRecords);
        }
    }

    /** Converts a BSON document to CSV text, then to {@link ImportData} JSON. */
    @Override
    public ImportData convert(Object data) throws IOException {
        ImportData conversionOutput = new ImportData();
        File file = convertToFile(BsonUtils.docToString(data));
        conversionOutput.setJson(Files.readString(file.toPath()));
        return conversionOutput;
    }
}
|
/*
* The MIT License
*
* Copyright 2020 Sergey Sidorov/000ssg@gmail.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package ssg.lib.common;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for the Huffman coder: dictionary shape, tree structure, and a
 * small encode/decode round trip with verbose console tracing.
 *
 * @author 000ssg
 */
public class HuffmanTest {
    public HuffmanTest() {
    }
    @BeforeClass
    public static void setUpClass() {
    }
    @AfterClass
    public static void tearDownClass() {
    }
    @Before
    public void setUp() {
    }
    @After
    public void tearDown() {
    }
    /**
     * Test of getDictionary method, of class Huffman.
     * Expects the SmallHuffman-backed implementation to expose 11 symbols.
     */
    @Test
    public void testGetDictionary() {
        System.out.println("getDictionary");
        Huffman instance = new HuffmanImpl();
        // Raw Map kept to match Huffman.getDictionary()'s declared type.
        Map result = instance.getDictionary();
        assertEquals(11, result.size());
    }
    /**
     * Test of getRoot method, of class Huffman.
     * The root must be an internal node (no value) with both children present
     * and levels increasing by one.
     */
    @Test
    public void testGetRoot() {
        System.out.println("getRoot");
        Huffman instance = new HuffmanImpl();
        Huffman.HTree result = instance.getRoot();
        assertNotNull(result);
        assertNull(result.value);
        assertNotNull(result.zero);
        assertNotNull(result.one);
        assertEquals(0, result.level);
        assertEquals(1, result.zero.level);
        assertEquals(1, result.one.level);
    }
    /**
     * Test of getEncoder method, of class Huffman.
     */
    @Test
    public void testGetEncoder() {
        System.out.println("getEncoder");
        Huffman instance = new HuffmanImpl();
        Huffman.Encoder result = instance.getEncoder();
        assertNotNull(result);
    }
    /**
     * Test of getDecoder method, of class Huffman.
     */
    @Test
    public void testGetDecoder() {
        System.out.println("getDecoder");
        Huffman instance = new HuffmanImpl();
        Huffman.Decoder result = instance.getDecoder();
        assertNotNull(result);
    }
    /**
     * Test of buildTree method, of class Huffman.
     * Rebuilds the tree from the dictionary, then round-trips a sample string
     * through the encoder/decoder, printing bit-level traces when DEBUG is on.
     * Only the tree equality is asserted; the round-trip result is printed,
     * not asserted.
     */
    @Test
    public void testBuildTree() throws Exception {
        System.out.println("buildTree");
        SmallHuffman huff = new SmallHuffman();
        Collection<Integer> consumed = new HashSet<Integer>();
        Huffman.HTree result = Huffman.buildTree(null, null, huff.getDictionary(), consumed);
        // Structural equality via toString: rebuilt tree must match the original.
        assertEquals(huff.root.toString(), result.toString());
        Huffman.Encoder enc = huff.getEncoder();
        Huffman.Decoder dec = huff.getDecoder();
        for (String testText : new String[]{
            "0000000000"
        }) {
            enc.reset();
            dec.reset();
            // ISO-8859-1 is used throughout so each char maps to one byte.
            System.out.println("\nSOURCE[" + testText.getBytes("ISO-8859-1").length + "]: " + testText);
            if (huff.DEBUG) {
                for (byte b : testText.getBytes("ISO-8859-1")) {
                    System.out.print(Integer.toBinaryString(0xFF & b) + "");
                }
                System.out.println();
            }
            enc.add(testText.getBytes());
            enc.close();
            byte[] bb = enc.getValue();
            System.out.println("ENCODED[" + bb.length + "]");
            if (huff.DEBUG) {
                for (int i = 0; i < bb.length; i++) {
                    System.out.print(Integer.toBinaryString(0xFF & bb[i]) + "");
                }
                System.out.println();
                for (int i = 0; i < bb.length; i++) {
                    System.out.print(Integer.toHexString(0xFF & bb[i]) + "");
                    if (i % 2 == 0 && i > 0) {
                        System.out.print(" ");
                    }
                }
                System.out.println();
            }
            dec.add(bb);
            dec.close();
            byte[] bb2 = dec.getValue();
            System.out.println("DECODED[" + bb2.length + "]");
            System.out.println("RESULT: " + new String(bb2, "ISO-8859-1"));
            System.out.println("Matches: " + testText.equals(new String(bb2, "ISO-8859-1")));
        }
    }
    // Concrete subject under test; inherits SmallHuffman's fixed dictionary.
    public static class HuffmanImpl extends SmallHuffman {
    }
}
|
// Targeted by JavaCPP version 1.5.1-SNAPSHOT: DO NOT EDIT THIS FILE
package org.bytedeco.tensorflow;
import org.bytedeco.tensorflow.Allocator;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.tensorflow.global.tensorflow.*;
/** Gather slices from {@code params} according to {@code indices}.
*
* {@code indices} must be an integer tensor of any dimension (usually 0-D or 1-D).
* Produces an output tensor with shape {@code indices.shape + params.shape[1:]} where:
*
* <pre>{@code python
* # Scalar indices
* output[:, ..., :] = params[indices, :, ... :]
*
* # Vector indices
* output[i, :, ..., :] = params[indices[i], :, ... :]
*
* # Higher rank indices
* output[i, ..., j, :, ... :] = params[indices[i, ..., j], :, ..., :]
* }</pre>
*
* If {@code indices} is a permutation and {@code len(indices) == params.shape[0]} then
* this operation will permute {@code params} accordingly.
*
* {@code validate_indices}: DEPRECATED. If this operation is assigned to CPU, values in
* {@code indices} are always validated to be within range. If assigned to GPU,
* out-of-bound indices result in safe but unspecified behavior, which may include
* raising an error.
*
* <div style="width:70%; margin:auto; margin-bottom:10px; margin-top:20px;">
* <img style="width:100%" src="https://www.tensorflow.org/images/Gather.png" alt>
* </div>
*
* Arguments:
* * scope: A Scope object
*
* Returns:
* * {@code Output}: The output tensor. */
// NOTE(review): JavaCPP-generated binding (see "DO NOT EDIT" file header).
// Regenerate from the presets instead of hand-editing this class.
@Namespace("tensorflow::ops") @NoOffset @Properties(inherit = org.bytedeco.tensorflow.presets.tensorflow.class)
public class Gather extends Pointer {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public Gather(Pointer p) { super(p); }
    /** Optional attribute setters for Gather */
    public static class Attrs extends Pointer {
        static { Loader.load(); }
        /** Default native constructor. */
        public Attrs() { super((Pointer)null); allocate(); }
        /** Native array allocator. Access with {@link Pointer#position(long)}. */
        public Attrs(long size) { super((Pointer)null); allocateArray(size); }
        /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
        public Attrs(Pointer p) { super(p); }
        private native void allocate();
        private native void allocateArray(long size);
        @Override public Attrs position(long position) {
            return (Attrs)super.position(position);
        }
        /** Defaults to true */
        public native @ByVal Attrs ValidateIndices(@Cast("bool") boolean x);
        public native @Cast("bool") boolean validate_indices_(); public native Attrs validate_indices_(boolean setter);
    }
    public Gather(@Const @ByRef Scope scope, @ByVal Input params,
                  @ByVal Input indices) { super((Pointer)null); allocate(scope, params, indices); }
    private native void allocate(@Const @ByRef Scope scope, @ByVal Input params,
                                 @ByVal Input indices);
    public Gather(@Const @ByRef Scope scope, @ByVal Input params,
                  @ByVal Input indices, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, params, indices, attrs); }
    private native void allocate(@Const @ByRef Scope scope, @ByVal Input params,
                                 @ByVal Input indices, @Const @ByRef Attrs attrs);
    public native @ByVal @Name("operator tensorflow::Output") Output asOutput();
    public native @ByVal @Name("operator tensorflow::Input") Input asInput();
    public native Node node();
    public static native @ByVal Attrs ValidateIndices(@Cast("bool") boolean x);
    public native @ByRef Operation operation(); public native Gather operation(Operation setter);
    public native @ByRef Output output(); public native Gather output(Output setter);
}
|
package com.ppiech.auto.value.jackson.typeconverters;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Calendar;
/**
 * Jackson type converter mapping between {@link Calendar} values and date
 * strings in the format supplied by {@link #getDateFormat()}.
 */
public abstract class CalendarTypeConverter implements TypeConverter<Calendar> {

    // DateFormat is not thread-safe, so wrap it in a ThreadLocal
    private final ThreadLocal<DateFormat> mDateFormat = new ThreadLocal<DateFormat>() {
        @Override
        protected DateFormat initialValue() {
            return getDateFormat();
        }
    };

    /**
     * Parses the current JSON token as a date string.
     *
     * @return the parsed {@link Calendar}, or {@code null} when the token is
     *         null/absent or does not match {@link #getDateFormat()}.
     */
    @Override
    public Calendar parse(JsonParser jsonParser) throws IOException {
        String dateString = jsonParser.getValueAsString(null);
        if (dateString == null) {
            // Bug fix: DateFormat.parse(null) throws NullPointerException
            // (not ParseException), so a JSON null crashed instead of being
            // treated as "no value".
            return null;
        }
        try {
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(mDateFormat.get().parse(dateString));
            return calendar;
        } catch (ParseException e) {
            // Best-effort contract: unparseable input yields null.
            return null;
        }
    }

    /** Writes the calendar as a formatted string (with or without a field name). */
    @Override
    public void serialize(Calendar object, String fieldName, boolean writeFieldNameForObject, JsonGenerator jsonGenerator) throws IOException {
        // DateFormat.format(Object) accepts a Number as epoch milliseconds.
        if (fieldName != null) {
            jsonGenerator.writeStringField(fieldName, mDateFormat.get().format(object.getTimeInMillis()));
        } else {
            jsonGenerator.writeString(mDateFormat.get().format(object.getTimeInMillis()));
        }
    }

    /** Called to get the DateFormat used to parse and serialize objects. */
    public abstract DateFormat getDateFormat();
}
|
/*
* Copyright 2010-2021 Australian Signals Directorate
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.gov.asd.tac.constellation.graph.value.variables;
import au.gov.asd.tac.constellation.graph.value.readables.CharReadable;
import au.gov.asd.tac.constellation.graph.value.writables.CharWritable;
/**
 * A variable holding a {@code char} value: combines read access from
 * {@link CharReadable} with write access from {@link CharWritable}.
 * Declares no members of its own.
 *
 * @author sirius
 */
public interface CharVariable extends CharReadable, CharWritable {
}
|
package com.palmg.boot.webcore.scan.conversion.impl;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import com.palmg.boot.webcore.Application;
import com.palmg.boot.webcore.scan.PackageScan;
import com.palmg.boot.webcore.scan.conversion.AnnotConver;
import com.palmg.boot.webcore.scan.conversion.AnnotConverExceprion;
/**
 * Scan conversion for JPA repositories: points the {@code basePackages}
 * attribute of the {@link EnableJpaRepositories} annotation on
 * {@link Application} at the packages discovered by the scan.
 */
public class JpaDaoScanConver extends AnnotConver {
@Override
public void conver(Class<?> converClass, PackageScan scan) throws AnnotConverExceprion {
// NOTE(review): 'modify' is inherited from AnnotConver; presumably it
// rewrites the named annotation attribute — confirm its contract there.
modify(Application.class, EnableJpaRepositories.class, "basePackages", scan.getJpaScanPackage());
}
}
|
package io.github.prospector.modmenu.gui.entries;
import com.mojang.blaze3d.platform.GlStateManager;
import io.github.prospector.modmenu.ModMenu;
import io.github.prospector.modmenu.gui.ModListEntry;
import io.github.prospector.modmenu.gui.ModListWidget;
import net.fabricmc.loader.api.ModContainer;
import net.minecraft.client.font.TextRenderer;
import net.minecraft.client.gui.DrawableHelper;
import net.minecraft.util.Identifier;
import org.lwjgl.glfw.GLFW;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
public class ParentEntry extends ModListEntry {
private static final Identifier PARENT_MOD_TEXTURE = new Identifier(ModMenu.MOD_ID, "textures/gui/parent_mod.png");
protected List<ModContainer> children;
protected ModListWidget list;
protected boolean hoveringIcon = false;
public ParentEntry(ModContainer parent, List<ModContainer> children, ModListWidget list) {
super(parent, list);
this.children = children;
this.list = list;
}
@Override
public void render(int index, int y, int x, int rowWidth, int rowHeight, int mouseX, int mouseY, boolean isSelected, float delta) {
// Draw the standard mod entry first, then overlay the child-count badge on the icon.
super.render(index, y, x, rowWidth, rowHeight, mouseX, mouseY, isSelected, delta);
TextRenderer font = client.textRenderer;
// The badge starts as a square of the font height and widens to fit the count text.
int childrenBadgeHeight = font.fontHeight;
int childrenBadgeWidth = font.fontHeight;
// NOTE: this local intentionally shadows the 'children' list field with the child count.
int children = getChildren().size();
int childrenWidth = font.getStringWidth(Integer.toString(children)) - 1;
if (childrenBadgeWidth < childrenWidth + 4) {
childrenBadgeWidth = childrenWidth + 4;
}
// Anchor the badge to the bottom-right corner of the 32x32 mod icon area.
int childrenBadgeX = x + 32 - childrenBadgeWidth;
int childrenBadgeY = y + 32 - childrenBadgeHeight;
int childrenOutlineColor = 0x8810d098;
int childrenFillColor = 0x88046146;
// Four one-pixel edge strips (top, left, right, bottom) form the outline; the
// middle fill is drawn between them. Corners are left open by the +/-1 offsets.
DrawableHelper.fill(childrenBadgeX + 1, childrenBadgeY, childrenBadgeX + childrenBadgeWidth - 1, childrenBadgeY + 1, childrenOutlineColor);
DrawableHelper.fill(childrenBadgeX, childrenBadgeY + 1, childrenBadgeX + 1, childrenBadgeY + childrenBadgeHeight - 1, childrenOutlineColor);
DrawableHelper.fill(childrenBadgeX + childrenBadgeWidth - 1, childrenBadgeY + 1, childrenBadgeX + childrenBadgeWidth, childrenBadgeY + childrenBadgeHeight - 1, childrenOutlineColor);
DrawableHelper.fill(childrenBadgeX + 1, childrenBadgeY + 1, childrenBadgeX + childrenBadgeWidth - 1, childrenBadgeY + childrenBadgeHeight - 1, childrenFillColor);
DrawableHelper.fill(childrenBadgeX + 1, childrenBadgeY + childrenBadgeHeight - 1, childrenBadgeX + childrenBadgeWidth - 1, childrenBadgeY + childrenBadgeHeight, childrenOutlineColor);
// Centre the count text inside the badge (integer division; may be off by one pixel).
font.draw(Integer.toString(children), childrenBadgeX + childrenBadgeWidth / 2 - childrenWidth / 2, childrenBadgeY + 1, 0xCACACA);
// Track whether the cursor is over the 32x32 icon; consumed by mouseClicked().
this.hoveringIcon = mouseX >= x - 1 && mouseX <= x - 1 + 32 && mouseY >= y - 1 && mouseY <= y - 1 + 32;
if (isMouseOver(mouseX, mouseY)) {
// Highlight the icon and draw the expand/collapse arrow from the texture atlas.
DrawableHelper.fill(x, y, x + 32, y + 32, 0xA0909090);
this.client.getTextureManager().bindTexture(PARENT_MOD_TEXTURE);
// Atlas layout: x offset selects expanded/collapsed, y offset selects hover state.
int xOffset = list.getParent().showModChildren.contains(getMetadata().getId()) ? 32 : 0;
int yOffset = hoveringIcon ? 32 : 0;
GlStateManager.color4f(1.0F, 1.0F, 1.0F, 1.0F);
DrawableHelper.blit(x, y, xOffset, yOffset, 32 + xOffset, 32 + yOffset, 256, 256);
}
}
/**
 * Toggles whether this parent mod's children are shown in the list, then
 * re-applies the current search filter so the list contents refresh.
 */
private void toggleChildrenVisible() {
    String id = getMetadata().getId();
    if (list.getParent().showModChildren.contains(id)) {
        list.getParent().showModChildren.remove(id);
    } else {
        list.getParent().showModChildren.add(id);
    }
    list.filter(list.getParent().getSearchInput(), false);
}

@Override
public boolean mouseClicked(double mouseX, double mouseY, int button) {
    // Only toggle when the click landed on the expand/collapse icon
    // (hoveringIcon is updated every frame by render()).
    if (hoveringIcon) {
        toggleChildrenVisible();
    }
    return super.mouseClicked(mouseX, mouseY, button);
}

@Override
public boolean keyPressed(int keyCode, int scanCode, int modifiers) {
    // ENTER toggles child visibility from the keyboard and consumes the event.
    if (keyCode == GLFW.GLFW_KEY_ENTER) {
        toggleChildrenVisible();
        return true;
    }
    return super.keyPressed(keyCode, scanCode, modifiers);
}
/** Replaces this entry's child list wholesale (the given list is kept by reference). */
public void setChildren(List<ModContainer> children) {
this.children = children;
}
/** Appends the given containers to this entry's child list. */
public void addChildren(List<ModContainer> children) {
this.children.addAll(children);
}
/** Appends the given containers to this entry's child list. */
public void addChildren(ModContainer... children) {
    // Delegate to the List overload through a fixed-size view of the varargs array.
    this.addChildren(Arrays.asList(children));
}
/** @return the live (mutable) list of this entry's child mod containers */
public List<ModContainer> getChildren() {
return children;
}
/** True when the list entry under the cursor position is this entry (null-safe). */
public boolean isMouseOver(double double_1, double double_2) {
    final Object hovered = this.list.getEntryAtPos(double_1, double_2);
    return Objects.equals(hovered, this);
}
}
|
package com.arrow.pegasus;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import com.arrow.pegasus.data.Enabled;
import moonstone.acs.AcsLogicalException;
import moonstone.acs.Loggable;
/**
 * Base class for container-managed beans that need simple lifecycle hooks.
 * Subclasses may override {@link #postConstruct()} for startup work and can
 * poll {@link #isTerminating()} to bail out of long-running loops during
 * shutdown.
 */
public abstract class LifeCycleAbstract extends Loggable {
    // Flips to true once the container has started destroying this bean.
    private boolean terminating;

    @PostConstruct
    protected void postConstruct() {
        // Intentionally empty: subclasses override to perform initialization.
    }

    @PreDestroy
    protected void preDestroy() {
        // Record that shutdown has begun so workers can stop gracefully.
        setTerminating(true);
    }

    protected boolean isTerminating() {
        return terminating;
    }

    protected void setTerminating(boolean terminating) {
        this.terminating = terminating;
    }

    /**
     * Verifies that the given resource exists and is enabled, failing with a
     * descriptive {@link AcsLogicalException} otherwise.
     *
     * @param resource the resource to check (may be null)
     * @param name     human-readable resource name used in error messages
     */
    protected void checkEnabled(Enabled resource, String name) {
        if (resource == null) {
            throw new AcsLogicalException(name + " is not found");
        }
        if (resource.isEnabled()) {
            return;
        }
        throw new AcsLogicalException(name + " is disabled");
    }
}
|
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* gradle plugin from the resource data it found. It
* should not be modified by hand.
*/
package androidx.core;
/**
 * Auto-generated androidx.core resource identifier table. Values are assigned
 * by the build tooling; do not edit by hand (see the file header above).
 */
public final class R {
private R() {}
// Styleable attribute ids (font-provider and alpha attrs).
public static final class attr {
private attr() {}
public static final int alpha = 0x7f030027;
public static final int font = 0x7f030082;
public static final int fontProviderAuthority = 0x7f030084;
public static final int fontProviderCerts = 0x7f030085;
public static final int fontProviderFetchStrategy = 0x7f030086;
public static final int fontProviderFetchTimeout = 0x7f030087;
public static final int fontProviderPackage = 0x7f030088;
public static final int fontProviderQuery = 0x7f030089;
public static final int fontStyle = 0x7f03008a;
public static final int fontVariationSettings = 0x7f03008b;
public static final int fontWeight = 0x7f03008c;
public static final int ttcIndex = 0x7f030142;
}
// Color resource ids.
public static final class color {
private color() {}
public static final int notification_action_color_filter = 0x7f050040;
public static final int notification_icon_bg_color = 0x7f050041;
public static final int ripple_material_light = 0x7f05004b;
public static final int secondary_text_default_material_light = 0x7f05004d;
}
// Dimension resource ids.
public static final class dimen {
private dimen() {}
public static final int compat_button_inset_horizontal_material = 0x7f06004e;
public static final int compat_button_inset_vertical_material = 0x7f06004f;
public static final int compat_button_padding_horizontal_material = 0x7f060050;
public static final int compat_button_padding_vertical_material = 0x7f060051;
public static final int compat_control_corner_material = 0x7f060052;
public static final int compat_notification_large_icon_max_height = 0x7f060053;
public static final int compat_notification_large_icon_max_width = 0x7f060054;
public static final int notification_action_icon_size = 0x7f06005e;
public static final int notification_action_text_size = 0x7f06005f;
public static final int notification_big_circle_margin = 0x7f060060;
public static final int notification_content_margin_start = 0x7f060061;
public static final int notification_large_icon_height = 0x7f060062;
public static final int notification_large_icon_width = 0x7f060063;
public static final int notification_main_column_padding_top = 0x7f060064;
public static final int notification_media_narrow_margin = 0x7f060065;
public static final int notification_right_icon_size = 0x7f060066;
public static final int notification_right_side_padding_top = 0x7f060067;
public static final int notification_small_icon_background_padding = 0x7f060068;
public static final int notification_small_icon_size_as_large = 0x7f060069;
public static final int notification_subtext_size = 0x7f06006a;
public static final int notification_top_pad = 0x7f06006b;
public static final int notification_top_pad_large_text = 0x7f06006c;
}
// Drawable resource ids.
public static final class drawable {
private drawable() {}
public static final int notification_action_background = 0x7f070061;
public static final int notification_bg = 0x7f070062;
public static final int notification_bg_low = 0x7f070063;
public static final int notification_bg_low_normal = 0x7f070064;
public static final int notification_bg_low_pressed = 0x7f070065;
public static final int notification_bg_normal = 0x7f070066;
public static final int notification_bg_normal_pressed = 0x7f070067;
public static final int notification_icon_background = 0x7f070068;
public static final int notification_template_icon_bg = 0x7f070069;
public static final int notification_template_icon_low_bg = 0x7f07006a;
public static final int notification_tile_bg = 0x7f07006b;
public static final int notify_panel_notification_icon_bg = 0x7f07006c;
}
// View/id resource ids.
public static final class id {
private id() {}
public static final int accessibility_action_clickable_span = 0x7f080006;
public static final int accessibility_custom_action_0 = 0x7f080007;
public static final int accessibility_custom_action_1 = 0x7f080008;
public static final int accessibility_custom_action_10 = 0x7f080009;
public static final int accessibility_custom_action_11 = 0x7f08000a;
public static final int accessibility_custom_action_12 = 0x7f08000b;
public static final int accessibility_custom_action_13 = 0x7f08000c;
public static final int accessibility_custom_action_14 = 0x7f08000d;
public static final int accessibility_custom_action_15 = 0x7f08000e;
public static final int accessibility_custom_action_16 = 0x7f08000f;
public static final int accessibility_custom_action_17 = 0x7f080010;
public static final int accessibility_custom_action_18 = 0x7f080011;
public static final int accessibility_custom_action_19 = 0x7f080012;
public static final int accessibility_custom_action_2 = 0x7f080013;
public static final int accessibility_custom_action_20 = 0x7f080014;
public static final int accessibility_custom_action_21 = 0x7f080015;
public static final int accessibility_custom_action_22 = 0x7f080016;
public static final int accessibility_custom_action_23 = 0x7f080017;
public static final int accessibility_custom_action_24 = 0x7f080018;
public static final int accessibility_custom_action_25 = 0x7f080019;
public static final int accessibility_custom_action_26 = 0x7f08001a;
public static final int accessibility_custom_action_27 = 0x7f08001b;
public static final int accessibility_custom_action_28 = 0x7f08001c;
public static final int accessibility_custom_action_29 = 0x7f08001d;
public static final int accessibility_custom_action_3 = 0x7f08001e;
public static final int accessibility_custom_action_30 = 0x7f08001f;
public static final int accessibility_custom_action_31 = 0x7f080020;
public static final int accessibility_custom_action_4 = 0x7f080021;
public static final int accessibility_custom_action_5 = 0x7f080022;
public static final int accessibility_custom_action_6 = 0x7f080023;
public static final int accessibility_custom_action_7 = 0x7f080024;
public static final int accessibility_custom_action_8 = 0x7f080025;
public static final int accessibility_custom_action_9 = 0x7f080026;
public static final int action_container = 0x7f08002e;
public static final int action_divider = 0x7f080030;
public static final int action_image = 0x7f080031;
public static final int action_text = 0x7f080037;
public static final int actions = 0x7f080038;
public static final int async = 0x7f08003d;
public static final int blocking = 0x7f080040;
public static final int chronometer = 0x7f08005f;
public static final int dialog_button = 0x7f080067;
public static final int forever = 0x7f08006f;
public static final int icon = 0x7f080075;
public static final int icon_group = 0x7f080076;
public static final int info = 0x7f080079;
public static final int italic = 0x7f08007b;
public static final int line1 = 0x7f08007f;
public static final int line3 = 0x7f080080;
public static final int normal = 0x7f080088;
public static final int notification_background = 0x7f080089;
public static final int notification_main_column = 0x7f08008a;
public static final int notification_main_column_container = 0x7f08008b;
public static final int right_icon = 0x7f080096;
public static final int right_side = 0x7f080097;
public static final int tag_accessibility_actions = 0x7f0800b8;
public static final int tag_accessibility_clickable_spans = 0x7f0800b9;
public static final int tag_accessibility_heading = 0x7f0800ba;
public static final int tag_accessibility_pane_title = 0x7f0800bb;
public static final int tag_screen_reader_focusable = 0x7f0800bc;
public static final int tag_transition_group = 0x7f0800bd;
public static final int tag_unhandled_key_event_manager = 0x7f0800be;
public static final int tag_unhandled_key_listeners = 0x7f0800bf;
public static final int text = 0x7f0800c0;
public static final int text2 = 0x7f0800c1;
public static final int time = 0x7f0800c4;
public static final int title = 0x7f0800c5;
}
// Integer resource ids.
public static final class integer {
private integer() {}
public static final int status_bar_notification_info_maxnum = 0x7f090004;
}
// Layout resource ids.
public static final class layout {
private layout() {}
public static final int custom_dialog = 0x7f0b001e;
public static final int notification_action = 0x7f0b001f;
public static final int notification_action_tombstone = 0x7f0b0020;
public static final int notification_template_custom_big = 0x7f0b0021;
public static final int notification_template_icon_group = 0x7f0b0022;
public static final int notification_template_part_chronometer = 0x7f0b0023;
public static final int notification_template_part_time = 0x7f0b0024;
}
// String resource ids.
public static final class string {
private string() {}
public static final int status_bar_notification_info_overflow = 0x7f0d0043;
}
// Style resource ids.
public static final class style {
private style() {}
public static final int TextAppearance_Compat_Notification = 0x7f0e00ed;
public static final int TextAppearance_Compat_Notification_Info = 0x7f0e00ee;
public static final int TextAppearance_Compat_Notification_Line2 = 0x7f0e00ef;
public static final int TextAppearance_Compat_Notification_Time = 0x7f0e00f0;
public static final int TextAppearance_Compat_Notification_Title = 0x7f0e00f1;
public static final int Widget_Compat_NotificationActionContainer = 0x7f0e015c;
public static final int Widget_Compat_NotificationActionText = 0x7f0e015d;
}
// declare-styleable arrays and the per-attribute indices into them.
public static final class styleable {
private styleable() {}
public static final int[] ColorStateListItem = { 0x10101a5, 0x101031f, 0x7f030027 };
public static final int ColorStateListItem_android_color = 0;
public static final int ColorStateListItem_android_alpha = 1;
public static final int ColorStateListItem_alpha = 2;
public static final int[] FontFamily = { 0x7f030084, 0x7f030085, 0x7f030086, 0x7f030087, 0x7f030088, 0x7f030089 };
public static final int FontFamily_fontProviderAuthority = 0;
public static final int FontFamily_fontProviderCerts = 1;
public static final int FontFamily_fontProviderFetchStrategy = 2;
public static final int FontFamily_fontProviderFetchTimeout = 3;
public static final int FontFamily_fontProviderPackage = 4;
public static final int FontFamily_fontProviderQuery = 5;
public static final int[] FontFamilyFont = { 0x1010532, 0x1010533, 0x101053f, 0x101056f, 0x1010570, 0x7f030082, 0x7f03008a, 0x7f03008b, 0x7f03008c, 0x7f030142 };
public static final int FontFamilyFont_android_font = 0;
public static final int FontFamilyFont_android_fontWeight = 1;
public static final int FontFamilyFont_android_fontStyle = 2;
public static final int FontFamilyFont_android_ttcIndex = 3;
public static final int FontFamilyFont_android_fontVariationSettings = 4;
public static final int FontFamilyFont_font = 5;
public static final int FontFamilyFont_fontStyle = 6;
public static final int FontFamilyFont_fontVariationSettings = 7;
public static final int FontFamilyFont_fontWeight = 8;
public static final int FontFamilyFont_ttcIndex = 9;
public static final int[] GradientColor = { 0x101019d, 0x101019e, 0x10101a1, 0x10101a2, 0x10101a3, 0x10101a4, 0x1010201, 0x101020b, 0x1010510, 0x1010511, 0x1010512, 0x1010513 };
public static final int GradientColor_android_startColor = 0;
public static final int GradientColor_android_endColor = 1;
public static final int GradientColor_android_type = 2;
public static final int GradientColor_android_centerX = 3;
public static final int GradientColor_android_centerY = 4;
public static final int GradientColor_android_gradientRadius = 5;
public static final int GradientColor_android_tileMode = 6;
public static final int GradientColor_android_centerColor = 7;
public static final int GradientColor_android_startX = 8;
public static final int GradientColor_android_startY = 9;
public static final int GradientColor_android_endX = 10;
public static final int GradientColor_android_endY = 11;
public static final int[] GradientColorItem = { 0x10101a5, 0x1010514 };
public static final int GradientColorItem_android_color = 0;
public static final int GradientColorItem_android_offset = 1;
}
}
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.gradle;
import org.gradle.api.Buildable;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Gradle build model describing a downloadable JDK distribution (vendor,
 * version, platform, architecture) backed by a dependency {@link Configuration}.
 * Implements {@link Buildable} and {@link Iterable} so tasks can depend on,
 * and walk, the resolved JDK files.
 */
public class Jdk implements Buildable, Iterable<File> {
// Whitelists used to validate user-supplied settings before resolution.
private static final List<String> ALLOWED_ARCHITECTURES = Collections.unmodifiableList(Arrays.asList("aarch64", "x64"));
private static final List<String> ALLOWED_VENDORS = Collections.unmodifiableList(Arrays.asList("adoptium", "adoptopenjdk", "openjdk"));
private static final List<String> ALLOWED_PLATFORMS = Collections.unmodifiableList(
Arrays.asList("darwin", "freebsd", "linux", "mac", "windows")
);
// Java 9+ style versions, e.g. "17.0.2+8" or "17+35@<32-char-hex-hash>".
private static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?");
// Pre-Java-9 style versions, e.g. "8u302+b08", optionally with the same hash suffix.
private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)\\+(b\\d+?)(@([a-f0-9]{32}))?");
private final String name;
private final Configuration configuration;
// Lazily-set Gradle properties; frozen by finalizeValues().
private final Property<String> vendor;
private final Property<String> version;
private final Property<String> platform;
private final Property<String> architecture;
// Derived from the version string by parseVersion() when setVersion() is called.
private String baseVersion;
private String major;
private String build;
private String hash;
Jdk(String name, Configuration configuration, ObjectFactory objectFactory) {
this.name = name;
this.configuration = configuration;
this.vendor = objectFactory.property(String.class);
this.version = objectFactory.property(String.class);
this.platform = objectFactory.property(String.class);
this.architecture = objectFactory.property(String.class);
}
public String getName() {
return name;
}
public String getVendor() {
return vendor.get();
}
/** Sets the JDK vendor, rejecting anything outside {@code ALLOWED_VENDORS}. */
public void setVendor(final String vendor) {
if (ALLOWED_VENDORS.contains(vendor) == false) {
throw new IllegalArgumentException("unknown vendor [" + vendor + "] for jdk [" + name + "], must be one of " + ALLOWED_VENDORS);
}
this.vendor.set(vendor);
}
public String getVersion() {
return version.get();
}
/**
 * Sets the JDK version after validating it against both the modern and the
 * legacy (pre-Java-9) version formats; also populates the derived fields
 * (baseVersion, major, build, hash) via parseVersion().
 */
public void setVersion(String version) {
if (VERSION_PATTERN.matcher(version).matches() == false && LEGACY_VERSION_PATTERN.matcher(version).matches() == false) {
throw new IllegalArgumentException("malformed version [" + version + "] for jdk [" + name + "]");
}
parseVersion(version);
this.version.set(version);
}
public String getPlatform() {
return platform.get();
}
/** Sets the target platform, rejecting anything outside {@code ALLOWED_PLATFORMS}. */
public void setPlatform(String platform) {
if (ALLOWED_PLATFORMS.contains(platform) == false) {
throw new IllegalArgumentException(
"unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS
);
}
this.platform.set(platform);
}
public String getArchitecture() {
return architecture.get();
}
/**
 * Sets the target architecture. Validation runs on the JDK-convention name
 * (arm64 is translated to aarch64), but the raw caller-supplied value is what
 * gets stored — appears intentional so callers see their own spelling back;
 * confirm against downstream consumers before changing.
 */
public void setArchitecture(final String architecture) {
String jdkArchitecture = translateJdkArchitecture(architecture);
if (ALLOWED_ARCHITECTURES.contains(jdkArchitecture) == false) {
throw new IllegalArgumentException(
"unknown architecture [" + jdkArchitecture + "] for jdk [" + name + "], must be one of " + ALLOWED_ARCHITECTURES
);
}
this.architecture.set(architecture);
}
public String getBaseVersion() {
return baseVersion;
}
public String getMajor() {
return major;
}
public String getBuild() {
return build;
}
public String getHash() {
return hash;
}
/** @return the path of the single resolved artifact of the backing configuration */
public String getPath() {
return configuration.getSingleFile().toString();
}
public String getConfigurationName() {
return configuration.getName();
}
@Override
public String toString() {
return getPath();
}
@Override
public TaskDependency getBuildDependencies() {
return configuration.getBuildDependencies();
}
/** Lazily-evaluated path to {@code bin/java}; resolved only when toString() is called. */
public Object getBinJavaPath() {
return new Object() {
@Override
public String toString() {
return getHomeRoot() + "/bin/java";
}
};
}
/** Lazily-evaluated JAVA_HOME path; resolved only when toString() is called. */
public Object getJavaHomePath() {
return new Object() {
@Override
public String toString() {
return getHomeRoot();
}
};
}
private String getHomeRoot() {
// macOS JDK bundles nest the actual home under Contents/Home.
boolean isOSX = "mac".equals(getPlatform()) || "darwin".equals(getPlatform());
return getPath() + (isOSX ? "/Contents/Home" : "");
}
// internal, make this jdks configuration unmodifiable
void finalizeValues() {
if (version.isPresent() == false) {
throw new IllegalArgumentException("version not specified for jdk [" + name + "]");
}
if (platform.isPresent() == false) {
throw new IllegalArgumentException("platform not specified for jdk [" + name + "]");
}
if (vendor.isPresent() == false) {
throw new IllegalArgumentException("vendor not specified for jdk [" + name + "]");
}
if (architecture.isPresent() == false) {
throw new IllegalArgumentException("architecture not specified for jdk [" + name + "]");
}
version.finalizeValue();
platform.finalizeValue();
vendor.finalizeValue();
architecture.finalizeValue();
}
@Override
public Iterator<File> iterator() {
return configuration.iterator();
}
private void parseVersion(String version) {
// decompose the bundled jdk version, broken into elements as: [feature, interim, update, build]
// Note the "patch" version is not yet handled here, as it has not yet been used by java.
Matcher jdkVersionMatcher = VERSION_PATTERN.matcher(version);
if (jdkVersionMatcher.matches() == false) {
// Try again with the pre-Java9 version format
jdkVersionMatcher = LEGACY_VERSION_PATTERN.matcher(version);
if (jdkVersionMatcher.matches() == false) {
throw new IllegalArgumentException("Malformed jdk version [" + version + "]");
}
}
// group(1)=major, group(2)=interim/update suffix, group(3)=build, group(5)=checksum hash.
baseVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "");
major = jdkVersionMatcher.group(1);
build = jdkVersionMatcher.group(3);
hash = jdkVersionMatcher.group(5);
}
private String translateJdkArchitecture(String architecture) {
/*
 * Jdk uses aarch64 from ARM. Translating from arm64 to aarch64 which Jdk understands.
 */
return "arm64".equals(architecture) ? "aarch64" : architecture;
}
}
|
package com.dao.impl;
import org.springframework.stereotype.Repository;
import com.dao.UserDao;
import com.entity.User;
/**
 * Spring {@link Repository} for {@link User} entities keyed by {@code Integer} id.
 * All data-access behaviour is inherited from {@code BaseDaoImpl}; this subclass
 * exists so the generic base can be wired as a {@code UserDao} bean.
 */
@Repository
public class UserDaoImpl extends BaseDaoImpl<User, Integer> implements UserDao {
}
|
package linj.lib.rnrestart;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.Callback;
import android.util.Log;
import com.facebook.soloader.SoLoader;
import com.facebook.react.bridge.JSBundleLoader;
import com.facebook.react.bridge.JavaScriptExecutorFactory;
import com.facebook.react.jscexecutor.JSCExecutorFactory;
import com.facebook.hermes.reactexecutor.HermesExecutorFactory;
import static com.facebook.react.modules.systeminfo.AndroidInfoHelpers.getFriendlyDeviceName;
import static com.facebook.react.bridge.UiThreadUtil.runOnUiThread;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import android.app.Application;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactInstanceManager;
import androidx.annotation.Nullable;
/**
 * React Native module ("RNRestart") that restarts the JS application by
 * reflectively recreating the React context from a bundle file on disk.
 */
public class RNRestartModule extends ReactContextBaseJavaModule {
    private final ReactApplicationContext reactContext;

    public RNRestartModule(ReactApplicationContext reactContext) {
        super(reactContext);
        this.reactContext = reactContext;
    }

    /**
     * Recreates the React context on the UI thread.
     *
     * @param jsBundleFile absolute path of the JS bundle to load; when null,
     *                     defaults to {@code <filesDir>/index.android.bundle}
     */
    // FIX: removed a pointless `try { ... } finally {}` wrapper (empty finally,
    // no catch) that added nesting without changing behavior.
    @ReactMethod
    public void restartApp(final @Nullable String jsBundleFile) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (getCurrentActivity() != null) {
                    Application app = getCurrentActivity().getApplication();
                    ReactApplication reactApp = (ReactApplication) app;
                    ReactNativeHost host = reactApp.getReactNativeHost();
                    ReactInstanceManager reactInstanceManager = host.getReactInstanceManager();
                    try {
                        // recreateReactContextInBackground(JavaScriptExecutorFactory, JSBundleLoader)
                        // is not public API, so it is invoked reflectively.
                        Class<?> managerClass = reactInstanceManager.getClass();
                        Method method = managerClass.getDeclaredMethod("recreateReactContextInBackground", JavaScriptExecutorFactory.class, JSBundleLoader.class);
                        method.setAccessible(true);
                        method.invoke(
                                reactInstanceManager,
                                getJavaScriptExecutorFactory(app.getPackageName(), getFriendlyDeviceName()),
                                JSBundleLoader.createFileLoader(jsBundleFile != null ? jsBundleFile : app.getFilesDir() + "/index.android.bundle")
                        );
                        Log.v("ReactNativeRestartApp", "success!");
                    } catch (NoSuchMethodException e) {
                        Log.v("ReactNativeRestartApp", "NoSuchMethodException");
                        e.printStackTrace();
                    } catch (IllegalAccessException e) {
                        Log.v("ReactNativeRestartApp", "IllegalAccessException");
                        e.printStackTrace();
                    } catch (InvocationTargetException e) {
                        Log.v("ReactNativeRestartApp", "InvocationTargetException");
                        e.printStackTrace();
                    } catch (IllegalArgumentException e) {
                        Log.v("ReactNativeRestartApp", "IllegalArgumentException");
                        e.printStackTrace();
                    }
                }
            }

            // copy from ReactInstanceManagerBuilder.getDefaultJSExecutorFactory
            // line: 287 — prefer JSC when its native library loads, else Hermes.
            private JavaScriptExecutorFactory getJavaScriptExecutorFactory(final String appName, final String deviceName) {
                try {
                    // If JSC is included, use it as normal
                    SoLoader.loadLibrary("jscexecutor");
                    return new JSCExecutorFactory(appName, deviceName);
                } catch (UnsatisfiedLinkError jscE) {
                    // Otherwise use Hermes
                    return new HermesExecutorFactory();
                }
            }
        });
    }

    @Override
    public String getName() {
        return "RNRestart";
    }
}
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.commands.expressions.types.dtable;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.model.DecisionRule;
import org.kie.workbench.common.dmn.api.definition.model.DecisionTable;
import org.kie.workbench.common.dmn.api.definition.model.InputClause;
import org.kie.workbench.common.dmn.api.definition.model.OutputClause;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionRuleFactory;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableDefaultValueUtilities;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DescriptionColumn;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.InputClauseColumn;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.OutputClauseColumn;
import org.kie.workbench.common.dmn.client.widgets.grid.controls.list.ListSelectorView;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridData;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandResultBuilder;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandResultBuilder;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.ext.wires.core.grids.client.model.GridData;
import org.uberfire.ext.wires.core.grids.client.model.GridRow;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridRow;
import org.uberfire.ext.wires.core.grids.client.widget.grid.columns.RowNumberColumn;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.kie.workbench.common.dmn.client.widgets.grid.model.BaseHasDynamicHeightCell.DEFAULT_HEIGHT;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class)
public class AddDecisionRuleCommandTest {
private DecisionTable dtable;
private DecisionRule rule;
private GridData uiModel;
private GridRow uiModelRow;
private DecisionTableUIModelMapper uiModelMapper;
private AddDecisionRuleCommand command;
@Mock
private RowNumberColumn uiRowNumberColumn;
@Mock
private InputClauseColumn uiInputClauseColumn;
@Mock
private OutputClauseColumn uiOutputClauseColumn;
@Mock
private DescriptionColumn uiDescriptionColumn;
@Mock
private ListSelectorView.Presenter listSelector;
@Mock
private AbstractCanvasHandler canvasHandler;
@Mock
private GraphCommandExecutionContext graphCommandExecutionContext;
@Mock
private org.uberfire.mvp.Command canvasOperation;
@Before
public void setup() {
// Fresh fixtures per test: an empty decision table bound to a UI grid whose
// only initial column is the row-number column.
this.dtable = new DecisionTable();
this.uiModel = new DMNGridData();
this.uiModel.appendColumn(uiRowNumberColumn);
this.uiModelRow = new BaseGridRow();
this.uiModelMapper = new DecisionTableUIModelMapper(() -> uiModel,
() -> Optional.of(dtable),
listSelector,
DEFAULT_HEIGHT);
// Column indices mirror the expected UI layout: row#, input, output, description.
doReturn(0).when(uiRowNumberColumn).getIndex();
doReturn(1).when(uiInputClauseColumn).getIndex();
doReturn(2).when(uiOutputClauseColumn).getIndex();
doReturn(3).when(uiDescriptionColumn).getIndex();
}
// Builds the command under test at the given row index with a freshly-created
// rule; the command is spied so tests can verify interactions on it.
private void makeCommand(final int index) {
this.rule = DecisionRuleFactory.makeDecisionRule(dtable);
this.command = spy(new AddDecisionRuleCommand(dtable,
rule,
uiModel,
uiModelRow,
index,
uiModelMapper,
canvasOperation));
}
@Test
public void testGraphCommandAllow() throws Exception {
makeCommand(0);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
// Adding a rule must always be permitted on an empty table.
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.allow(graphCommandExecutionContext));
}
@Test
public void testGraphCommandCheck() throws Exception {
// NOTE(review): this test is byte-identical to testGraphCommandAllow() — it
// exercises allow() again rather than a distinct "check" operation. Possibly
// it was meant to call a different command method; confirm against the
// Command API before consolidating or renaming.
makeCommand(0);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.allow(graphCommandExecutionContext));
}
@Test
public void testGraphCommandExecuteConstructedDescription() {
    makeCommand(0);
    assertEquals(0, dtable.getRule().size());
    final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
    // Executing the command appends the new rule, pre-populated with the
    // default description, and wires its parent back to the table.
    assertEquals(GraphCommandResultBuilder.SUCCESS,
                 graphCommand.execute(graphCommandExecutionContext));
    assertEquals(1, dtable.getRule().size());
    assertEquals(rule, dtable.getRule().get(0));
    // assertNotNull reports a clearer failure than assertTrue(x != null).
    assertNotNull(rule.getDescription());
    assertNotNull(rule.getDescription().getValue());
    assertEquals(DecisionTableDefaultValueUtilities.RULE_DESCRIPTION, rule.getDescription().getValue());
    assertEquals(dtable,
                 rule.getParent());
}
@Test
public void testGraphCommandExecuteConstructedRuleInputs() {
    assertEquals(0, dtable.getRule().size());
    // Give the table two input clauses so the new rule must grow matching entries.
    final int inputsCount = 2;
    for (int i = 0; i < inputsCount; i++) {
        dtable.getInput().add(new InputClause());
    }
    makeCommand(0);
    final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
    assertEquals(GraphCommandResultBuilder.SUCCESS,
                 graphCommand.execute(graphCommandExecutionContext));
    assertEquals(1, dtable.getRule().size());
    assertEquals(rule, dtable.getRule().get(0));
    // One input entry per input clause, no output entries on an output-less table.
    assertEquals(inputsCount, rule.getInputEntry().size());
    assertEquals(0, rule.getOutputEntry().size());
    for (int inputIndex = 0; inputIndex < inputsCount; inputIndex++) {
        // assertNotNull reports a clearer failure than assertTrue(x != null).
        assertNotNull(rule.getInputEntry().get(inputIndex).getText());
        assertEquals(DecisionTableDefaultValueUtilities.INPUT_CLAUSE_UNARY_TEST_TEXT, rule.getInputEntry().get(inputIndex).getText().getValue());
        assertEquals(rule, rule.getInputEntry().get(inputIndex).getParent());
    }
    assertEquals(dtable,
                 rule.getParent());
}
@Test
public void testGraphCommandExecuteConstructedRuleOutputs() {
    assertEquals(0, dtable.getRule().size());
    // Give the table two output clauses so the new rule must grow matching entries.
    final int outputsCount = 2;
    for (int i = 0; i < outputsCount; i++) {
        dtable.getOutput().add(new OutputClause());
    }
    makeCommand(0);
    final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
    assertEquals(GraphCommandResultBuilder.SUCCESS,
                 graphCommand.execute(graphCommandExecutionContext));
    assertEquals(1, dtable.getRule().size());
    assertEquals(rule, dtable.getRule().get(0));
    // One output entry per output clause, no input entries on an input-less table.
    assertEquals(0, rule.getInputEntry().size());
    assertEquals(outputsCount, rule.getOutputEntry().size());
    for (int outputIndex = 0; outputIndex < outputsCount; outputIndex++) {
        // assertNotNull reports a clearer failure than assertTrue(x != null).
        assertNotNull(rule.getOutputEntry().get(outputIndex).getText());
        assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, rule.getOutputEntry().get(outputIndex).getText().getValue());
        assertEquals(rule, rule.getOutputEntry().get(outputIndex).getParent());
    }
    assertEquals(dtable,
                 rule.getParent());
}
@Test
public void testGraphCommandUndo() {
makeCommand(0);
assertEquals(0, dtable.getRule().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
// execute() appends the rule; undo() must restore the original (empty) rule list.
graphCommand.execute(graphCommandExecutionContext);
assertEquals(1, dtable.getRule().size());
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
assertEquals(0, dtable.getRule().size());
}
@Test
public void testGraphCommandExecuteInsertBelow() {
//The default behaviour of tests in this class is to "insert above"
final DecisionRule existingRule = new DecisionRule();
dtable.getRule().add(existingRule);
// Index 1 targets the position after the existing rule, i.e. "insert below".
makeCommand(1);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
assertEquals(2,
dtable.getRule().size());
// Existing rule keeps position 0; the new rule is inserted at position 1.
assertEquals(existingRule,
dtable.getRule().get(0));
assertEquals(rule,
dtable.getRule().get(1));
}
@Test
public void testGraphCommandExecuteInsertBelowThenUndo() {
//The default behaviour of tests in this class is to "insert above"
final DecisionRule existingRule = new DecisionRule();
dtable.getRule().add(existingRule);
// Index 1 = "insert below" the existing rule.
makeCommand(1);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
graphCommand.undo(graphCommandExecutionContext);
// Undo must remove only the inserted rule, leaving the pre-existing one intact.
assertEquals(1,
dtable.getRule().size());
assertEquals(existingRule,
dtable.getRule().get(0));
}
@Test
public void testCanvasCommandAllow() throws Exception {
makeCommand(0);
// The canvas-side command's pre-check should succeed without touching the UI model.
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.allow(canvasHandler));
}
@Test
public void testCanvasCommandAddRuleAndThenUndo() throws Exception {
// Model needs one input and one output clause so the UI row gets all default cells.
dtable.getInput().add(new InputClause());
dtable.getOutput().add(new OutputClause());
makeCommand(0);
// The graph command must run first: the canvas command reflects the model state.
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
uiModel.appendColumn(uiInputClauseColumn);
uiModel.appendColumn(uiOutputClauseColumn);
uiModel.appendColumn(uiDescriptionColumn);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddRuleCommand = command.newCanvasCommand(canvasHandler);
canvasAddRuleCommand.execute(canvasHandler);
assertEquals(1, uiModel.getRowCount());
assertDefaultUiRowValues(0);
canvasAddRuleCommand.undo(canvasHandler);
assertEquals(0, uiModel.getRowCount());
// one time in execute(), one time in undo()
verify(canvasOperation, times(2)).execute();
verify(command, times(2)).updateRowNumbers();
verify(command, times(2)).updateParentInformation();
}
@Test
public void testCanvasCommandExecuteInsertBelow() {
//The default behaviour of tests in this class is to "insert above"
final DecisionRule existingRule = new DecisionRule();
final GridRow existingUiRow = new BaseGridRow();
dtable.getRule().add(existingRule);
uiModel.appendRow(existingUiRow);
dtable.getInput().add(new InputClause());
dtable.getOutput().add(new OutputClause());
// Index 1 = "insert below" the existing row.
makeCommand(1);
uiModel.appendColumn(uiInputClauseColumn);
uiModel.appendColumn(uiOutputClauseColumn);
uiModel.appendColumn(uiDescriptionColumn);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
canvasCommand.execute(canvasHandler);
// Existing UI row stays at index 0; the new row (with default cells) appears at index 1.
assertEquals(2,
uiModel.getRowCount());
assertEquals(existingUiRow,
uiModel.getRow(0));
assertEquals(uiModelRow,
uiModel.getRow(1));
assertDefaultUiRowValues(1);
verify(command).updateRowNumbers();
verify(command).updateParentInformation();
}
@Test
public void testCanvasCommandExecuteInsertBelowThenUndo() {
//The default behaviour of tests in this class is to "insert above"
final DecisionRule existingRule = new DecisionRule();
final GridRow existingUiRow = new BaseGridRow();
dtable.getRule().add(existingRule);
uiModel.appendRow(existingUiRow);
// Index 1 = "insert below" the existing row.
makeCommand(1);
uiModel.appendColumn(uiInputClauseColumn);
uiModel.appendColumn(uiOutputClauseColumn);
uiModel.appendColumn(uiDescriptionColumn);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
canvasCommand.execute(canvasHandler);
canvasCommand.undo(canvasHandler);
// Undo removes only the inserted UI row, restoring the original single-row model.
assertEquals(1,
uiModel.getRowCount());
assertEquals(existingUiRow,
uiModel.getRow(0));
// one time in execute(), one time in undo()
verify(canvasOperation, times(2)).execute();
verify(command, times(2)).updateRowNumbers();
verify(command, times(2)).updateParentInformation();
}
// Asserts the UI row at uiRowIndex holds the default cell values:
// column 0 = 1-based row number, column 1 = input clause default,
// column 2 = output clause default, column 3 = rule description default.
private void assertDefaultUiRowValues(final int uiRowIndex) {
final GridRow uiGridRow = uiModel.getRow(uiRowIndex);
assertEquals(uiRowIndex + 1, uiGridRow.getCells().get(0).getValue().getValue());
assertEquals(DecisionTableDefaultValueUtilities.INPUT_CLAUSE_UNARY_TEST_TEXT, uiGridRow.getCells().get(1).getValue().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, uiGridRow.getCells().get(2).getValue().getValue());
assertEquals(DecisionTableDefaultValueUtilities.RULE_DESCRIPTION, uiGridRow.getCells().get(3).getValue().getValue());
}
}
|
package com.draga.spaceTravels3.manager;
import com.badlogic.gdx.Gdx;
import com.draga.errorHandler.ErrorHandlerProvider;
import com.draga.spaceTravels3.ui.Screen;
import com.google.common.base.Joiner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Stack;
/**
 * Static manager for a stack of {@link Screen}s. Screens are rendered in
 * stack order; add/remove requests are queued and applied at the start of
 * the next {@link #render(float)} pass so callers may request changes while
 * the stack is being iterated.
 */
public abstract class ScreenManager
{
    private static final String LOGGING_TAG = ScreenManager.class.getSimpleName();

    // Top of the stack is the active screen.
    private static Stack<Screen> screens;
    // Pending mutations, applied by updateScreens().
    private static LinkedList<Screen> screensToAdd;
    private static LinkedList<Screen> screensToRemove;
    // Maps a blocking screen to the screens it hid, so they can be restored
    // (and are still reachable for disposal) while the blocker is on top.
    private static HashMap<Screen, ArrayList<Screen>> screenBlockedScreens;

    /** Initialises the manager's state. Must be called before any other method. */
    public static void create()
    {
        screens = new Stack<>();
        screensToAdd = new LinkedList<>();
        screensToRemove = new LinkedList<>();
        screenBlockedScreens = new HashMap<>();
    }

    /** Queues a screen for addition on the next render pass. */
    public static void addScreen(Screen screen)
    {
        screensToAdd.addLast(screen);
    }

    /** Queues a screen for removal on the next render pass. */
    public static void removeScreen(Screen screen)
    {
        screensToRemove.addLast(screen);
    }

    /** Applies pending add/remove requests, then renders every screen on the stack. */
    public static void render(float deltaTime)
    {
        updateScreens();
        for (Screen screen : screens)
        {
            screen.render(deltaTime);
        }
    }

    private static void updateScreens()
    {
        if (!screensToRemove.isEmpty())
        {
            while (!screensToRemove.isEmpty())
            {
                Screen screenToRemove = screensToRemove.removeFirst();
                Gdx.app.debug(LOGGING_TAG, "Removing " + screenToRemove.getClass().getSimpleName());
                if (screens.contains(screenToRemove))
                {
                    // Restore any screens this one was blocking.
                    if (screenBlockedScreens.containsKey(screenToRemove))
                    {
                        ArrayList<Screen> screensToUnblock =
                            screenBlockedScreens.get(screenToRemove);
                        Gdx.app.debug(
                            LOGGING_TAG,
                            "Unblocking " + Joiner.on(", ").join(screensToUnblock));
                        screens.addAll(screensToUnblock);
                        screenBlockedScreens.remove(screenToRemove);
                    }
                    screens.remove(screenToRemove);
                    screenToRemove.hide();
                    screenToRemove.dispose();
                }
                else
                {
                    ErrorHandlerProvider.handle(
                        LOGGING_TAG,
                        "Trying to remove "
                            + screenToRemove.getClass()
                            + " that is not in the list!");
                }
            }
            // BUG FIX: peek() on an empty stack throws EmptyStackException when
            // the last screen has just been removed — guard before showing.
            if (!screens.empty())
            {
                screens.peek().show();
            }
        }
        while (!screensToAdd.isEmpty())
        {
            Screen screenToAdd = screensToAdd.removeFirst();
            Gdx.app.debug(LOGGING_TAG, "Adding " + screenToAdd.getClass().getSimpleName());
            screenToAdd.show();
            if (screenToAdd.blockParents())
            {
                // Hide and set aside every blockable screen currently shown.
                ArrayList<Screen> blockedScreens = new ArrayList<>();
                for (Screen screen : screens)
                {
                    if (screen.isBlockable())
                    {
                        screen.hide();
                        blockedScreens.add(screen);
                    }
                }
                if (!blockedScreens.isEmpty())
                {
                    Gdx.app.debug(LOGGING_TAG, "Blocking " + Joiner.on(", ").join(blockedScreens));
                    screens.removeAll(blockedScreens);
                    screenBlockedScreens.put(screenToAdd, blockedScreens);
                }
            }
            screens.add(screenToAdd);
            screenToAdd.onAdded();
        }
    }

    /** Disposes every screen, including those hidden because they are blocked. */
    public static void dispose()
    {
        for (Screen screen : screens)
        {
            screen.dispose();
        }
        // BUG FIX: blocked screens were removed from the stack and held only in
        // screenBlockedScreens; previously they leaked their resources here.
        for (ArrayList<Screen> blockedScreens : screenBlockedScreens.values())
        {
            for (Screen blockedScreen : blockedScreens)
            {
                blockedScreen.dispose();
            }
        }
    }

    /** Resumes the top-most screen, if any. */
    public static void resume()
    {
        if (!screens.empty())
        {
            screens.peek().resume();
        }
    }

    /** Pauses the top-most screen, if any. */
    public static void pause()
    {
        if (!screens.empty())
        {
            screens.peek().pause();
        }
    }
}
|
package com.github.fanavarro.graphlib.algorithms.subtree;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.junit.Test;
import org.meanbean.lang.EquivalentFactory;
import org.meanbean.lang.Factory;
import org.meanbean.test.BeanTester;
import org.meanbean.test.Configuration;
import org.meanbean.test.EqualsMethodTester;
import org.meanbean.test.HashCodeMethodTester;
import com.github.fanavarro.graphlib.FakeGraph;
import com.github.fanavarro.graphlib.Graph;
import com.github.fanavarro.graphlib.SimpleTreeImpl;
import com.github.fanavarro.graphlib.Tree;
import com.github.fanavarro.graphlib.algorithms.subtree.SubtreeInput;
import com.github.fanavarro.graphlib.algorithms.subtree.SubtreeOutput;
import com.github.fanavarro.graphlib.test_config.GraphTestFactory;
import com.github.fanavarro.graphlib.test_config.MeanBeanConfigurationBase;
/**
* The Class SubtreeOutputTest.
*/
public class SubtreeOutputTest {
/** The configuration. Overrides meanbean's reflective generation of the
 * "input" property (matched by name) with an explicit factory, since
 * SubtreeInput cannot be auto-generated. */
private final Configuration configuration = MeanBeanConfigurationBase.getConfigurationBuilderBase()
.overrideFactory("input", new InputFactory()).build();
/**
 * Test getters and setters.
 */
@Test
public void testGettersAndSetters() {
new BeanTester().testBean(SubtreeOutput.class, configuration);
}
/**
 * Test equals via meanbean's contract checker (reflexive/symmetric/transitive).
 */
@Test
public void testEquals() {
EqualsMethodTester tester = new EqualsMethodTester();
tester.getFactoryCollection().addFactory(Graph.class, new GraphTestFactory());
tester.testEqualsMethod(new SubtreeOutputEquivalentFactory(), configuration);
}
/**
 * Test equals with hand-rolled scenarios: two equivalent instances, then
 * instances differing by a null "input", then by a null "trees".
 */
@Test
public void testEquals2() {
SubtreeOutput<String, String> o1;
SubtreeOutput<String, String> o2;
// Two independently built but equivalent outputs must be equal both ways
// and share a hash code.
o1 = new SubtreeOutputEquivalentFactory().create();
o2 = new SubtreeOutputEquivalentFactory().create();
assertTrue(o1.equals(o2));
assertTrue(o2.equals(o1));
assertTrue(o1.equals(o1));
assertTrue(o2.equals(o2));
assertTrue(o1.hashCode() == o2.hashCode());
// Nulling "input" on one side must break equality symmetrically.
o1 = new SubtreeOutputEquivalentFactory().create();
o2 = new SubtreeOutputEquivalentFactory().create();
o2.setInput(null);
assertFalse(o1.equals(o2));
assertFalse(o2.equals(o1));
assertTrue(o1.equals(o1));
assertTrue(o2.equals(o2));
assertFalse(o1.hashCode() == o2.hashCode());
// Nulling "trees" on one side must likewise break equality.
o1 = new SubtreeOutputEquivalentFactory().create();
o2 = new SubtreeOutputEquivalentFactory().create();
o2.setTrees(null);
assertFalse(o1.equals(o2));
assertFalse(o2.equals(o1));
assertTrue(o1.equals(o1));
assertTrue(o2.equals(o2));
assertFalse(o1.hashCode() == o2.hashCode());
}
/**
 * Test hash.
 */
@Test
public void testHash() {
HashCodeMethodTester tester = new HashCodeMethodTester();
tester.getFactoryCollection().addFactory(Graph.class, new GraphTestFactory());
tester.testHashCodeMethod(new SubtreeOutputEquivalentFactory());
}
/**
 * A factory for creating SubtreeOutputEquivalent objects. Every call
 * produces a logically-equal instance, as meanbean's EquivalentFactory
 * contract requires.
 */
private class SubtreeOutputEquivalentFactory implements EquivalentFactory<SubtreeOutput<String, String>> {
/*
 * (non-Javadoc)
 *
 * @see org.meanbean.lang.EquivalentFactory#create()
 */
@Override
public SubtreeOutput<String, String> create() {
SubtreeOutput<String, String> output = new SubtreeOutput<String, String>();
SubtreeInput<String, String> input = new InputFactory().create();
output.setTrees(createExampleTrees());
output.setInput(input);
return output;
}
}
/**
 * A factory for creating Input objects: a fixed FakeGraph with a fixed
 * node set, so repeated calls yield equal inputs.
 */
private class InputFactory implements Factory<SubtreeInput<String, String>> {
/*
 * (non-Javadoc)
 *
 * @see org.meanbean.lang.Factory#create()
 */
@Override
public SubtreeInput<String, String> create() {
SubtreeInput<String, String> input = new SubtreeInput<String, String>();
input.setGraph(new FakeGraph());
input.setNodesToBeContained(new HashSet<String>(Arrays.asList("B", "C", "F")));
return input;
}
}
/**
 * Creates the sample trees.
 * Note: each tree contains two parallel E->F edges with distinct labels
 * ("5" and "6") — presumably intentional to exercise multi-edge handling.
 *
 * @return the sets the
 */
private Set<Tree<String, String>> createExampleTrees() {
SimpleTreeImpl<String, String> tree1 = new SimpleTreeImpl<String, String>();
tree1.addNode("B", "2", "C");
tree1.addNode("B", "3", "D");
tree1.addNode("D", "4", "E");
tree1.addNode("E", "5", "F");
tree1.addNode("E", "6", "F");
SimpleTreeImpl<String, String> tree2 = new SimpleTreeImpl<String, String>();
tree2.addNode("C", "8", "B");
tree2.addNode("B", "3", "D");
tree2.addNode("D", "4", "E");
tree2.addNode("E", "5", "F");
tree2.addNode("E", "6", "F");
SimpleTreeImpl<String, String> tree3 = new SimpleTreeImpl<String, String>();
tree3.addNode("E", "7", "C");
tree3.addNode("C", "8", "B");
tree3.addNode("E", "5", "F");
tree3.addNode("E", "6", "F");
Set<Tree<String, String>> exampleTrees = new HashSet<Tree<String, String>>();
exampleTrees.add(tree1);
exampleTrees.add(tree2);
exampleTrees.add(tree3);
return exampleTrees;
}
}
|
import java.util.*;
/**
 * Demonstrates String.indexOf / lastIndexOf, including the case where the
 * fromIndex is beyond the end of the string (which yields -1).
 */
public class getindex {
    public static void main(String args[])
    {
        final String sentence = "my name is yash";

        // First occurrence of "s".
        System.out.println(sentence.indexOf("s"));

        // fromIndex 20 is past the end of the 15-character string, so -1.
        int pastEnd = sentence.indexOf("s", 20);
        System.out.println(pastEnd);

        // First occurrence of "s" at or after index 10.
        int fromTen = sentence.indexOf("s", 10);
        System.out.println(fromTen);

        // Last occurrence of "s".
        System.out.println(sentence.lastIndexOf("s"));

        final String word = "mississippi";
        System.out.println(word.indexOf("i"));
        System.out.println(word.lastIndexOf("i"));
        System.out.println(word.indexOf("i", 5));
    }
}
|
/* Another example class.
* Again, you don't need to modify this.
*/
public class Point3D extends Point {
// Z coordinate added on top of Point's x/y. Public mutable field, matching
// the apparent style of the Point base class (declared elsewhere — TODO confirm).
public float z;
public Point3D(float x, float y, float z) {
super(x, y);
this.z = z;
}
// Two Point3D instances are equal iff all three coordinates match.
// NOTE(review): a Point3D never equals a plain Point here, but Point.equals
// may accept a Point3D — possible symmetry violation; confirm against
// Point's equals implementation.
@Override
public boolean equals(Object o) {
return (o instanceof Point3D)
&& x == ((Point3D)o).x
&& y == ((Point3D)o).y
&& z == ((Point3D)o).z;
}
// Delegates to Point's hashCode. Equal Point3D objects share x and y, so the
// hashCode/equals contract presumably holds even though z is ignored —
// TODO confirm Point.hashCode derives only from x/y.
public int hashCode(){
return super.hashCode();
}
}
|
package playgrounds;
import various.Gender;
import various.Person;
/**
 * Example playground that builds a small family, wires up the
 * parent/child/sibling relationships and prints a summary to stdout.
 */
public class Family implements Playground {

    private Person mother;
    private Person father;
    private Person son;
    private Person daughter;

    @Override
    public void playground() {
        mother = new Person("Moira", Gender.FEMALE);
        father = new Person("Robert", Gender.MALE);
        son = new Person("Oliver", Gender.MALE);
        daughter = new Person("Thea", Gender.FEMALE);

        mother.setChildren(new Person[]{son, daughter});
        father.setChildren(new Person[]{son, daughter});
        son.setSiblings(new Person[]{daughter});
        daughter.setSiblings(new Person[]{son});
        son.setMother(mother);
        daughter.setMother(mother);
        son.setFather(father);
        daughter.setFather(father);

        // Renamed from "toString" — a local variable shadowing the method
        // name was confusing, though legal.
        String summary = toString();
        System.out.println(summary);
    }

    /**
     * @return one line per family member; the daughter's line has no
     *         trailing newline
     */
    @Override
    public String toString() {
        // new StringBuilder("") allocated a redundant empty seed string.
        var text = new StringBuilder();
        text.append("Mother: ").append(mother.getName()).append("\n");
        text.append("Father: ").append(father.getName()).append("\n");
        text.append("Son: ").append(son.getName()).append("\n");
        text.append("Daughter: ").append(daughter.getName());
        return text.toString();
    }
}
|
/*
*
* * Copyright (c) 2020 Stefan Spiska (Vitasystems GmbH) and Hannover Medical School
* * This file is part of Project EHRbase
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.ehrbase.serialisation.util;
/** Created by christian on 8/1/2017. */
/**
 * Converts camelCase identifiers to snake_case. An uppercase letter that
 * directly follows '&lt;' (generic type syntax, e.g. "List&lt;Item&gt;") is
 * lowercased without a preceding underscore.
 */
public class SnakeCase {
protected String aString;

public SnakeCase(String aString) {
this.aString = aString;
}

/**
 * @return the wrapped string in lower snake_case; '&lt;' suppresses the
 *         underscore before the following capital
 */
public String camelToSnake() {
final StringBuilder result = new StringBuilder(aString.length() + 8);
for (int index = 0; index < aString.length(); index++) {
final char current = aString.charAt(index);
if (!Character.isUpperCase(current)) {
result.append(current);
continue;
}
// No underscore at the very start, nor right after a '<'.
final boolean needsSeparator = index > 0 && aString.charAt(index - 1) != '<';
if (needsSeparator) {
result.append('_');
}
result.append(Character.toLowerCase(current));
}
return result.toString();
}

/**
 * @return UPPER_SNAKE_CASE form, with well-known acronyms (ISM, ID, EHR,
 *         URI) collapsed back into single tokens
 */
public String camelToUpperSnake() {
return camelToSnake()
.toUpperCase()
.replace("I_S_M", "ISM")
.replace("_I_D", "_ID")
.replace("_E_H_R", "_EHR")
.replace("_U_R_I", "_URI");
}
}
|
package polymorphism3;
// Polymorphism demo: Student specialises Person with its own display() message.
public class Student extends Person {
/** Prints this role's self-description; overrides Person.display(). */
@Override
void display(){
System.out.println("I am student");
}
}
|
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.plugin;
import me.lucko.luckperms.api.Contexts;
import me.lucko.luckperms.api.platform.PlatformType;
import me.lucko.luckperms.common.actionlog.LogDispatcher;
import me.lucko.luckperms.common.api.LuckPermsApiProvider;
import me.lucko.luckperms.common.buffers.BufferedRequest;
import me.lucko.luckperms.common.caching.handlers.CachedStateManager;
import me.lucko.luckperms.common.calculators.CalculatorFactory;
import me.lucko.luckperms.common.commands.CommandManager;
import me.lucko.luckperms.common.commands.abstraction.Command;
import me.lucko.luckperms.common.commands.sender.Sender;
import me.lucko.luckperms.common.commands.utils.CommandUtils;
import me.lucko.luckperms.common.config.LuckPermsConfiguration;
import me.lucko.luckperms.common.contexts.ContextManager;
import me.lucko.luckperms.common.dependencies.DependencyManager;
import me.lucko.luckperms.common.dependencies.classloader.PluginClassLoader;
import me.lucko.luckperms.common.event.EventFactory;
import me.lucko.luckperms.common.inheritance.InheritanceHandler;
import me.lucko.luckperms.common.locale.LocaleManager;
import me.lucko.luckperms.common.logging.Logger;
import me.lucko.luckperms.common.managers.group.GroupManager;
import me.lucko.luckperms.common.managers.track.TrackManager;
import me.lucko.luckperms.common.managers.user.UserManager;
import me.lucko.luckperms.common.messaging.InternalMessagingService;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.storage.Storage;
import me.lucko.luckperms.common.storage.dao.file.FileWatcher;
import me.lucko.luckperms.common.treeview.PermissionVault;
import me.lucko.luckperms.common.verbose.VerboseHandler;
import java.io.File;
import java.io.InputStream;
import java.util.*;
import java.util.stream.Stream;
/**
* Main internal interface for LuckPerms plugins, providing the base for
* abstraction throughout the project.
*
* All plugin platforms implement this interface.
*/
public interface LuckPermsPlugin {
/**
 * Gets the user manager instance for the platform
 *
 * @return the user manager
 */
UserManager<?> getUserManager();
/**
 * Gets the group manager instance for the platform
 *
 * @return the group manager
 */
GroupManager<?> getGroupManager();
/**
 * Gets the track manager instance for the platform
 *
 * @return the track manager
 */
TrackManager<?> getTrackManager();
/**
 * Gets the plugin's configuration
 *
 * @return the plugin config
 */
LuckPermsConfiguration getConfiguration();
/**
 * Gets the primary data storage instance. This is likely to be wrapped with extra layers for caching, etc.
 *
 * @return the storage handler instance
 */
Storage getStorage();
/**
 * Gets the messaging service.
 *
 * @return the messaging service
 */
Optional<InternalMessagingService> getMessagingService();
/**
 * Sets the messaging service.
 *
 * @param service the service
 */
void setMessagingService(InternalMessagingService service);
/**
 * Gets a wrapped logger instance for the platform.
 *
 * @return the plugin's logger
 */
Logger getLog();
/**
 * Gets the event factory
 *
 * @return the event factory
 */
EventFactory getEventFactory();
/**
 * Returns the class implementing the LuckPermsAPI on this platform.
 *
 * @return the api
 */
LuckPermsApiProvider getApiProvider();
/**
 * Gets the command manager
 *
 * @return the command manager
 */
CommandManager getCommandManager();
/**
 * Gets the instance providing locale translations for the plugin
 *
 * @return the locale manager
 */
LocaleManager getLocaleManager();
/**
 * Gets the classloader wrapper for adding dependencies to the classpath
 *
 * @return the plugin classloader
 */
PluginClassLoader getPluginClassLoader();
/**
 * Gets the dependency manager for the plugin
 *
 * @return the dependency manager
 */
DependencyManager getDependencyManager();
/**
 * Gets the context manager.
 * This object handles context accumulation for all players on the platform.
 *
 * @return the context manager
 */
ContextManager<?> getContextManager();
/**
 * Gets the inheritance handler
 *
 * @return the inheritance handler
 */
InheritanceHandler getInheritanceHandler();
/**
 * Gets the cached state manager for the platform.
 *
 * @return the cached state manager
 */
CachedStateManager getCachedStateManager();
/**
 * Gets the class responsible for constructing PermissionCalculators on this platform.
 *
 * @return the permission calculator factory
 */
CalculatorFactory getCalculatorFactory();
/**
 * Gets the verbose debug handler instance.
 *
 * @return the debug handler instance
 */
VerboseHandler getVerboseHandler();
/**
 * Gets the permission caching instance for the platform.
 *
 * @return the permission cache instance
 */
PermissionVault getPermissionVault();
/**
 * Gets the log dispatcher running on the platform
 *
 * @return the log dispatcher
 */
LogDispatcher getLogDispatcher();
/**
 * Gets the LuckPerms Scheduler instance
 *
 * @return the scheduler
 */
SchedulerAdapter getScheduler();
/**
 * Gets the file watcher running on the platform
 *
 * @return the file watcher
 */
Optional<FileWatcher> getFileWatcher();
/**
 * Gets a string of the plugin's version
 *
 * @return the version of the plugin
 */
String getVersion();
/**
 * Gets the platform type this instance of LuckPerms is running on.
 *
 * @return the platform type
 */
PlatformType getServerType();
/**
 * Gets the name or "brand" of the running platform
 *
 * @return the server brand
 */
String getServerBrand();
/**
 * Gets the version of the running platform
 *
 * @return the server version
 */
String getServerVersion();
/**
 * Gets the name associated with this server
 *
 * <p>May be {@code null}; the default implementation returns {@code null}
 * for platforms that do not define a server name.</p>
 *
 * @return the server name
 */
default String getServerName() {
return null;
}
/**
 * Gets the time when the plugin first started in millis.
 *
 * @return the enable time
 */
long getStartTime();
/**
 * Gets the plugins main data storage directory
 *
 * <p>Bukkit: /root/plugins/LuckPerms</p>
 * <p>Bungee: /root/plugins/LuckPerms</p>
 * <p>Sponge: /root/luckperms/</p>
 *
 * @return the platforms data folder
 */
File getDataDirectory();
/**
 * Gets the plugins config directory.
 *
 * <p>This is the same as {@link #getDataDirectory()} on Bukkit/Bungee, but different on Sponge.</p>
 *
 * @return the platforms config folder
 */
default File getConfigDirectory() {
return getDataDirectory();
}
/**
 * Gets a bundled resource file from the jar
 *
 * @param path the path of the file
 * @return the file as an input stream
 */
InputStream getResourceStream(String path);
/**
 * Gets a player object linked to this User. The returned object must be the same type
 * as the instance used in the platforms {@link ContextManager}
 *
 * @param user the user instance
 * @return a player object, or null, if one couldn't be found.
 */
Object getPlayer(User user);
/**
 * Lookup a uuid from a username, using the servers internal uuid cache.
 *
 * @param username the username to lookup
 * @return an optional uuid, if found
 */
Optional<UUID> lookupUuid(String username);
/**
 * Gets a calculated context instance for the user using the rules of the platform.
 *
 * @param user the user instance
 * @return a contexts object, or null if one couldn't be generated
 */
Optional<Contexts> getContextForUser(User user);
/**
 * Gets the number of users online on the platform
 *
 * @return the number of users
 */
int getPlayerCount();
/**
 * Gets the usernames of the users online on the platform
 *
 * @return a {@link List} of usernames
 */
Stream<String> getPlayerList();
/**
 * Gets the UUIDs of the users online on the platform
 *
 * @return a {@link Set} of UUIDs
 */
Stream<UUID> getOnlinePlayers();
/**
 * Checks if a user is online
 *
 * @param external the users external uuid
 * @return true if the user is online
 */
boolean isPlayerOnline(UUID external);
/**
 * Gets a list of online Senders on the platform
 *
 * @return a {@link List} of senders online on the platform
 */
Stream<Sender> getOnlineSenders();
/**
 * Gets the console.
 *
 * @return the console sender of the instance
 */
Sender getConsoleSender();
/**
 * Gets the unique players which have connected to the server since it started.
 *
 * @return the unique connections
 */
Set<UUID> getUniqueConnections();
/**
 * Gets any additional platform-specific commands to be registered.
 *
 * @return a {@link List} of extra commands; empty by default
 */
default List<Command> getExtraCommands() {
return Collections.emptyList();
}
/**
 * Gets the update task buffer of the platform, used for scheduling and running update tasks.
 *
 * @return the update task buffer instance
 */
BufferedRequest<Void> getUpdateTaskBuffer();
/**
 * Called at the end of the sync task.
 */
default void onPostUpdate() {
}
/**
 * Sends the LuckPerms startup banner, version and platform details to the
 * given sender (typically the console).
 *
 * @param sender the sender to receive the banner
 */
default void sendStartupBanner(Sender sender) {
sender.sendMessage(CommandUtils.color("&b __ &3 __ ___ __ __ "));
sender.sendMessage(CommandUtils.color("&b | | | / ` |__/ &3|__) |__ |__) |\\/| /__` "));
sender.sendMessage(CommandUtils.color("&b |___ \\__/ \\__, | \\ &3| |___ | \\ | | .__/ "));
sender.sendMessage(CommandUtils.color(" "));
sender.sendMessage(CommandUtils.color("&2 Loading version &bv" + getVersion() + "&2 on " + getServerType().getFriendlyName() + " - " + getServerBrand()));
sender.sendMessage(CommandUtils.color("&8 Running on server version " + getServerVersion()));
sender.sendMessage(CommandUtils.color(" "));
}
}
|
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.ui.listener.evaluation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import javax.faces.event.AbortProcessingException;
import javax.faces.event.ActionEvent;
import javax.faces.event.ActionListener;
import javax.faces.event.ValueChangeEvent;
import javax.faces.event.ValueChangeListener;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.tool.assessment.data.dao.grading.ItemGradingData;
import org.sakaiproject.tool.assessment.services.GradingService;
import org.sakaiproject.tool.assessment.services.GradebookServiceException;
import org.sakaiproject.tool.assessment.services.assessment.PublishedAssessmentService;
import org.sakaiproject.tool.assessment.ui.bean.evaluation.AgentResults;
import org.sakaiproject.tool.assessment.ui.bean.evaluation.HistogramScoresBean;
import org.sakaiproject.tool.assessment.ui.bean.evaluation.QuestionScoresBean;
import org.sakaiproject.tool.assessment.ui.bean.evaluation.TotalScoresBean;
import org.sakaiproject.tool.assessment.ui.listener.evaluation.util.EvaluationListenerUtil;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
import org.sakaiproject.tool.assessment.util.BeanSort;
/**
* <p>
* This handles the updating of the Question Score page.
* </p>
* <p>Description: Action Listener Evaluation Updating Question Score front door</p>
* <p>Copyright: Copyright (c) 2004</p>
* <p>Organization: Sakai Project</p>
* @author Ed Smiley
* @version $Id$
*/
public class QuestionScorePagerListener
  implements ActionListener, ValueChangeListener
{
  private static final Logger log = LoggerFactory.getLogger(QuestionScorePagerListener.class);

  /** Published item type id identifying audio questions. */
  private static final Long AUDIO_ITEM_TYPE = Long.valueOf(7L);
  /** Default paging size for audio questions. */
  private static final int AUDIO_DEFAULT_MAX_DISPLAYED_ROWS = 5;

  /**
   * Standard process action method.
   * @param event ActionEvent
   * @throws AbortProcessingException
   */
  public void processAction(ActionEvent event) throws
    AbortProcessingException
  {
    log.debug("processAction");
    QuestionScoresBean bean = (QuestionScoresBean) ContextUtil.lookupBean("questionScores");
    setMaxDisplayedScoreRows(bean, false);
  }

  /**
   * Process a value change.
   */
  public void processValueChange(ValueChangeEvent event)
  {
    log.debug("processValueChange");
    QuestionScoresBean bean = (QuestionScoresBean) ContextUtil.lookupBean("questionScores");
    setMaxDisplayedScoreRows(bean, true);
  }

  /**
   * Synchronises the "rows per page" setting on the bean, keeping separate
   * values for audio questions (default 5) and all other question types.
   *
   * @param bean the question scores backing bean
   * @param isValueChange true when triggered by the user changing the paging
   *        size, false when merely (re)displaying the page
   */
  private void setMaxDisplayedScoreRows(QuestionScoresBean bean, boolean isValueChange) {
    PublishedAssessmentService pubService = new PublishedAssessmentService();
    String itemId = ContextUtil.lookupParam("itemId");
    // "newItemId" (when present and non-blank) takes precedence over "itemId".
    String newItemId = ContextUtil.lookupParam("newItemId");
    if (newItemId != null && !newItemId.trim().equals("")) {
      itemId = newItemId;
    }
    Long itemType = pubService.getItemType(itemId);
    // Constant-first equals() is null-safe should getItemType() return null
    // (the original itemType.equals(...) would have thrown a NPE).
    boolean isAudio = AUDIO_ITEM_TYPE.equals(itemType);
    // For audio questions, default the paging number to 5.
    if (isValueChange) {
      // User changed the paging size: remember it per question category.
      if (isAudio) {
        bean.setAudioMaxDisplayedScoreRows(bean.getMaxDisplayedRows());
        bean.setHasAudioMaxDisplayedScoreRowsChanged(true);
      }
      else {
        bean.setOtherMaxDisplayedScoreRows(bean.getMaxDisplayedRows());
      }
    }
    else {
      // Display: restore the remembered size (or the audio default).
      if (isAudio) {
        if (bean.getHasAudioMaxDisplayedScoreRowsChanged()) {
          bean.setMaxDisplayedRows(bean.getAudioMaxDisplayedScoreRows());
        }
        else {
          bean.setMaxDisplayedRows(AUDIO_DEFAULT_MAX_DISPLAYED_ROWS);
          bean.setAudioMaxDisplayedScoreRows(AUDIO_DEFAULT_MAX_DISPLAYED_ROWS);
        }
      }
      else {
        bean.setMaxDisplayedRows(bean.getOtherMaxDisplayedScoreRows());
      }
    }
  }
}
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.ingest.IngestInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.OsInfo;
import org.elasticsearch.monitor.process.ProcessInfo;
import org.elasticsearch.search.aggregations.support.AggregationInfo;
import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.transport.TransportInfo;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
 * Response for a nodes-info request: one {@link NodeInfo} per responding node,
 * rendered as an X-Content "nodes" object keyed by node id.
 */
public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements ToXContentFragment {

    /** Deserializes the response from the transport wire format. */
    public NodesInfoResponse(StreamInput in) throws IOException {
        super(in);
    }

    public NodesInfoResponse(ClusterName clusterName, List<NodeInfo> nodes, List<FailedNodeException> failures) {
        super(clusterName, nodes, failures);
    }

    @Override
    protected List<NodeInfo> readNodesFrom(StreamInput in) throws IOException {
        return in.readList(NodeInfo::new);
    }

    @Override
    protected void writeNodesTo(StreamOutput out, List<NodeInfo> nodes) throws IOException {
        out.writeList(nodes);
    }

    /**
     * Renders every node under a "nodes" object. Per-node sections (os, process,
     * jvm, ...) are emitted only when the corresponding info block was requested
     * and returned (each getInfo(...) may be null).
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject("nodes");
        for (NodeInfo nodeInfo : getNodes()) {
            // Each node keyed by its unique id.
            builder.startObject(nodeInfo.getNode().getId());
            builder.field("name", nodeInfo.getNode().getName());
            builder.field("transport_address", nodeInfo.getNode().getAddress().toString());
            builder.field("host", nodeInfo.getNode().getHostName());
            builder.field("ip", nodeInfo.getNode().getHostAddress());
            builder.field("version", nodeInfo.getVersion());
            builder.field("build_flavor", nodeInfo.getBuild().flavor().displayName());
            builder.field("build_type", nodeInfo.getBuild().type().displayName());
            builder.field("build_hash", nodeInfo.getBuild().hash());
            if (nodeInfo.getTotalIndexingBuffer() != null) {
                builder.humanReadableField("total_indexing_buffer", "total_indexing_buffer_in_bytes", nodeInfo.getTotalIndexingBuffer());
            }
            builder.startArray("roles");
            for (DiscoveryNodeRole role : nodeInfo.getNode().getRoles()) {
                builder.value(role.roleName());
            }
            builder.endArray();
            // Attributes section is omitted entirely when the node has none.
            if (nodeInfo.getNode().getAttributes().isEmpty() == false) {
                builder.startObject("attributes");
                for (Map.Entry<String, String> entry : nodeInfo.getNode().getAttributes().entrySet()) {
                    builder.field(entry.getKey(), entry.getValue());
                }
                builder.endObject();
            }
            if (nodeInfo.getSettings() != null) {
                builder.startObject("settings");
                Settings settings = nodeInfo.getSettings();
                settings.toXContent(builder, params);
                builder.endObject();
            }
            // Optional info sections; each is only present if it was collected.
            if (nodeInfo.getInfo(OsInfo.class) != null) {
                nodeInfo.getInfo(OsInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(ProcessInfo.class) != null) {
                nodeInfo.getInfo(ProcessInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(JvmInfo.class) != null) {
                nodeInfo.getInfo(JvmInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(ThreadPoolInfo.class) != null) {
                nodeInfo.getInfo(ThreadPoolInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(TransportInfo.class) != null) {
                nodeInfo.getInfo(TransportInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(HttpInfo.class) != null) {
                nodeInfo.getInfo(HttpInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(PluginsAndModules.class) != null) {
                nodeInfo.getInfo(PluginsAndModules.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(IngestInfo.class) != null) {
                nodeInfo.getInfo(IngestInfo.class).toXContent(builder, params);
            }
            if (nodeInfo.getInfo(AggregationInfo.class) != null) {
                nodeInfo.getInfo(AggregationInfo.class).toXContent(builder, params);
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    /** Pretty-printed JSON rendering; never throws (errors become a JSON error object). */
    @Override
    public String toString() {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
            builder.startObject();
            toXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            return Strings.toString(builder);
        } catch (IOException e) {
            return "{ \"error\" : \"" + e.getMessage() + "\"}";
        }
    }
}
|
package com.halo.blog.entity;
import com.baomidou.mybatisplus.annotation.*;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import javax.validation.constraints.Email;
import javax.validation.constraints.NotBlank;
import java.io.Serializable;
import java.time.LocalDateTime;
/**
 * User table entity (maps to the {@code m_user} table).
 *
 * @author HALO
 * @since 2021-07-02
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@TableName("m_user")
public class User implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-increment primary key.
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;

    // Display name; validation message says "nickname must not be blank".
    @NotBlank(message = "昵称不能为空")
    private String username;

    // Avatar image URL -- assumed; TODO confirm whether this is a URL or a path.
    private String avatar;

    // Email address; both messages are Chinese ("must not be blank" / "bad format").
    @NotBlank(message = "邮箱不能为空")
    @Email(message = "邮箱格式不正确")
    private String email;

    private String password;

    // Account status flag; semantics (enabled/disabled codes) not visible here.
    private Integer status;

    // Creation timestamp, filled automatically on insert by MyBatis-Plus.
    @TableField(fill = FieldFill.INSERT)
    private LocalDateTime created;

    private LocalDateTime lastLogin;
}
|
package com.coolweather.app.db;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteDatabase.CursorFactory;
import android.database.sqlite.SQLiteOpenHelper;
/**
 * SQLite helper that owns the Province/City/County location tables.
 */
public class CoolWeatherOpenHelper extends SQLiteOpenHelper {

    public CoolWeatherOpenHelper(Context context, String name, CursorFactory factory, int version) {
        super(context, name, factory, version);
    }

    /** DDL for the Province table: id (PK) plus name and code. */
    public static final String CREATE_PROVINCE = "create table Province("
            + "id integer primary key autoincrement, "
            + "province_name text, "
            + "province_code text)";

    /** DDL for the City table; province_id references Province.id. */
    public static final String CREATE_CITY = "create table City("
            + "id integer primary key autoincrement,"
            + "city_name text,"
            + "city_code text,"
            + "province_id integer)";

    /** DDL for the County table; city_id references City.id. */
    public static final String CREATE_COUNTY = "create table County("
            + "id integer primary key autoincrement, "
            + "county_name text, "
            + "county_code text, "
            + "city_id integer) ";

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(CREATE_PROVINCE);
        db.execSQL(CREATE_CITY);
        db.execSQL(CREATE_COUNTY);
    }

    /**
     * Was an empty auto-generated stub, so bumping the database version would
     * never migrate the schema. Drop-and-recreate is used here on the
     * assumption that these tables only cache re-downloadable location data --
     * TODO confirm before shipping a version bump.
     */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("drop table if exists Province");
        db.execSQL("drop table if exists City");
        db.execSQL("drop table if exists County");
        onCreate(db);
    }
}
|
package com.webprague.service;
import java.sql.Timestamp;
import java.sql.Date;
import java.util.List;
/**
 * Service for recording and querying dated values.
 */
public interface DateService {

    /**
     * Records {@code num} against the given date.
     *
     * @param num      value to record -- NOTE(review): semantics not visible here; confirm with implementation
     * @param dateTime the {@link java.sql.Date} the value belongs to
     * @return an int result -- presumably rows affected; verify against the implementation
     */
    int addDate(int num, Date dateTime);

    /**
     * Finds all records whose date falls between the two month boundaries.
     * The return type is fully qualified because the application's model
     * {@code Date} clashes with the imported {@link java.sql.Date}.
     *
     * @param startMonth inclusive start of the range -- assumed inclusive; TODO confirm
     * @param endMonth   end of the range
     * @return matching model records
     */
    List<com.webprague.model.Date> findAllByMonth(Date startMonth, Date endMonth);
}
|
package com.cmccarthy.api;
import io.cucumber.junit.CucumberOptions;
import net.serenitybdd.cucumber.CucumberWithSerenity;
import org.junit.runner.RunWith;
// JUnit entry point for the weather BDD suite: Serenity runs the single
// WeatherTest.feature file, binding steps from the api and common glue
// packages, skipping scenarios tagged @ignore, and writing a JSON report
// to target/cucumber/report.json. The class body is intentionally empty --
// all behavior comes from the annotations.
@RunWith(CucumberWithSerenity.class)
@CucumberOptions(
        plugin = {"pretty", "json:target/cucumber/report.json"},
        features = "src/test/resources/features/WeatherTest.feature",
        glue = {"com/cmccarthy/api", "com/cmccarthy/common"},
        tags = "not @ignore"
)
public class WeatherRunnerTest {
}
|
package cn.roothub.bbs.module.sys.service.impl;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.gson.reflect.TypeToken;
import cn.roothub.bbs.third.service.RedisService;
import cn.roothub.bbs.module.sys.dao.SystemConfigDao;
import cn.roothub.bbs.module.sys.model.SystemConfig;
import cn.roothub.bbs.common.util.JsonUtil;
import cn.roothub.bbs.common.util.RedisConstants;
/**
* <p></p>
* @author: miansen.wang
* @date: 2019-04-02
*/
/**
 * Supplies the upload configuration, cached in-process and in Redis, falling
 * back to the system-config tables on a cache miss.
 */
@Service
public class UploadConfig extends AbstractUploadConfig {

    private static final Logger log = LoggerFactory.getLogger(UploadConfig.class);

    @Autowired
    private SystemConfigDao systemConfigDao;

    @Autowired
    private RedisService redisService;

    // In-process cache of the upload configuration (key -> value).
    // NOTE(review): lazily initialized without synchronization; harmless if two
    // threads race (both compute the same map) but not strictly thread-safe.
    private Map<String, Object> uploadConfig;

    /**
     * Returns the upload configuration, resolving in order: field cache,
     * Redis, then the database (which also repopulates Redis).
     */
    @Override
    public Map<String, Object> getUploadConfig() {
        if (uploadConfig != null) {
            return uploadConfig;
        }
        // 先从redis里面取
        String json = redisService.getString(RedisConstants.UPLOAD_CONFIG);
        if (json != null) {
            uploadConfig = JsonUtil.jsonToObject(json, new TypeToken<Map<String, Object>>() {}.getType());
        }
        if (uploadConfig != null) {
            log.debug("从redis里面取出了【上传类型】的信息");
            return uploadConfig;
        }
        // Cache miss: rebuild from the system-config tables. The "upload_type"
        // row's value doubles as the parent id of the detailed settings.
        uploadConfig = new HashMap<>();
        SystemConfig systemConfig = systemConfigDao.selectByKey("upload_type");
        // Integer.valueOf replaces the deprecated new Integer(String) constructor.
        List<SystemConfig> list = systemConfigDao.selectByPid(Integer.valueOf(systemConfig.getValue()));
        uploadConfig.put(systemConfig.getKey(), systemConfig.getValue());
        list.forEach(child -> uploadConfig.put(child.getKey(), child.getValue()));
        // 将数据存进redis
        redisService.setString(RedisConstants.UPLOAD_CONFIG, JsonUtil.objectToJson(uploadConfig));
        return uploadConfig;
    }
}
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.reactive.function.server;
import io.reactivex.Single;
import org.junit.jupiter.api.Test;
import org.reactivestreams.Publisher;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.core.codec.CharSequenceEncoder;
import org.springframework.http.*;
import org.springframework.http.codec.EncoderHttpMessageWriter;
import org.springframework.http.codec.HttpMessageWriter;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.reactive.result.view.ViewResolver;
import org.springframework.web.testfixture.http.server.reactive.MockServerHttpRequest;
import org.springframework.web.testfixture.http.server.reactive.MockServerHttpResponse;
import org.springframework.web.testfixture.server.MockServerWebExchange;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Arjen Poutsma
*/
/**
 * Tests for the default {@code EntityResponse} builder: entity wrapping,
 * per-header builder methods, body insertion, and conditional-request
 * (304 Not Modified) handling.
 *
 * @author Arjen Poutsma
 */
public class DefaultEntityResponseBuilderTests {

    @Test
    public void fromObject() {
        String body = "foo";
        EntityResponse<String> response = EntityResponse.fromObject(body).build().block();
        // The builder must hold the exact same instance, not a copy.
        assertThat(response.entity()).isSameAs(body);
    }

    @Test
    public void fromPublisherClass() {
        Flux<String> body = Flux.just("foo", "bar");
        EntityResponse<Flux<String>> response = EntityResponse.fromPublisher(body, String.class).build().block();
        assertThat(response.entity()).isSameAs(body);
    }

    @Test
    public void fromPublisher() {
        Flux<String> body = Flux.just("foo", "bar");
        ParameterizedTypeReference<String> typeReference = new ParameterizedTypeReference<String>() {
        };
        EntityResponse<Flux<String>> response = EntityResponse.fromPublisher(body, typeReference).build().block();
        assertThat(response.entity()).isSameAs(body);
    }

    @Test
    public void fromProducer() {
        // RxJava Single is a "producer" (adapted via ReactiveAdapterRegistry).
        Single<String> body = Single.just("foo");
        ParameterizedTypeReference<String> typeReference = new ParameterizedTypeReference<String>() {
        };
        EntityResponse<Single<String>> response = EntityResponse.fromProducer(body, typeReference).build().block();
        assertThat(response.entity()).isSameAs(body);
    }

    @Test
    public void status() {
        String body = "foo";
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).status(HttpStatus.CREATED).build();
        // Both the enum and the raw int view must reflect the configured status.
        StepVerifier.create(result)
                .expectNextMatches(response -> HttpStatus.CREATED.equals(response.statusCode()) &&
                        response.rawStatusCode() == 201)
                .expectComplete()
                .verify();
    }

    @Test
    public void allow() {
        String body = "foo";
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).allow(HttpMethod.GET).build();
        Set<HttpMethod> expected = EnumSet.of(HttpMethod.GET);
        StepVerifier.create(result)
                .expectNextMatches(response -> expected.equals(response.headers().getAllow()))
                .expectComplete()
                .verify();
    }

    @Test
    public void contentLength() {
        String body = "foo";
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).contentLength(42).build();
        StepVerifier.create(result)
                .expectNextMatches(response -> Long.valueOf(42).equals(response.headers().getContentLength()))
                .expectComplete()
                .verify();
    }

    @Test
    public void contentType() {
        String body = "foo";
        Mono<EntityResponse<String>>
                result = EntityResponse.fromObject(body).contentType(MediaType.APPLICATION_JSON).build();
        StepVerifier.create(result)
                .expectNextMatches(response -> MediaType.APPLICATION_JSON.equals(response.headers().getContentType()))
                .expectComplete()
                .verify();
    }

    @Test
    public void etag() {
        String body = "foo";
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).eTag("foo").build();
        // eTag() adds the surrounding quotes required by the ETag header grammar.
        StepVerifier.create(result)
                .expectNextMatches(response -> "\"foo\"".equals(response.headers().getETag()))
                .expectComplete()
                .verify();
    }

    @Test
    public void lastModified() {
        ZonedDateTime now = ZonedDateTime.now();
        String body = "foo";
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).lastModified(now).build();
        // Compare at second precision: the Last-Modified header drops millis.
        Long expected = now.toInstant().toEpochMilli() / 1000;
        StepVerifier.create(result)
                .expectNextMatches(response -> expected.equals(response.headers().getLastModified() / 1000))
                .expectComplete()
                .verify();
    }

    @Test
    public void cacheControlTag() {
        String body = "foo";
        Mono<EntityResponse<String>>
                result = EntityResponse.fromObject(body).cacheControl(CacheControl.noCache()).build();
        StepVerifier.create(result)
                .expectNextMatches(response -> "no-cache".equals(response.headers().getCacheControl()))
                .expectComplete()
                .verify();
    }

    @Test
    public void varyBy() {
        String body = "foo";
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).varyBy("foo").build();
        List<String> expected = Collections.singletonList("foo");
        StepVerifier.create(result)
                .expectNextMatches(response -> expected.equals(response.headers().getVary()))
                .expectComplete()
                .verify();
    }

    @Test
    public void headers() {
        String body = "foo";
        HttpHeaders headers = new HttpHeaders();
        Mono<EntityResponse<String>> result = EntityResponse.fromObject(body).headers(headers).build();
        StepVerifier.create(result)
                .expectNextMatches(response -> headers.equals(response.headers()))
                .expectComplete()
                .verify();
    }

    @Test
    public void cookies() {
        MultiValueMap<String, ResponseCookie> newCookies = new LinkedMultiValueMap<>();
        newCookies.add("name", ResponseCookie.from("name", "value").build());
        Mono<EntityResponse<String>> result =
                EntityResponse.fromObject("foo").cookies(cookies -> cookies.addAll(newCookies)).build();
        StepVerifier.create(result)
                .expectNextMatches(response -> newCookies.equals(response.cookies()))
                .expectComplete()
                .verify();
    }

    @Test
    public void bodyInserter() {
        String body = "foo";
        Publisher<String> publisher = Mono.just(body);
        Mono<EntityResponse<Publisher<String>>> result = EntityResponse.fromPublisher(publisher, String.class).build();
        MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("http://localhost"));
        // Minimal context: a single CharSequence writer and no view resolvers.
        ServerResponse.Context context = new ServerResponse.Context() {
            @Override
            public List<HttpMessageWriter<?>> messageWriters() {
                return Collections.singletonList(new EncoderHttpMessageWriter<>(CharSequenceEncoder.allMimeTypes()));
            }
            @Override
            public List<ViewResolver> viewResolvers() {
                return Collections.emptyList();
            }
        };
        StepVerifier.create(result)
                .consumeNextWith(response -> {
                    // Verify the entity publisher emits the body, then actually write it.
                    StepVerifier.create(response.entity())
                            .expectNext(body)
                            .expectComplete()
                            .verify();
                    response.writeTo(exchange, context);
                })
                .expectComplete()
                .verify();
        assertThat(exchange.getResponse().getBody()).isNotNull();
    }

    @Test
    public void notModifiedEtag() {
        String etag = "\"foo\"";
        EntityResponse<String> responseMono = EntityResponse.fromObject("bar")
                .eTag(etag)
                .build()
                .block();
        // Matching If-None-Match must short-circuit to 304 with no body written.
        MockServerHttpRequest request = MockServerHttpRequest.get("https://example.com")
                .header(HttpHeaders.IF_NONE_MATCH, etag)
                .build();
        MockServerWebExchange exchange = MockServerWebExchange.from(request);
        responseMono.writeTo(exchange, DefaultServerResponseBuilderTests.EMPTY_CONTEXT);
        MockServerHttpResponse response = exchange.getResponse();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.NOT_MODIFIED);
        StepVerifier.create(response.getBody())
                .expectError(IllegalStateException.class)
                .verify();
    }

    @Test
    public void notModifiedLastModified() {
        ZonedDateTime now = ZonedDateTime.now();
        ZonedDateTime oneMinuteBeforeNow = now.minus(1, ChronoUnit.MINUTES);
        EntityResponse<String> responseMono = EntityResponse.fromObject("bar")
                .lastModified(oneMinuteBeforeNow)
                .build()
                .block();
        // Resource unchanged since If-Modified-Since -> 304, body never written.
        MockServerHttpRequest request = MockServerHttpRequest.get("https://example.com")
                .header(HttpHeaders.IF_MODIFIED_SINCE,
                        DateTimeFormatter.RFC_1123_DATE_TIME.format(now))
                .build();
        MockServerWebExchange exchange = MockServerWebExchange.from(request);
        responseMono.writeTo(exchange, DefaultServerResponseBuilderTests.EMPTY_CONTEXT);
        MockServerHttpResponse response = exchange.getResponse();
        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.NOT_MODIFIED);
        StepVerifier.create(response.getBody())
                .expectError(IllegalStateException.class)
                .verify();
    }
}
|
package uk.gov.hmcts.reform.fpl.service.summary;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.stereotype.Service;
import uk.gov.hmcts.reform.fpl.model.CaseData;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Aggregates the case-summary fields produced by each registered generator
 * into a single flat map.
 */
@Service
public class CaseSummaryService {

    private final List<CaseSummaryFieldsGenerator> generators;
    private final ObjectMapper objectMapper;

    @SuppressWarnings("all")
    public CaseSummaryService(
        CaseSummaryOrdersRequestedGenerator caseSummaryOrdersRequestedGenerator,
        CaseSummaryDeadlineGenerator caseSummaryDeadlineGenerator,
        CaseSummaryJudgeInformationGenerator caseSummaryJudgeInformationGenerator,
        CaseSummaryMessagesGenerator caseSummaryMessagesGenerator,
        CaseSummaryNextHearingGenerator caseSummaryNextHearingGenerator,
        CaseSummaryPreviousHearingGenerator caseSummaryPreviousHearingGenerator,
        CaseSummaryFinalHearingGenerator caseSummaryFinalHearingGenerator,
        CaseSummaryPeopleInCaseGenerator caseSummaryPeopleInCaseGenerator,
        ObjectMapper objectMapper) {
        this.objectMapper = objectMapper;
        // List order matters: earlier generators win on duplicate field names.
        this.generators = List.of(
            caseSummaryOrdersRequestedGenerator,
            caseSummaryDeadlineGenerator,
            caseSummaryJudgeInformationGenerator,
            caseSummaryMessagesGenerator,
            caseSummaryNextHearingGenerator,
            caseSummaryPreviousHearingGenerator,
            caseSummaryFinalHearingGenerator,
            caseSummaryPeopleInCaseGenerator
        );
    }

    /**
     * Runs every generator over the case data, converts each result to a
     * field map, and merges them first-wins (a later generator never
     * overwrites a field an earlier one already produced).
     *
     * @param caseData the case to summarise
     * @return merged summary fields keyed by CCD field name
     */
    public Map<String, Object> generateSummaryFields(CaseData caseData) {
        return generators.stream()
            .map(generator -> generator.generate(caseData))
            .flatMap(summary -> objectMapper.convertValue(summary,
                new TypeReference<Map<String, Object>>() {})
                .entrySet().stream())
            // putIfAbsent is exactly the original getOrDefault/isNull/put dance:
            // keep the first non-absent mapping for each key.
            .collect(HashMap::new,
                (map, entry) -> map.putIfAbsent(entry.getKey(), entry.getValue()),
                HashMap::putAll);
    }
}
|
package org.swtk.commons.dict.wordnet.indexbyid.instance.p1.p1;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;

import org.swtk.common.dict.dto.wordnet.IndexNoun;

import com.trimc.blogger.commons.utils.GsonUtils;

/**
 * Generated WordNet noun-index shard (ids 11510086-11519910): maps a synset id
 * to every index entry (term) that references it. Data is baked in at class
 * load via the static block below; do not edit the entries by hand.
 */
public final class WordnetNounIndexIdInstance1151 {

    // synset id -> index entries; TreeMap keeps ids in sorted order.
    private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>();

    static {
        add("11510086", "{\"term\":\"japan current\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510086\"]}");
        add("11510086", "{\"term\":\"kuroshio\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510086\"]}");
        add("11510086", "{\"term\":\"kuroshio current\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510086\"]}");
        add("11510278", "{\"term\":\"humboldt current\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510278\"]}");
        add("11510278", "{\"term\":\"peruvian current\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510278\"]}");
        add("11510458", "{\"term\":\"opacity\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"04710772\", \"04830489\", \"11510458\"]}");
        add("11510657", "{\"term\":\"optical opacity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510657\"]}");
        add("11510735", "{\"term\":\"radio-opacity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510735\"]}");
        add("11510735", "{\"term\":\"radiopacity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510735\"]}");
        add("11510863", "{\"term\":\"optical illusion\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11510863\"]}");
        add("11511038", "{\"term\":\"optical phenomenon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511038\"]}");
        add("11511334", "{\"term\":\"pea-souper\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511334\"]}");
        add("11511334", "{\"term\":\"pea soup\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11511334\", \"07602175\"]}");
        add("11511426", "{\"term\":\"phosphorescence\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511426\"]}");
        add("11511594", "{\"term\":\"photoelectricity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511594\"]}");
        add("11511708", "{\"term\":\"piezo effect\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511708\"]}");
        add("11511708", "{\"term\":\"piezoelectric effect\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511708\"]}");
        add("11511708", "{\"term\":\"piezoelectricity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11511708\"]}");
        add("11512019", "{\"term\":\"pleochroism\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11512019\"]}");
        add("11512216", "{\"term\":\"pleomorphism\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11512216\", \"11513233\"]}");
        add("11512414", "{\"term\":\"polarisation\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11512414\", \"14025594\"]}");
        add("11512414", "{\"term\":\"polarization\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"14025594\", \"11512414\"]}");
        add("11512640", "{\"term\":\"depolarisation\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11512640\"]}");
        add("11512640", "{\"term\":\"depolarization\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11512640\"]}");
        add("11512788", "{\"term\":\"polymorphism\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"11512788\", \"11513233\", \"11513666\"]}");
        add("11513043", "{\"term\":\"dimorphism\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11513043\", \"11513483\"]}");
        add("11513233", "{\"term\":\"pleomorphism\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11512216\", \"11513233\"]}");
        add("11513233", "{\"term\":\"polymorphism\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"11512788\", \"11513233\", \"11513666\"]}");
        add("11513483", "{\"term\":\"dimorphism\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11513043\", \"11513483\"]}");
        add("11513666", "{\"term\":\"polymorphism\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"11512788\", \"11513233\", \"11513666\"]}");
        add("11513852", "{\"term\":\"single nucleotide polymorphism\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11513852\"]}");
        add("11513852", "{\"term\":\"snp\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11513852\"]}");
        add("11514227", "{\"term\":\"electric potential\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514227\"]}");
        add("11514227", "{\"term\":\"potential\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11514227\", \"14506204\"]}");
        add("11514227", "{\"term\":\"potential difference\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514227\"]}");
        add("11514227", "{\"term\":\"potential drop\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514227\"]}");
        add("11514227", "{\"term\":\"voltage\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11514227\", \"11543971\"]}");
        add("11514476", "{\"term\":\"evoked potential\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514476\"]}");
        add("11514687", "{\"term\":\"resting potential\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514687\"]}");
        add("11514872", "{\"term\":\"p.e.\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514872\"]}");
        add("11514872", "{\"term\":\"potential energy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11514872\"]}");
        add("11515038", "{\"term\":\"downfall\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"07442328\", \"11515038\", \"07332733\"]}");
        add("11515038", "{\"term\":\"precipitation\", \"synsetCount\":6, \"upperType\":\"NOUN\", \"ids\":[\"05067358\", \"05068318\", \"07448920\", \"11515038\", \"13561402\", \"13794793\"]}");
        add("11515335", "{\"term\":\"gas pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11515335\"]}");
        add("11515441", "{\"term\":\"force per unit area\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11515441\"]}");
        add("11515441", "{\"term\":\"pressure\", \"synsetCount\":7, \"upperType\":\"NOUN\", \"ids\":[\"11449572\", \"14500660\", \"05732420\", \"14474815\", \"00113933\", \"05202630\", \"11515441\"]}");
        add("11515441", "{\"term\":\"pressure level\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11515441\"]}");
        add("11516007", "{\"term\":\"head\", \"synsetCount\":33, \"upperType\":\"NOUN\", \"ids\":[\"00856505\", \"03254982\", \"03506432\", \"03506664\", \"03506758\", \"03506955\", \"04065833\", \"05298255\", \"05603592\", \"06355341\", \"06796625\", \"06834918\", \"07387379\", \"07433422\", \"08590014\", \"09324750\", \"09324937\", \"09422467\", \"10182373\", \"10182947\", \"10494230\", \"13154545\", \"13675998\", \"14335908\", \"06302589\", \"08525470\", \"08681922\", \"11516007\", \"08499282\", \"10182584\", \"05619057\", \"01320872\", \"05546258\"]}");
        add("11516108", "{\"term\":\"barometric pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516108\"]}");
        add("11516222", "{\"term\":\"compartment pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516222\"]}");
        add("11516364", "{\"term\":\"overpressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516364\"]}");
        add("11516557", "{\"term\":\"sea-level pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516557\"]}");
        add("11516696", "{\"term\":\"hydrostatic head\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516696\"]}");
        add("11516903", "{\"term\":\"intraocular pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516903\"]}");
        add("11516903", "{\"term\":\"iop\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11516903\"]}");
        add("11517146", "{\"term\":\"oil pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517146\"]}");
        add("11517281", "{\"term\":\"osmotic pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517281\"]}");
        add("11517573", "{\"term\":\"corpuscular-radiation pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517573\"]}");
        add("11517573", "{\"term\":\"radiation pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517573\"]}");
        add("11517778", "{\"term\":\"instantaneous sound pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517778\"]}");
        add("11517778", "{\"term\":\"sound pressure\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517778\"]}");
        add("11517986", "{\"term\":\"prevailing wind\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11517986\"]}");
        add("11518177", "{\"term\":\"propulsion\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00045991\", \"11518177\"]}");
        add("11518288", "{\"term\":\"puff\", \"synsetCount\":8, \"upperType\":\"NOUN\", \"ids\":[\"00837162\", \"00839336\", \"03863938\", \"04001232\", \"04040471\", \"06707855\", \"07644079\", \"11518288\"]}");
        add("11518288", "{\"term\":\"puff of air\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11518288\"]}");
        add("11518288", "{\"term\":\"whiff\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"00131332\", \"02664869\", \"11518288\"]}");
        add("11518440", "{\"term\":\"pull\", \"synsetCount\":7, \"upperType\":\"NOUN\", \"ids\":[\"00626667\", \"00839336\", \"14322572\", \"04026090\", \"05164657\", \"11518440\", \"00115251\"]}");
        add("11518603", "{\"term\":\"push\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"00283329\", \"04033499\", \"04843914\", \"11518603\", \"00113132\"]}");
        add("11518603", "{\"term\":\"thrust\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"00335761\", \"06724343\", \"00104654\", \"01176455\", \"11518603\"]}");
        add("11518861", "{\"term\":\"reaction\", \"synsetCount\":7, \"upperType\":\"NOUN\", \"ids\":[\"00203665\", \"06226571\", \"11437243\", \"11518861\", \"00860679\", \"05925922\", \"13468534\"]}");
        add("11519079", "{\"term\":\"rocket propulsion\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11519079\"]}");
        add("11519250", "{\"term\":\"reaction propulsion\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11519250\"]}");
        add("11519491", "{\"term\":\"radiant energy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11519491\"]}");
        add("11519684", "{\"term\":\"radiation\", \"synsetCount\":7, \"upperType\":\"NOUN\", \"ids\":[\"00706833\", \"05511206\", \"07454483\", \"13566387\", \"14331899\", \"01255505\", \"11519684\"]}");
        add("11519910", "{\"term\":\"corpuscular radiation\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11519910\"]}");
        add("11519910", "{\"term\":\"particulate radiation\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11519910\"]}");
    }

    // Deserializes one JSON payload and appends it to the id's entry list.
    private static void add(final String ID, final String JSON) {
        IndexNoun indexNoun = GsonUtils.toObject(JSON, IndexNoun.class);
        Collection<IndexNoun> list = (map.containsKey(ID)) ? map.get(ID) : new ArrayList<IndexNoun>();
        list.add(indexNoun);
        map.put(ID, list);
    }

    // NOTE(review): the parameter is named TERM but the map is keyed by synset
    // id (see the add() calls above) -- lookups should pass an id string.
    public static Collection<IndexNoun> get(final String TERM) {
        return map.get(TERM);
    }

    public static boolean has(final String TERM) {
        return map.containsKey(TERM);
    }

    // All synset ids in this shard, in sorted order (TreeMap key set).
    public static Collection<String> ids() {
        return map.keySet();
    }
}
|
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.util.concurrent.atomic.AtomicLong;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.orientechnologies.common.concur.ONeedRetryException;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.sql.OCommandSQL;
@Test
public class ConcurrentQueriesTest {
  /** Number of concurrent worker threads. */
  private final static int THREADS = 10;
  /** Number of queries each worker issues. */
  private final static int CYCLES = 50;
  /** Maximum attempts per query when the server reports a transient conflict. */
  private final static int MAX_RETRIES = 50;
  protected String url;
  private ODatabaseDocumentTx db;
  /** Total queries executed successfully across all threads. */
  private final AtomicLong counter = new AtomicLong();
  /** Total retries performed, for the average printed at the end. */
  private final AtomicLong totalRetries = new AtomicLong();

  /**
   * Worker that opens its own connection per cycle and runs a simple SELECT,
   * retrying with a linear back-off when the server raises ONeedRetryException.
   */
  class CommandExecutor implements Runnable {
    final String url;
    final String threadName;

    public CommandExecutor(String url, String iThreadName) {
      this.url = url;
      this.threadName = iThreadName;
    }

    @Override
    public void run() {
      try {
        for (int i = 0; i < CYCLES; i++) {
          // Thread-local connection; renamed so it no longer shadows the test's 'db' field.
          final ODatabaseDocumentTx localDb = new ODatabaseDocumentTx(url).open("admin", "admin");
          try {
            for (int retry = 0; retry < MAX_RETRIES; ++retry) {
              try {
                localDb.command(new OCommandSQL("select from Concurrent")).execute();
                counter.incrementAndGet();
                totalRetries.addAndGet(retry);
                break;
              } catch (ONeedRetryException e) {
                System.out.println("Retry " + retry + "/" + MAX_RETRIES + "...");
                Thread.sleep(retry * 10); // linear back-off (0ms on the first retry)
              }
            }
          } finally {
            localDb.close();
          }
        }
      } catch (Throwable e) {
        e.printStackTrace();
        // Report the cause instead of the original opaque Assert.assertTrue(false).
        Assert.fail("Worker " + threadName + " failed: " + e);
      }
    }
  }

  @Parameters(value = "url")
  public ConcurrentQueriesTest(@Optional(value = "memory:test") String iURL) {
    url = iURL;
  }

  /** Creates the in-memory database when needed and seeds 1000 documents. */
  @BeforeClass
  public void init() {
    if ("memory:test".equals(url))
      new ODatabaseDocumentTx(url).create().close();
    db = new ODatabaseDocumentTx(url).open("admin", "admin");
    db.getMetadata().getSchema().createClass("Concurrent");
    for (int i = 0; i < 1000; ++i) {
      db.newInstance("Concurrent").field("test", i).save();
    }
  }

  @AfterClass
  public void deinit() {
    if (!db.isClosed())
      db.close();
  }

  /** Runs THREADS workers in parallel and verifies every query completed. */
  @Test
  public void concurrentCommands() throws Exception {
    Thread[] threads = new Thread[THREADS];
    System.out.println("Spanning " + THREADS + " threads...");
    for (int i = 0; i < THREADS; ++i) {
      final CommandExecutor executor = new CommandExecutor(url, "thread" + i);
      // Name each thread individually (previously every thread was "ConcurrentTest1").
      threads[i] = new Thread(executor, executor.threadName);
    }
    System.out.println("Starting " + THREADS + " threads...");
    for (int i = 0; i < THREADS; ++i) {
      threads[i].start();
    }
    System.out.println("Waiting for " + THREADS + " threads...");
    for (int i = 0; i < THREADS; ++i) {
      threads[i].join();
    }
    System.out.println("Done! Total queries executed in parallel: " + counter.get() + " average retries: "
        + ((float) totalRetries.get() / (float) counter.get()));
    Assert.assertEquals(counter.get(), CYCLES * THREADS);
  }
}
|
package org.codehaus.prometheus.references;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
/**
 * An {@link AwaitableReference} that never blocks: every take variant returns
 * the currently stored value immediately (null when none is present), and puts
 * always succeed at once, ignoring any timeout.
 *
 * Thread-safe: all state lives in a single {@link AtomicReference}.
 *
 * NOTE(review): the non-blocking semantics for the timed/waiting methods are
 * implied by the class name — confirm against the AwaitableReference contract.
 */
public class NonBlockingAwaitableReference<E> implements AwaitableReference<E> {

    private final AtomicReference<E> reference = new AtomicReference<E>();

    /** @return true when a (non-null) value is currently stored. */
    public boolean isTakePossible() {
        return reference.get() != null;
    }

    /**
     * Non-blocking take: returns the current value immediately, or null when
     * none is available. (Previously an unimplemented 'todo' stub that always
     * returned null, even when a value was present.)
     */
    public E take() throws InterruptedException {
        return reference.get();
    }

    /** Same as {@link #take()}: returns the current value or null, never waits. */
    public E tryTake() {
        return reference.get();
    }

    /**
     * Timed take; this implementation never waits, so the timeout is ignored
     * and the current value (possibly null) is returned immediately.
     * (Previously an unimplemented 'todo' stub that always returned null.)
     */
    public E tryTake(long timeout, TimeUnit unit) throws TimeoutException {
        return reference.get();
    }

    /** Stores the new value and returns the previous one (may be null). */
    public E put(E newRef) {
        return reference.getAndSet(newRef);
    }

    /** Non-blocking put: always succeeds immediately; the timeout is ignored. */
    public E tryPut(E newRef, long timeout, TimeUnit unit) throws TimeoutException {
        return reference.getAndSet(newRef);
    }

    /** Returns the current value without removing it. */
    public E peek() {
        return reference.get();
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.start.classloader.vfs;
import java.io.File;
import java.util.HashSet;
import org.apache.accumulo.start.classloader.vfs.ContextManager.ContextConfig;
import org.apache.accumulo.start.classloader.vfs.ContextManager.ContextsConfig;
import org.apache.commons.io.FileUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.impl.VFSClassLoader;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
 * Tests {@link ContextManager}: classloader contexts resolve their configured
 * jars, are isolated from each other, and honor pre/post delegation to the
 * parent loader.
 */
public class ContextManagerTest {

  private TemporaryFolder folder1 = new TemporaryFolder(
      new File(System.getProperty("user.dir") + "/target"));
  private TemporaryFolder folder2 = new TemporaryFolder(
      new File(System.getProperty("user.dir") + "/target"));
  private FileSystemManager vfs;
  // uri1 points at a single jar file; uri2 is a wildcard over folder2's contents.
  private String uri1;
  private String uri2;

  static FileSystemManager getVFS() {
    try {
      return AccumuloVFSClassLoader.generateVfs();
    } catch (FileSystemException e) {
      throw new RuntimeException("Error setting up VFS", e);
    }
  }

  @Before
  public void setup() throws Exception {
    vfs = getVFS();
    folder1.create();
    folder2.create();
    // Both folders receive an identical copy of HelloWorld.jar so two contexts
    // can load the same class bytes through different loaders.
    FileUtils.copyURLToFile(this.getClass().getResource("/HelloWorld.jar"),
        folder1.newFile("HelloWorld.jar"));
    FileUtils.copyURLToFile(this.getClass().getResource("/HelloWorld.jar"),
        folder2.newFile("HelloWorld.jar"));
    uri1 = new File(folder1.getRoot(), "HelloWorld.jar").toURI().toString();
    uri2 = folder2.getRoot().toURI().toString() + ".*";
  }

  // Mirrors how the classloader materializes jar file systems, for array comparison.
  FileObject[] createFileSystems(FileObject[] fos) throws FileSystemException {
    FileObject[] rfos = new FileObject[fos.length];
    for (int i = 0; i < fos.length; i++) {
      if (vfs.canCreateFileSystem(fos[i]))
        rfos[i] = vfs.createFileSystem(fos[i]);
      else
        rfos[i] = fos[i];
    }
    return rfos;
  }

  /** Two contexts see their own jars and load distinct Class instances. */
  @Test
  public void differentContexts() throws Exception {
    ContextManager cm = new ContextManager(vfs, new ReloadingClassLoader() {
      @Override
      public ClassLoader getClassLoader() {
        return ClassLoader.getSystemClassLoader();
      }
    });
    cm.setContextConfig(new ContextsConfig() {
      @Override
      public ContextConfig getContextConfig(String context) {
        if (context.equals("CX1")) {
          return new ContextConfig(uri1, true);
        } else if (context.equals("CX2")) {
          return new ContextConfig(uri2, true);
        }
        return null;
      }
    });
    FileObject testDir = vfs.resolveFile(folder1.getRoot().toURI().toString());
    FileObject[] dirContents = testDir.getChildren();
    ClassLoader cl1 = cm.getClassLoader("CX1");
    FileObject[] files = ((VFSClassLoader) cl1).getFileObjects();
    Assert.assertArrayEquals(createFileSystems(dirContents), files);
    FileObject testDir2 = vfs.resolveFile(folder2.getRoot().toURI().toString());
    FileObject[] dirContents2 = testDir2.getChildren();
    ClassLoader cl2 = cm.getClassLoader("CX2");
    FileObject[] files2 = ((VFSClassLoader) cl2).getFileObjects();
    Assert.assertArrayEquals(createFileSystems(dirContents2), files2);
    Class<?> defaultContextClass = cl1.loadClass("test.HelloWorld");
    Object o1 = defaultContextClass.newInstance();
    Assert.assertEquals("Hello World!", o1.toString());
    Class<?> myContextClass = cl2.loadClass("test.HelloWorld");
    Object o2 = myContextClass.newInstance();
    Assert.assertEquals("Hello World!", o2.toString());
    // Same class name, different loaders => different Class objects.
    Assert.assertFalse(defaultContextClass.equals(myContextClass));
    cm.removeUnusedContexts(new HashSet<>());
  }

  /**
   * Pre-delegation (true) resolves classes from the parent first; post-delegation
   * (false) lets the context load its own copy.
   */
  @Test
  public void testPostDelegation() throws Exception {
    final VFSClassLoader parent = new VFSClassLoader(new FileObject[] {vfs.resolveFile(uri1)}, vfs);
    Class<?> pclass = parent.loadClass("test.HelloWorld");
    ContextManager cm = new ContextManager(vfs, new ReloadingClassLoader() {
      @Override
      public ClassLoader getClassLoader() {
        return parent;
      }
    });
    cm.setContextConfig(new ContextsConfig() {
      @Override
      public ContextConfig getContextConfig(String context) {
        // uri2 is already a String; the redundant toString() calls were dropped.
        if (context.equals("CX1")) {
          return new ContextConfig(uri2, true);
        } else if (context.equals("CX2")) {
          return new ContextConfig(uri2, false);
        }
        return null;
      }
    });
    Assert.assertTrue(cm.getClassLoader("CX1").loadClass("test.HelloWorld") == pclass);
    Assert.assertFalse(cm.getClassLoader("CX2").loadClass("test.HelloWorld") == pclass);
  }

  @After
  public void tearDown() throws Exception {
    folder1.delete();
    folder2.delete();
  }
}
|
/**
* Copyright Jakub Staroń, 2015
*/
package bitwaNaTeksty;
import java.util.Collection;
/**
 * @author Kuba
 * Text processor interface.
 */
public interface Processor {
	/**
	 * Processes a collection of text sources (artists).
	 * @param sources the text sources to process
	 */
	public void process(Collection<TextSource> sources);
	/**
	 * Processes a single text source (a single artist).
	 * @param source the text source to process
	 */
	public void process(TextSource source);
}
|
package rpc.framework.provider;
/**
 * @program rpc-framework-jc
 * @description: Stores and supplies service instance objects. Used on the server side.
 * @author: JC
 * @create: 2020/08/02 22:32
 */
public interface ServiceProvider {
    /**
     * @description: Records the mapping between a service instance object and the
     *               interface class that the instance implements.
     * @Param service      the service instance object
     *        serviceClass the interface class implemented by the service instance
     *        T            the type of the service interface
     * @date: 2020/8/2
     */
    <T> void addServiceProvider(T service, Class<T> serviceClass);
    /**
     * @description: Retrieves a service instance object.
     * @Param serviceName the class name of the interface the service instance implements
     *        return the service instance object
     * @date: 2020/8/2
     */
    Object getServiceProvider(String serviceName);
}
|
/*
* Copyright 2015 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.wires.core.scratchpad.client.shapes.connectors;
import com.ait.lienzo.client.core.event.NodeDragMoveEvent;
import com.ait.lienzo.client.core.event.NodeDragMoveHandler;
import com.ait.lienzo.client.core.shape.Line;
import org.uberfire.ext.wires.core.api.controlpoints.ControlPointMoveHandler;
import org.uberfire.ext.wires.core.api.magnets.Magnet;
import org.uberfire.ext.wires.core.api.magnets.MagnetManager;
import org.uberfire.ext.wires.core.api.magnets.RequiresMagnetManager;
import org.uberfire.ext.wires.core.api.shapes.WiresBaseDynamicShape;
import org.uberfire.ext.wires.core.api.shapes.WiresShape;
import org.uberfire.ext.wires.core.client.controlpoints.ConnectibleControlPoint;
import org.uberfire.ext.wires.core.client.util.GeometryUtil;
/**
 * A connector shape rendered as a straight line between two connectible
 * control points. A wider, nearly transparent "bounding" line is drawn over
 * the visible line to enlarge the hit/selection area.
 */
public class WiresLine extends WiresBaseDynamicShape implements MagnetManager,
        RequiresMagnetManager {

    // Stroke width of the invisible bounding line, i.e. the selection tolerance in pixels.
    private static final int BOUNDARY_SIZE = 10;

    //We do not hide the boundary item for Lines as it makes selecting them very difficult
    private static final double ALPHA_DESELECTED = 0.01;
    private static final double ALPHA_SELECTED = 0.1;

    private final Line line;
    private final Line bounding;
    private final ConnectibleControlPoint controlPoint1;
    private final ConnectibleControlPoint controlPoint2;
    private MagnetManager magnetManager;

    /**
     * Wraps the given Line (its first two points are used as the end points),
     * installs a control point at each end, and registers a drag handler that
     * detaches both ends from any magnets when the whole connector is moved.
     */
    public WiresLine(final Line shape) {
        final double x1 = shape.getPoints().get(0).getX();
        final double y1 = shape.getPoints().get(0).getY();
        final double x2 = shape.getPoints().get(1).getX();
        final double y2 = shape.getPoints().get(1).getY();
        line = shape;
        // The bounding line mirrors the visible line's geometry exactly.
        bounding = new Line(x1,
                y1,
                x2,
                y2);
        bounding.setStrokeWidth(BOUNDARY_SIZE);
        bounding.setAlpha(ALPHA_DESELECTED);
        add(line);
        add(bounding);
        magnets.clear();
        controlPoints.clear();
        controlPoint1 = new ConnectibleControlPoint(x1,
                y1,
                this,
                this,
                new ControlPointMoveHandler() {
                    @Override
                    public void onMove(final double x,
                                       final double y) {
                        // Convert canvas coordinates to shape-local by subtracting the shape's position,
                        // keeping the visible line and the bounding line in sync.
                        line.getPoints().get(0).setX(x - getX());
                        line.getPoints().get(0).setY(y - getY());
                        bounding.getPoints().get(0).setX(x - getX());
                        bounding.getPoints().get(0).setY(y - getY());
                    }
                });
        controlPoint2 = new ConnectibleControlPoint(x2,
                y2,
                this,
                this,
                new ControlPointMoveHandler() {
                    @Override
                    public void onMove(final double x,
                                       final double y) {
                        // Same shape-local conversion for the second end point.
                        line.getPoints().get(1).setX(x - getX());
                        line.getPoints().get(1).setY(y - getY());
                        bounding.getPoints().get(1).setX(x - getX());
                        bounding.getPoints().get(1).setY(y - getY());
                    }
                });
        addControlPoint(controlPoint1);
        addControlPoint(controlPoint2);
        //If Connector is dragged as a whole (i.e. not a ControlPoint) detach it from Magnets
        addNodeDragMoveHandler(new NodeDragMoveHandler() {
            @Override
            public void onNodeDragMove(final NodeDragMoveEvent nodeDragMoveEvent) {
                final Magnet boundMagnet1 = controlPoint1.getBoundMagnet();
                final Magnet boundMagnet2 = controlPoint2.getBoundMagnet();
                if (boundMagnet1 != null) {
                    boundMagnet1.detachControlPoint(controlPoint1);
                }
                if (boundMagnet2 != null) {
                    boundMagnet2.detachControlPoint(controlPoint2);
                }
                getLayer().batch();
            }
        });
    }

    @Override
    public void setMagnetManager(final MagnetManager magnetManager) {
        this.magnetManager = magnetManager;
    }

    /** Delegates to the injected manager when present; otherwise a no-op. */
    @Override
    public void hideAllMagnets() {
        if (magnetManager != null) {
            magnetManager.hideAllMagnets();
        }
    }

    /** Delegates to the injected manager when present; otherwise returns null. */
    @Override
    public Magnet getMagnet(final WiresShape shapeActive,
                            final double cx,
                            final double cy) {
        if (this.magnetManager != null) {
            return magnetManager.getMagnet(shapeActive,
                    cx,
                    cy);
        }
        return null;
    }

    /** Selection is shown by raising the bounding line's alpha, never by hiding it. */
    @Override
    public void setSelected(final boolean isSelected) {
        if (isSelected) {
            bounding.setAlpha(ALPHA_SELECTED);
        } else {
            bounding.setAlpha(ALPHA_DESELECTED);
        }
    }

    /**
     * Hit test: true when the (shape-local) point lies within BOUNDARY_SIZE of
     * the line segment.
     */
    @Override
    public boolean contains(final double cx,
                            final double cy) {
        final double _x = cx - getX();
        final double _y = cy - getY();
        return Math.sqrt(GeometryUtil.ptSegDistSq(line.getPoints().get(0).getX(),
                line.getPoints().get(0).getY(),
                line.getPoints().get(1).getX(),
                line.getPoints().get(1).getY(),
                _x,
                _y)) < BOUNDARY_SIZE;
    }

    @Override
    public String toString() {
        return "WiresLine{" + "id=" + getId() + ",x = " + getX() + ", y = " + getY() + "}";
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.errors.TopologyException;
import org.apache.kafka.streams.processor.ProcessorSupplier;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.StreamPartitioner;
import org.apache.kafka.streams.processor.TimestampExtractor;
import org.apache.kafka.streams.processor.TopicNameExtractor;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.internals.SessionStoreBuilder;
import org.apache.kafka.streams.state.internals.WindowStoreBuilder;
import org.apache.kafka.streams.state.internals.TimestampedWindowStoreBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
public class InternalTopologyBuilder {
private static final Logger log = LoggerFactory.getLogger(InternalTopologyBuilder.class);
private static final Pattern EMPTY_ZERO_LENGTH_PATTERN = Pattern.compile("");
// sentinel for nodes (sources) that have no upstream nodes
private static final String[] NO_PREDECESSORS = {};
// node factories in a topological order
private final Map<String, NodeFactory> nodeFactories = new LinkedHashMap<>();
// state factories
private final Map<String, StateStoreFactory> stateFactories = new HashMap<>();
// builders for global state stores (materialized into globalStateStores by rewriteTopology)
private final Map<String, StoreBuilder> globalStateBuilders = new LinkedHashMap<>();
// built global state stores
private final Map<String, StateStore> globalStateStores = new LinkedHashMap<>();
// all topics subscribed from source processors (without application-id prefix for internal topics)
private final Set<String> sourceTopicNames = new HashSet<>();
// all internal topics auto-created by the topology builder and used in source / sink processors
private final Set<String> internalTopicNames = new HashSet<>();
// groups of source processors that need to be copartitioned
private final List<Set<String>> copartitionSourceGroups = new ArrayList<>();
// map from source processor names to subscribed topics (without application-id prefix for internal topics)
private final Map<String, List<String>> nodeToSourceTopics = new HashMap<>();
// map from source processor names to regex subscription patterns
private final Map<String, Pattern> nodeToSourcePatterns = new LinkedHashMap<>();
// map from sink processor names to subscribed topic (without application-id prefix for internal topics)
private final Map<String, String> nodeToSinkTopic = new HashMap<>();
// map from topics to their matched regex patterns, this is to ensure one topic is passed through on source node
// even if it can be matched by multiple regex patterns
private final Map<String, Pattern> topicToPatterns = new HashMap<>();
// map from state store names to all the topics subscribed from source processors that
// are connected to these state stores
private final Map<String, Set<String>> stateStoreNameToSourceTopics = new HashMap<>();
// map from state store names to all the regex subscribed topics from source processors that
// are connected to these state stores
private final Map<String, Set<Pattern>> stateStoreNameToSourceRegex = new HashMap<>();
// map from state store names to this state store's corresponding changelog topic if possible
private final Map<String, String> storeToChangelogTopic = new HashMap<>();
// all global topics
private final Set<String> globalTopics = new HashSet<>();
// topics / patterns whose consumers should reset to the earliest or latest offset
private final Set<String> earliestResetTopics = new HashSet<>();
private final Set<String> latestResetTopics = new HashSet<>();
private final Set<Pattern> earliestResetPatterns = new HashSet<>();
private final Set<Pattern> latestResetPatterns = new HashSet<>();
// groups node names; unite() is called as nodes are connected to their predecessors
private final QuickUnion<String> nodeGrouper = new QuickUnion<>();
private SubscriptionUpdates subscriptionUpdates = new SubscriptionUpdates();
// set via setApplicationId(); used to prefix internal topic names
private String applicationId = null;
private Pattern topicPattern = null;
// lazily computed grouping; reset to null whenever the topology changes
private Map<Integer, Set<String>> nodeGroups = null;
/**
 * Wraps a {@link StoreBuilder} together with the names of the processor nodes
 * that use the resulting store.
 */
public static class StateStoreFactory {
    private final StoreBuilder builder;
    private final Set<String> users = new HashSet<>();

    private StateStoreFactory(final StoreBuilder<?> builder) {
        this.builder = builder;
    }

    /** Builds a fresh store instance from the wrapped builder. */
    public StateStore build() {
        return builder.build();
    }

    /**
     * Retention period of the underlying window/session store.
     * @throws IllegalStateException when the wrapped builder is not a window store
     */
    long retentionPeriod() {
        if (builder instanceof WindowStoreBuilder) {
            return ((WindowStoreBuilder) builder).retentionPeriod();
        }
        if (builder instanceof TimestampedWindowStoreBuilder) {
            return ((TimestampedWindowStoreBuilder) builder).retentionPeriod();
        }
        if (builder instanceof SessionStoreBuilder) {
            return ((SessionStoreBuilder) builder).retentionPeriod();
        }
        throw new IllegalStateException("retentionPeriod is not supported when not a window store");
    }

    /** Names of the processor nodes connected to this store. */
    private Set<String> users() {
        return users;
    }

    public boolean loggingEnabled() {
        return builder.loggingEnabled();
    }

    private String name() {
        return builder.name();
    }

    /** True for window, timestamped-window and session stores. */
    private boolean isWindowStore() {
        return builder instanceof WindowStoreBuilder
                || builder instanceof TimestampedWindowStoreBuilder
                || builder instanceof SessionStoreBuilder;
    }

    // Apparently Java strips the generics from this method because we're using the raw
    // type for builder, even though this method doesn't use builder's (missing) type
    // parameter. Our usage seems obviously correct, though, hence the suppression.
    @SuppressWarnings("unchecked")
    private Map<String, String> logConfig() {
        return builder.logConfig();
    }
}
// Base class for deferred node construction: a factory remembers the node's
// name and its predecessors, and builds the runtime node only on demand.
private static abstract class NodeFactory {
    final String name;
    final String[] predecessors;
    NodeFactory(final String name,
                final String[] predecessors) {
        this.name = name;
        this.predecessors = predecessors;
    }
    // Builds the runtime processor node for this factory.
    public abstract ProcessorNode build();
    // Returns the static description object for this node (used for topology introspection).
    abstract AbstractNode describe();
}
// Factory for plain processor nodes; also tracks which state stores they access.
private static class ProcessorNodeFactory extends NodeFactory {
    private final ProcessorSupplier<?, ?> supplier;
    private final Set<String> stateStoreNames = new HashSet<>();

    ProcessorNodeFactory(final String name, final String[] predecessors, final ProcessorSupplier<?, ?> supplier) {
        super(name, predecessors.clone());
        this.supplier = supplier;
    }

    /** Registers a state store that the processor built by this factory will use. */
    public void addStateStore(final String stateStoreName) {
        stateStoreNames.add(stateStoreName);
    }

    @Override
    public ProcessorNode build() {
        // A fresh Processor instance is obtained from the supplier on every build.
        final ProcessorNode<?, ?> node = new ProcessorNode<>(name, supplier.get(), stateStoreNames);
        return node;
    }

    @Override
    Processor describe() {
        // Defensive copy so later addStateStore calls do not mutate the description.
        return new Processor(name, new HashSet<>(stateStoreNames));
    }
}
// Factory for source nodes subscribed either to a fixed topic list or to a regex
// pattern (exactly one of 'topics' / 'pattern' is meaningful per instance).
private class SourceNodeFactory extends NodeFactory {
    private final List<String> topics;
    private final Pattern pattern;
    private final Deserializer<?> keyDeserializer;
    private final Deserializer<?> valDeserializer;
    private final TimestampExtractor timestampExtractor;
    private SourceNodeFactory(final String name,
                              final String[] topics,
                              final Pattern pattern,
                              final TimestampExtractor timestampExtractor,
                              final Deserializer<?> keyDeserializer,
                              final Deserializer<?> valDeserializer) {
        super(name, NO_PREDECESSORS);
        this.topics = topics != null ? Arrays.asList(topics) : new ArrayList<>();
        this.pattern = pattern;
        this.keyDeserializer = keyDeserializer;
        this.valDeserializer = valDeserializer;
        this.timestampExtractor = timestampExtractor;
    }
    // Resolves the subset of the subscribed topics this pattern-source matches,
    // recording each match in topicToPatterns so a topic is claimed by at most one pattern.
    List<String> getTopics(final Collection<String> subscribedTopics) {
        // if it is subscribed via patterns, it is possible that the topic metadata has not been updated
        // yet and hence the map from source node to topics is stale, in this case we put the pattern as a place holder;
        // this should only happen for debugging since during runtime this function should always be called after the metadata has updated.
        if (subscribedTopics.isEmpty()) {
            return Collections.singletonList(String.valueOf(pattern));
        }
        final List<String> matchedTopics = new ArrayList<>();
        for (final String update : subscribedTopics) {
            if (pattern == topicToPatterns.get(update)) {
                // this pattern already claimed the topic earlier
                matchedTopics.add(update);
            } else if (topicToPatterns.containsKey(update) && isMatch(update)) {
                // the same topic cannot be matched to more than one pattern
                // TODO: we should lift this requirement in the future
                throw new TopologyException("Topic " + update +
                        " is already matched for another regex pattern " + topicToPatterns.get(update) +
                        " and hence cannot be matched to this regex pattern " + pattern + " any more.");
            } else if (isMatch(update)) {
                // first pattern to match the topic claims it
                topicToPatterns.put(update, pattern);
                matchedTopics.add(update);
            }
        }
        return matchedTopics;
    }
    @Override
    public ProcessorNode build() {
        final List<String> sourceTopics = nodeToSourceTopics.get(name);
        // if it is subscribed via patterns, it is possible that the topic metadata has not been updated
        // yet and hence the map from source node to topics is stale, in this case we put the pattern as a place holder;
        // this should only happen for debugging since during runtime this function should always be called after the metadata has updated.
        if (sourceTopics == null) {
            return new SourceNode<>(name, Collections.singletonList(String.valueOf(pattern)), timestampExtractor, keyDeserializer, valDeserializer);
        } else {
            return new SourceNode<>(name, maybeDecorateInternalSourceTopics(sourceTopics), timestampExtractor, keyDeserializer, valDeserializer);
        }
    }
    // True when the topic name matches this factory's subscription pattern.
    private boolean isMatch(final String topic) {
        return pattern.matcher(topic).matches();
    }
    @Override
    Source describe() {
        return new Source(name, new HashSet<>(topics), pattern);
    }
}
// Factory for sink nodes; resolves internal topic names at build time.
private class SinkNodeFactory<K, V> extends NodeFactory {
    private final Serializer<K> keySerializer;
    private final Serializer<V> valSerializer;
    private final StreamPartitioner<? super K, ? super V> partitioner;
    private final TopicNameExtractor<K, V> topicExtractor;

    private SinkNodeFactory(final String name,
                            final String[] predecessors,
                            final TopicNameExtractor<K, V> topicExtractor,
                            final Serializer<K> keySerializer,
                            final Serializer<V> valSerializer,
                            final StreamPartitioner<? super K, ? super V> partitioner) {
        super(name, predecessors.clone());
        this.topicExtractor = topicExtractor;
        this.keySerializer = keySerializer;
        this.valSerializer = valSerializer;
        this.partitioner = partitioner;
    }

    @Override
    public ProcessorNode build() {
        // A static topic name referring to an internal topic must be prefixed with
        // the application id before the sink node is created.
        if (topicExtractor instanceof StaticTopicNameExtractor) {
            final String topic = ((StaticTopicNameExtractor) topicExtractor).topicName;
            if (internalTopicNames.contains(topic)) {
                return new SinkNode<>(name, new StaticTopicNameExtractor<>(decorateTopic(topic)), keySerializer, valSerializer, partitioner);
            }
        }
        // Dynamic extractors, and static names of external topics, are used as-is.
        return new SinkNode<>(name, topicExtractor, keySerializer, valSerializer, partitioner);
    }

    @Override
    Sink describe() {
        return new Sink(name, topicExtractor);
    }
}
// public for testing only
public synchronized final InternalTopologyBuilder setApplicationId(final String applicationId) {
    // requireNonNull returns its argument, so validation and assignment fold into one step;
    // the exception message is unchanged.
    this.applicationId = Objects.requireNonNull(applicationId, "applicationId can't be null");
    return this;
}
/**
 * Applies configuration-driven rewrites: sets the application id, strips the
 * caching layer off every store builder when caching is globally disabled, and
 * materializes the global state stores.
 */
public synchronized final InternalTopologyBuilder rewriteTopology(final StreamsConfig config) {
    Objects.requireNonNull(config, "config can't be null");

    // set application id
    setApplicationId(config.getString(StreamsConfig.APPLICATION_ID_CONFIG));

    // when the cache size is 0, disable caching on every (local and global) store builder
    final boolean cachingDisabled = config.getLong(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG) == 0L;
    if (cachingDisabled) {
        for (final StateStoreFactory storeFactory : stateFactories.values()) {
            storeFactory.builder.withCachingDisabled();
        }
        for (final StoreBuilder storeBuilder : globalStateBuilders.values()) {
            storeBuilder.withCachingDisabled();
        }
    }

    // build global state stores up front
    for (final StoreBuilder storeBuilder : globalStateBuilders.values()) {
        globalStateStores.put(storeBuilder.name(), storeBuilder.build());
    }
    return this;
}
/**
 * Adds a source node subscribed to a fixed list of topics.
 * Validation order (and thus which exception a caller sees) is deliberate:
 * topics first, then name, then per-topic uniqueness.
 *
 * @throws TopologyException when no topic is given, the name is already used,
 *         or a topic is already registered by another source
 */
public final void addSource(final Topology.AutoOffsetReset offsetReset,
                            final String name,
                            final TimestampExtractor timestampExtractor,
                            final Deserializer keyDeserializer,
                            final Deserializer valDeserializer,
                            final String... topics) {
    if (topics.length == 0) {
        throw new TopologyException("You must provide at least one topic");
    }
    Objects.requireNonNull(name, "name must not be null");
    if (nodeFactories.containsKey(name)) {
        throw new TopologyException("Processor " + name + " is already added.");
    }
    for (final String topic : topics) {
        Objects.requireNonNull(topic, "topic names cannot be null");
        validateTopicNotAlreadyRegistered(topic);
        // record the topic's offset-reset policy, when one was requested
        maybeAddToResetList(earliestResetTopics, latestResetTopics, offsetReset, topic);
        sourceTopicNames.add(topic);
    }
    nodeFactories.put(name, new SourceNodeFactory(name, topics, null, timestampExtractor, keyDeserializer, valDeserializer));
    nodeToSourceTopics.put(name, Arrays.asList(topics));
    nodeGrouper.add(name);
    // invalidate the cached node grouping
    nodeGroups = null;
}
/**
 * Adds a source node subscribed via a regex pattern.
 * The pattern must not capture a topic already registered by a fixed-name
 * source, nor textually overlap any previously registered reset pattern.
 *
 * @throws TopologyException when the name is already used, the pattern matches
 *         an already-registered topic, or it overlaps an existing pattern
 */
public final void addSource(final Topology.AutoOffsetReset offsetReset,
                            final String name,
                            final TimestampExtractor timestampExtractor,
                            final Deserializer keyDeserializer,
                            final Deserializer valDeserializer,
                            final Pattern topicPattern) {
    Objects.requireNonNull(topicPattern, "topicPattern can't be null");
    Objects.requireNonNull(name, "name can't be null");
    if (nodeFactories.containsKey(name)) {
        throw new TopologyException("Processor " + name + " is already added.");
    }
    for (final String sourceTopicName : sourceTopicNames) {
        if (topicPattern.matcher(sourceTopicName).matches()) {
            throw new TopologyException("Pattern " + topicPattern + " will match a topic that has already been registered by another source.");
        }
    }
    // Previously this overlap check was copy-pasted for both reset-pattern sets.
    validateNoPatternOverlap(topicPattern, earliestResetPatterns);
    validateNoPatternOverlap(topicPattern, latestResetPatterns);
    maybeAddToResetList(earliestResetPatterns, latestResetPatterns, offsetReset, topicPattern);
    nodeFactories.put(name, new SourceNodeFactory(name, null, topicPattern, timestampExtractor, keyDeserializer, valDeserializer));
    nodeToSourcePatterns.put(name, topicPattern);
    nodeGrouper.add(name);
    // invalidate the cached node grouping
    nodeGroups = null;
}

// Rejects the new pattern when its source text contains, or is contained by, the
// source text of any already-registered pattern (same heuristic and message as the
// original duplicated loops).
private static void validateNoPatternOverlap(final Pattern topicPattern, final Set<Pattern> otherPatterns) {
    for (final Pattern otherPattern : otherPatterns) {
        if (topicPattern.pattern().contains(otherPattern.pattern()) || otherPattern.pattern().contains(topicPattern.pattern())) {
            throw new TopologyException("Pattern " + topicPattern + " will overlap with another pattern " + otherPattern + " already been registered by another source");
        }
    }
}
/**
 * Adds a sink node writing to a fixed topic; delegates to the extractor-based
 * overload with a static topic-name extractor and records the sink's topic.
 *
 * @throws TopologyException when no predecessor is given
 */
public final <K, V> void addSink(final String name,
                                 final String topic,
                                 final Serializer<K> keySerializer,
                                 final Serializer<V> valSerializer,
                                 final StreamPartitioner<? super K, ? super V> partitioner,
                                 final String... predecessorNames) {
    Objects.requireNonNull(name, "name must not be null");
    Objects.requireNonNull(topic, "topic must not be null");
    Objects.requireNonNull(predecessorNames, "predecessor names must not be null");
    if (predecessorNames.length == 0) {
        throw new TopologyException("Sink " + name + " must have at least one parent");
    }
    addSink(name, new StaticTopicNameExtractor<>(topic), keySerializer, valSerializer, partitioner, predecessorNames);
    nodeToSinkTopic.put(name, topic);
    // invalidate the cached node grouping
    nodeGroups = null;
}
/**
 * Adds a sink node whose topic is chosen per record by the given extractor.
 * Each predecessor must already exist, must not be the sink itself, and must
 * not itself be a sink.
 *
 * @throws TopologyException on duplicate names or invalid predecessors
 */
public final <K, V> void addSink(final String name,
                                 final TopicNameExtractor<K, V> topicExtractor,
                                 final Serializer<K> keySerializer,
                                 final Serializer<V> valSerializer,
                                 final StreamPartitioner<? super K, ? super V> partitioner,
                                 final String... predecessorNames) {
    Objects.requireNonNull(name, "name must not be null");
    Objects.requireNonNull(topicExtractor, "topic extractor must not be null");
    Objects.requireNonNull(predecessorNames, "predecessor names must not be null");
    if (nodeFactories.containsKey(name)) {
        throw new TopologyException("Processor " + name + " is already added.");
    }
    if (predecessorNames.length == 0) {
        throw new TopologyException("Sink " + name + " must have at least one parent");
    }
    for (final String predecessor : predecessorNames) {
        Objects.requireNonNull(predecessor, "predecessor name can't be null");
        if (predecessor.equals(name)) {
            throw new TopologyException("Processor " + name + " cannot be a predecessor of itself.");
        }
        if (!nodeFactories.containsKey(predecessor)) {
            throw new TopologyException("Predecessor processor " + predecessor + " is not added yet.");
        }
        if (nodeToSinkTopic.containsKey(predecessor)) {
            throw new TopologyException("Sink " + predecessor + " cannot be used a parent.");
        }
    }
    nodeFactories.put(name, new SinkNodeFactory<>(name, predecessorNames, topicExtractor, keySerializer, valSerializer, partitioner));
    nodeGrouper.add(name);
    // group the sink with its predecessors so they land in the same task
    nodeGrouper.unite(name, predecessorNames);
    // invalidate the cached node grouping
    nodeGroups = null;
}
/**
 * Registers a processor node fed by the given parents. Every parent must already
 * be added and a node can never be its own parent.
 *
 * @throws TopologyException if the name is taken, a parent is missing, or no parent is given
 */
public final void addProcessor(final String name,
                               final ProcessorSupplier supplier,
                               final String... predecessorNames) {
    Objects.requireNonNull(name, "name must not be null");
    Objects.requireNonNull(supplier, "supplier must not be null");
    Objects.requireNonNull(predecessorNames, "predecessor names must not be null");
    if (nodeFactories.containsKey(name)) {
        throw new TopologyException("Processor " + name + " is already added.");
    }
    if (predecessorNames.length == 0) {
        throw new TopologyException("Processor " + name + " must have at least one parent");
    }
    // validate each parent before mutating any builder state
    for (final String parent : predecessorNames) {
        Objects.requireNonNull(parent, "predecessor name must not be null");
        if (parent.equals(name)) {
            throw new TopologyException("Processor " + name + " cannot be a predecessor of itself.");
        }
        if (!nodeFactories.containsKey(parent)) {
            throw new TopologyException("Predecessor processor " + parent + " is not added yet for " + name);
        }
    }
    nodeFactories.put(name, new ProcessorNodeFactory(name, predecessorNames, supplier));
    nodeGrouper.add(name);
    nodeGrouper.unite(name, predecessorNames);
    nodeGroups = null;
}
/**
 * Adds a state store without allowing override of an existing registration;
 * delegates to the three-argument overload.
 */
public final void addStateStore(final StoreBuilder<?> storeBuilder,
                                final String... processorNames) {
    addStateStore(storeBuilder, false, processorNames);
}
/**
 * Registers a state store and, when processor names are given, connects it to each of them.
 * Re-registering an existing store name is only permitted when {@code allowOverride} is set.
 *
 * @throws TopologyException if the store is already added and override is not allowed
 */
public final void addStateStore(final StoreBuilder<?> storeBuilder,
                                final boolean allowOverride,
                                final String... processorNames) {
    Objects.requireNonNull(storeBuilder, "storeBuilder can't be null");
    final String storeName = storeBuilder.name();
    if (stateFactories.containsKey(storeName) && !allowOverride) {
        throw new TopologyException("StateStore " + storeName + " is already added.");
    }
    stateFactories.put(storeName, new StateStoreFactory(storeBuilder));
    if (processorNames != null) {
        for (final String processor : processorNames) {
            Objects.requireNonNull(processor, "processor name must not be null");
            connectProcessorAndStateStore(processor, storeName);
        }
    }
    nodeGroups = null;
}
/**
 * Adds a global state store together with its dedicated source node (reading {@code topic})
 * and the processor that applies updates to the store. Global stores must not have
 * changelog logging enabled — the source topic itself serves as the restore log.
 *
 * @throws TopologyException if any name or topic is already in use, or logging is enabled
 */
public final void addGlobalStore(final StoreBuilder storeBuilder,
                                 final String sourceName,
                                 final TimestampExtractor timestampExtractor,
                                 final Deserializer keyDeserializer,
                                 final Deserializer valueDeserializer,
                                 final String topic,
                                 final String processorName,
                                 final ProcessorSupplier stateUpdateSupplier) {
    Objects.requireNonNull(storeBuilder, "store builder must not be null");
    // validate everything up front so no builder state is mutated on failure
    validateGlobalStoreArguments(sourceName,
                                 topic,
                                 processorName,
                                 stateUpdateSupplier,
                                 storeBuilder.name(),
                                 storeBuilder.loggingEnabled());
    validateTopicNotAlreadyRegistered(topic);
    final String[] topics = {topic};
    final String[] predecessors = {sourceName};
    final ProcessorNodeFactory nodeFactory = new ProcessorNodeFactory(processorName,
        predecessors,
        stateUpdateSupplier);
    // mark as global so the topic is skipped during partition assignment
    globalTopics.add(topic);
    nodeFactories.put(sourceName, new SourceNodeFactory(sourceName,
        topics,
        null,
        timestampExtractor,
        keyDeserializer,
        valueDeserializer));
    nodeToSourceTopics.put(sourceName, Arrays.asList(topics));
    nodeGrouper.add(sourceName);
    nodeFactory.addStateStore(storeBuilder.name());
    nodeFactories.put(processorName, nodeFactory);
    nodeGrouper.add(processorName);
    nodeGrouper.unite(processorName, predecessors);
    globalStateBuilders.put(storeBuilder.name(), storeBuilder);
    // the store restores directly from the source topic (no separate changelog)
    connectSourceStoreAndTopic(storeBuilder.name(), topic);
    // invalidate the cached grouping
    nodeGroups = null;
}
// Rejects a topic that another source already claims, either literally or via a registered pattern.
private void validateTopicNotAlreadyRegistered(final String topic) {
    if (sourceTopicNames.contains(topic) || globalTopics.contains(topic)) {
        throw new TopologyException("Topic " + topic + " has already been registered by another source.");
    }
    final boolean coveredByPattern = nodeToSourcePatterns.values().stream()
        .anyMatch(pattern -> pattern.matcher(topic).matches());
    if (coveredByPattern) {
        throw new TopologyException("Topic " + topic + " matches a Pattern already registered by another source.");
    }
}
/**
 * Connects a processor to one or more already-registered state stores.
 *
 * @throws TopologyException if no store names are supplied
 */
public final void connectProcessorAndStateStores(final String processorName,
                                                 final String... stateStoreNames) {
    Objects.requireNonNull(processorName, "processorName can't be null");
    Objects.requireNonNull(stateStoreNames, "state store list must not be null");
    if (stateStoreNames.length == 0) {
        throw new TopologyException("Must provide at least one state store name.");
    }
    for (final String storeName : stateStoreNames) {
        Objects.requireNonNull(storeName, "state store name must not be null");
        connectProcessorAndStateStore(processorName, storeName);
    }
    // connections may merge subtopologies, so drop the cached grouping
    nodeGroups = null;
}
// Records that the given store restores from the given topic (used in place of a changelog
// for source-backed stores). A store may only be mapped once.
public void connectSourceStoreAndTopic(final String sourceStoreName,
                                       final String topic) {
    if (storeToChangelogTopic.containsKey(sourceStoreName)) {
        throw new TopologyException("Source store " + sourceStoreName + " is already added.");
    }
    storeToChangelogTopic.put(sourceStoreName, topic);
}
// Marks a topic as internal; internal topics get prefixed with the application id when built.
public final void addInternalTopic(final String topicName) {
    Objects.requireNonNull(topicName, "topicName can't be null");
    internalTopicNames.add(topicName);
}
// Declares that the given source nodes must be co-partitioned; a defensive immutable
// snapshot is stored so later caller-side mutation cannot corrupt the group.
public final void copartitionSources(final Collection<String> sourceNodes) {
    copartitionSourceGroups.add(Collections.unmodifiableSet(new HashSet<>(sourceNodes)));
}
// Validates all arguments to addGlobalStore before any builder state is touched.
// The check order is part of the observable contract (it decides which exception fires first).
private void validateGlobalStoreArguments(final String sourceName,
                                          final String topic,
                                          final String processorName,
                                          final ProcessorSupplier stateUpdateSupplier,
                                          final String storeName,
                                          final boolean loggingEnabled) {
    Objects.requireNonNull(sourceName, "sourceName must not be null");
    Objects.requireNonNull(topic, "topic must not be null");
    Objects.requireNonNull(stateUpdateSupplier, "supplier must not be null");
    Objects.requireNonNull(processorName, "processorName must not be null");
    if (nodeFactories.containsKey(sourceName)) {
        throw new TopologyException("Processor " + sourceName + " is already added.");
    }
    if (nodeFactories.containsKey(processorName)) {
        throw new TopologyException("Processor " + processorName + " is already added.");
    }
    if (stateFactories.containsKey(storeName) || globalStateBuilders.containsKey(storeName)) {
        throw new TopologyException("StateStore " + storeName + " is already added.");
    }
    // global stores restore from their source topic, so a changelog would be redundant
    if (loggingEnabled) {
        throw new TopologyException("StateStore " + storeName + " for global table must not have logging enabled.");
    }
    if (sourceName.equals(processorName)) {
        throw new TopologyException("sourceName and processorName must be different.");
    }
}
// Connects an already-added processor to an already-added (non-global) state store and
// unites the processor with the store's existing users so they share one subtopology.
private void connectProcessorAndStateStore(final String processorName,
                                           final String stateStoreName) {
    if (globalStateBuilders.containsKey(stateStoreName)) {
        throw new TopologyException("Global StateStore " + stateStoreName +
            " can be used by a Processor without being specified; it should not be explicitly passed.");
    }
    if (!stateFactories.containsKey(stateStoreName)) {
        throw new TopologyException("StateStore " + stateStoreName + " is not added yet.");
    }
    if (!nodeFactories.containsKey(processorName)) {
        throw new TopologyException("Processor " + processorName + " is not added yet.");
    }
    final StateStoreFactory stateStoreFactory = stateFactories.get(stateStoreName);
    final Iterator<String> iter = stateStoreFactory.users().iterator();
    if (iter.hasNext()) {
        // uniting with one existing user suffices: all users are already transitively united
        final String user = iter.next();
        nodeGrouper.unite(user, processorName);
    }
    stateStoreFactory.users().add(processorName);
    final NodeFactory nodeFactory = nodeFactories.get(processorName);
    if (nodeFactory instanceof ProcessorNodeFactory) {
        final ProcessorNodeFactory processorNodeFactory = (ProcessorNodeFactory) nodeFactory;
        processorNodeFactory.addStateStore(stateStoreName);
        connectStateStoreNameToSourceTopicsOrPattern(stateStoreName, processorNodeFactory);
    } else {
        // sources and sinks cannot host state stores
        throw new TopologyException("cannot connect a state store " + stateStoreName + " to a source node or a sink node.");
    }
}
// Walks predecessor chains transitively and collects every source-node factory reachable upstream.
private Set<SourceNodeFactory> findSourcesForProcessorPredecessors(final String[] predecessors) {
    final Set<SourceNodeFactory> sources = new HashSet<>();
    for (final String parent : predecessors) {
        final NodeFactory factory = nodeFactories.get(parent);
        if (factory instanceof SourceNodeFactory) {
            sources.add((SourceNodeFactory) factory);
        } else if (factory instanceof ProcessorNodeFactory) {
            // recurse through intermediate processors until sources are reached
            sources.addAll(findSourcesForProcessorPredecessors(((ProcessorNodeFactory) factory).predecessors));
        }
    }
    return sources;
}
// Records which source topics (or subscription patterns) ultimately feed the given store,
// by walking upstream from the connecting processor to its source nodes.
private void connectStateStoreNameToSourceTopicsOrPattern(final String stateStoreName,
                                                          final ProcessorNodeFactory processorNodeFactory) {
    // we should never update the mapping from state store names to source topics if the store name already exists
    // in the map; this scenario is possible, for example, that a state store underlying a source KTable is
    // connecting to a join operator whose source topic is not the original KTable's source topic but an internal repartition topic.
    if (stateStoreNameToSourceTopics.containsKey(stateStoreName)
        || stateStoreNameToSourceRegex.containsKey(stateStoreName)) {
        return;
    }
    final Set<String> sourceTopics = new HashSet<>();
    final Set<Pattern> sourcePatterns = new HashSet<>();
    final Set<SourceNodeFactory> sourceNodesForPredecessor =
        findSourcesForProcessorPredecessors(processorNodeFactory.predecessors);
    // a source subscribes either by pattern or by a literal topic list, never both
    for (final SourceNodeFactory sourceNodeFactory : sourceNodesForPredecessor) {
        if (sourceNodeFactory.pattern != null) {
            sourcePatterns.add(sourceNodeFactory.pattern);
        } else {
            sourceTopics.addAll(sourceNodeFactory.topics);
        }
    }
    if (!sourceTopics.isEmpty()) {
        stateStoreNameToSourceTopics.put(stateStoreName,
                Collections.unmodifiableSet(sourceTopics));
    }
    if (!sourcePatterns.isEmpty()) {
        stateStoreNameToSourceRegex.put(stateStoreName,
                Collections.unmodifiableSet(sourcePatterns));
    }
}
// Files the item under the collection matching its offset-reset policy.
// A null policy means "no explicit reset" and is a no-op.
private <T> void maybeAddToResetList(final Collection<T> earliestResets,
                                     final Collection<T> latestResets,
                                     final Topology.AutoOffsetReset offsetReset,
                                     final T item) {
    if (offsetReset == null) {
        return;
    }
    switch (offsetReset) {
        case EARLIEST:
            earliestResets.add(item);
            break;
        case LATEST:
            latestResets.add(item);
            break;
        default:
            throw new TopologyException(String.format("Unrecognized reset format %s", offsetReset));
    }
}
// Returns the subtopology grouping, computing and caching it on first use
// (the cache is invalidated by mutating operations that set nodeGroups = null).
public synchronized Map<Integer, Set<String>> nodeGroups() {
    if (nodeGroups == null) {
        nodeGroups = makeNodeGroups();
    }
    return nodeGroups;
}
// Partitions all nodes into subtopology groups keyed by a dense integer id.
private Map<Integer, Set<String>> makeNodeGroups() {
    final Map<Integer, Set<String>> groups = new LinkedHashMap<>();
    final Map<String, Set<String>> rootToNodeGroup = new HashMap<>();
    int groupId = 0;
    // Go through source nodes first. This makes the group id assignment easy to predict in tests
    final Set<String> sourceNodes = new HashSet<>(nodeToSourceTopics.keySet());
    sourceNodes.addAll(nodeToSourcePatterns.keySet());
    for (final String source : Utils.sorted(sourceNodes)) {
        groupId = putNodeGroupName(source, groupId, groups, rootToNodeGroup);
    }
    // then every remaining (non-source) node
    for (final String node : Utils.sorted(nodeFactories.keySet())) {
        if (!nodeToSourceTopics.containsKey(node)) {
            groupId = putNodeGroupName(node, groupId, groups, rootToNodeGroup);
        }
    }
    return groups;
}
// Places a node into the group of its grouper root, opening a new group (and consuming
// a fresh id) when this root has not been seen before; returns the next free group id.
private int putNodeGroupName(final String nodeName,
                             final int nodeGroupId,
                             final Map<Integer, Set<String>> nodeGroups,
                             final Map<String, Set<String>> rootToNodeGroup) {
    final String root = nodeGrouper.root(nodeName);
    int nextGroupId = nodeGroupId;
    Set<String> members = rootToNodeGroup.get(root);
    if (members == null) {
        members = new HashSet<>();
        rootToNodeGroup.put(root, members);
        nodeGroups.put(nextGroupId++, members);
    }
    members.add(nodeName);
    return nextGroupId;
}
// Builds the complete topology (all subtopologies, minus global-store groups).
public synchronized ProcessorTopology build() {
    return build((Integer) null);
}
// Builds the topology restricted to one topic group, or the full (non-global) topology
// when the group id is null.
public synchronized ProcessorTopology build(final Integer topicGroupId) {
    final Set<String> nodeGroup;
    if (topicGroupId == null) {
        // when topicGroupId is null, we build the full topology minus the global groups
        nodeGroup = new HashSet<>();
        for (final Set<String> group : nodeGroups().values()) {
            nodeGroup.addAll(group);
        }
        nodeGroup.removeAll(globalNodeGroups());
    } else {
        nodeGroup = nodeGroups().get(topicGroupId);
    }
    return build(nodeGroup);
}
/**
 * Builds the topology for any global state stores.
 * @return the global-store ProcessorTopology, or {@code null} when there are no global stores
 */
public synchronized ProcessorTopology buildGlobalStateTopology() {
    Objects.requireNonNull(applicationId, "topology has not completed optimization");
    final Set<String> globalGroups = globalNodeGroups();
    return globalGroups.isEmpty() ? null : build(globalGroups);
}
// Collects every node that belongs to a group containing at least one global source.
private Set<String> globalNodeGroups() {
    final Set<String> globalGroups = new HashSet<>();
    for (final Set<String> groupMembers : nodeGroups().values()) {
        for (final String member : groupMembers) {
            if (isGlobalSource(member)) {
                // one global source marks the whole group as global
                globalGroups.addAll(groupMembers);
            }
        }
    }
    return globalGroups;
}
// Materializes the ProcessorTopology for the given node subset (null means "all nodes"),
// dispatching each factory to the matching node-building routine.
private ProcessorTopology build(final Set<String> nodeGroup) {
    Objects.requireNonNull(applicationId, "topology has not completed optimization");
    final Map<String, ProcessorNode> processorMap = new LinkedHashMap<>();
    final Map<String, SourceNode> topicSourceMap = new HashMap<>();
    final Map<String, SinkNode> topicSinkMap = new HashMap<>();
    final Map<String, StateStore> stateStoreMap = new LinkedHashMap<>();
    final Set<String> repartitionTopics = new HashSet<>();
    // create processor nodes in a topological order ("nodeFactories" is already topologically sorted)
    // also make sure the state store map values following the insertion ordering
    for (final NodeFactory factory : nodeFactories.values()) {
        if (nodeGroup == null || nodeGroup.contains(factory.name)) {
            final ProcessorNode node = factory.build();
            processorMap.put(node.name(), node);
            if (factory instanceof ProcessorNodeFactory) {
                buildProcessorNode(processorMap,
                                   stateStoreMap,
                                   (ProcessorNodeFactory) factory,
                                   node);
            } else if (factory instanceof SourceNodeFactory) {
                buildSourceNode(topicSourceMap,
                                repartitionTopics,
                                (SourceNodeFactory) factory,
                                (SourceNode) node);
            } else if (factory instanceof SinkNodeFactory) {
                buildSinkNode(processorMap,
                              topicSinkMap,
                              repartitionTopics,
                              (SinkNodeFactory) factory,
                              (SinkNode) node);
            } else {
                // defensive: every factory must be one of the three known kinds
                throw new TopologyException("Unknown definition class: " + factory.getClass().getName());
            }
        }
    }
    return new ProcessorTopology(new ArrayList<>(processorMap.values()),
                                 topicSourceMap,
                                 topicSinkMap,
                                 new ArrayList<>(stateStoreMap.values()),
                                 new ArrayList<>(globalStateStores.values()),
                                 storeToChangelogTopic,
                                 repartitionTopics);
}
// Wires the sink under each of its parents; for static topic names the sink is also
// indexed by (possibly application-id-decorated) topic name.
@SuppressWarnings("unchecked")
private void buildSinkNode(final Map<String, ProcessorNode> processorMap,
                           final Map<String, SinkNode> topicSinkMap,
                           final Set<String> repartitionTopics,
                           final SinkNodeFactory sinkNodeFactory,
                           final SinkNode node) {
    for (final String parent : sinkNodeFactory.predecessors) {
        processorMap.get(parent).addChild(node);
        if (!(sinkNodeFactory.topicExtractor instanceof StaticTopicNameExtractor)) {
            continue; // dynamic topic: nothing to index at build time
        }
        final String topic = ((StaticTopicNameExtractor) sinkNodeFactory.topicExtractor).topicName;
        if (internalTopicNames.contains(topic)) {
            // prefix the internal topic name with the application id
            final String decoratedTopic = decorateTopic(topic);
            topicSinkMap.put(decoratedTopic, node);
            repartitionTopics.add(decoratedTopic);
        } else {
            topicSinkMap.put(topic, node);
        }
    }
}
// Indexes the source node by every topic it consumes; pattern subscriptions are resolved
// against the most recent subscription updates.
private void buildSourceNode(final Map<String, SourceNode> topicSourceMap,
                             final Set<String> repartitionTopics,
                             final SourceNodeFactory sourceNodeFactory,
                             final SourceNode node) {
    final List<String> topics;
    if (sourceNodeFactory.pattern != null) {
        topics = sourceNodeFactory.getTopics(subscriptionUpdates.getUpdates());
    } else {
        topics = sourceNodeFactory.topics;
    }
    for (final String topic : topics) {
        if (internalTopicNames.contains(topic)) {
            // prefix the internal topic name with the application id
            final String decorated = decorateTopic(topic);
            topicSourceMap.put(decorated, node);
            repartitionTopics.add(decorated);
        } else {
            topicSourceMap.put(topic, node);
        }
    }
}
// Links the processor under its parents and materializes each state store it uses
// (building local stores once, and registering their changelog topics when logging is on).
private void buildProcessorNode(final Map<String, ProcessorNode> processorMap,
                                final Map<String, StateStore> stateStoreMap,
                                final ProcessorNodeFactory factory,
                                final ProcessorNode node) {
    for (final String predecessor : factory.predecessors) {
        final ProcessorNode<?, ?> predecessorNode = processorMap.get(predecessor);
        predecessorNode.addChild(node);
    }
    for (final String stateStoreName : factory.stateStoreNames) {
        if (!stateStoreMap.containsKey(stateStoreName)) {
            if (stateFactories.containsKey(stateStoreName)) {
                final StateStoreFactory stateStoreFactory = stateFactories.get(stateStoreName);
                // remember the changelog topic if this state store is change-logging enabled
                if (stateStoreFactory.loggingEnabled() && !storeToChangelogTopic.containsKey(stateStoreName)) {
                    final String changelogTopic = ProcessorStateManager.storeChangelogTopic(applicationId, stateStoreName);
                    storeToChangelogTopic.put(stateStoreName, changelogTopic);
                }
                stateStoreMap.put(stateStoreName, stateStoreFactory.build());
            } else {
                // not a local store: resolve it from the already-built global stores
                stateStoreMap.put(stateStoreName, globalStateStores.get(stateStoreName));
            }
        }
    }
}
/**
 * Get any global {@link StateStore}s that are part of the
 * topology
 * @return unmodifiable map containing all global {@link StateStore}s, keyed by store name
 */
public Map<String, StateStore> globalStateStores() {
    // guard: optimization must have assigned the application id before stores are exposed
    Objects.requireNonNull(applicationId, "topology has not completed optimization");
    return Collections.unmodifiableMap(globalStateStores);
}
// Returns the names of every state store in the topology — local and global — as an
// unmodifiable set.
public Set<String> allStateStoreName() {
    Objects.requireNonNull(applicationId, "topology has not completed optimization");
    final Set<String> names = new HashSet<>(globalStateStores.keySet());
    names.addAll(stateFactories.keySet());
    return Collections.unmodifiableSet(names);
}
/**
 * Returns the map of topic groups keyed by the group id.
 * A topic group is a group of topics in the same task.
 *
 * @return groups of topic names (sink topics, source topics, repartition topics, changelog topics)
 */
public synchronized Map<Integer, TopicsInfo> topicGroups() {
    final Map<Integer, TopicsInfo> topicGroups = new LinkedHashMap<>();
    if (nodeGroups == null) {
        nodeGroups = makeNodeGroups();
    }
    for (final Map.Entry<Integer, Set<String>> entry : nodeGroups.entrySet()) {
        final Set<String> sinkTopics = new HashSet<>();
        final Set<String> sourceTopics = new HashSet<>();
        final Map<String, InternalTopicConfig> repartitionTopics = new HashMap<>();
        final Map<String, InternalTopicConfig> stateChangelogTopics = new HashMap<>();
        for (final String node : entry.getValue()) {
            // if the node is a source node, add to the source topics
            final List<String> topics = nodeToSourceTopics.get(node);
            if (topics != null) {
                // if some of the topics are internal, add them to the internal topics
                for (final String topic : topics) {
                    // skip global topic as they don't need partition assignment
                    if (globalTopics.contains(topic)) {
                        continue;
                    }
                    if (internalTopicNames.contains(topic)) {
                        // prefix the internal topic name with the application id
                        final String internalTopic = decorateTopic(topic);
                        repartitionTopics.put(
                            internalTopic,
                            new RepartitionTopicConfig(internalTopic, Collections.emptyMap()));
                        sourceTopics.add(internalTopic);
                    } else {
                        sourceTopics.add(topic);
                    }
                }
            }
            // if the node is a sink node, add to the sink topics
            final String topic = nodeToSinkTopic.get(node);
            if (topic != null) {
                if (internalTopicNames.contains(topic)) {
                    // prefix the change log topic name with the application id
                    sinkTopics.add(decorateTopic(topic));
                } else {
                    sinkTopics.add(topic);
                }
            }
            // if the node is connected to a state store whose changelog topics are not predefined,
            // add to the changelog topics
            for (final StateStoreFactory stateFactory : stateFactories.values()) {
                if (stateFactory.loggingEnabled() && stateFactory.users().contains(node)) {
                    // prefer an explicitly registered changelog topic; otherwise derive the default name
                    final String topicName = storeToChangelogTopic.containsKey(stateFactory.name()) ?
                            storeToChangelogTopic.get(stateFactory.name()) :
                            ProcessorStateManager.storeChangelogTopic(applicationId, stateFactory.name());
                    if (!stateChangelogTopics.containsKey(topicName)) {
                        final InternalTopicConfig internalTopicConfig =
                            createChangelogTopicConfig(stateFactory, topicName);
                        stateChangelogTopics.put(topicName, internalTopicConfig);
                    }
                }
            }
        }
        // groups with no source topics (e.g. global-store groups) are omitted entirely
        if (!sourceTopics.isEmpty()) {
            topicGroups.put(entry.getKey(), new TopicsInfo(
                    Collections.unmodifiableSet(sinkTopics),
                    Collections.unmodifiableSet(sourceTopics),
                    Collections.unmodifiableMap(repartitionTopics),
                    Collections.unmodifiableMap(stateChangelogTopics)));
        }
    }
    return Collections.unmodifiableMap(topicGroups);
}
// Refreshes nodeToSourceTopics for every pattern-subscribed source node using the
// latest subscription updates; no-op when there are no updates.
private void setRegexMatchedTopicsToSourceNodes() {
    if (subscriptionUpdates.hasUpdates()) {
        for (final Map.Entry<String, Pattern> stringPatternEntry : nodeToSourcePatterns.entrySet()) {
            final SourceNodeFactory sourceNode =
                (SourceNodeFactory) nodeFactories.get(stringPatternEntry.getKey());
            //need to update nodeToSourceTopics with topics matched from given regex
            nodeToSourceTopics.put(
                stringPatternEntry.getKey(),
                sourceNode.getTopics(subscriptionUpdates.getUpdates()));
            log.debug("nodeToSourceTopics {}", nodeToSourceTopics);
        }
    }
}
// Extends each pattern-fed store's source-topic set with any newly subscribed topics that
// match the store's registered patterns; existing topic mappings are preserved (merged in).
private void setRegexMatchedTopicToStateStore() {
    if (subscriptionUpdates.hasUpdates()) {
        for (final Map.Entry<String, Set<Pattern>> storePattern : stateStoreNameToSourceRegex.entrySet()) {
            final Set<String> updatedTopicsForStateStore = new HashSet<>();
            for (final String subscriptionUpdateTopic : subscriptionUpdates.getUpdates()) {
                for (final Pattern pattern : storePattern.getValue()) {
                    if (pattern.matcher(subscriptionUpdateTopic).matches()) {
                        updatedTopicsForStateStore.add(subscriptionUpdateTopic);
                    }
                }
            }
            if (!updatedTopicsForStateStore.isEmpty()) {
                // merge with any topics already mapped so nothing is dropped
                final Collection<String> storeTopics = stateStoreNameToSourceTopics.get(storePattern.getKey());
                if (storeTopics != null) {
                    updatedTopicsForStateStore.addAll(storeTopics);
                }
                stateStoreNameToSourceTopics.put(
                    storePattern.getKey(),
                    Collections.unmodifiableSet(updatedTopicsForStateStore));
            }
        }
    }
}
// Builds the changelog topic config for a store: window stores carry a retention setting,
// everything else gets the plain unwindowed config.
private InternalTopicConfig createChangelogTopicConfig(final StateStoreFactory factory,
                                                       final String name) {
    if (!factory.isWindowStore()) {
        return new UnwindowedChangelogTopicConfig(name, factory.logConfig());
    }
    final WindowedChangelogTopicConfig config = new WindowedChangelogTopicConfig(name, factory.logConfig());
    config.setRetentionMs(factory.retentionPeriod());
    return config;
}
// Pattern matching every topic configured with an EARLIEST offset reset.
public synchronized Pattern earliestResetTopicsPattern() {
    return resetTopicsPattern(earliestResetTopics, earliestResetPatterns);
}
// Pattern matching every topic configured with a LATEST offset reset.
public synchronized Pattern latestResetTopicsPattern() {
    return resetTopicsPattern(latestResetTopics, latestResetPatterns);
}
// Assembles a reset pattern from literal topics (internal ones decorated with the
// application id) plus any pattern subscriptions.
private Pattern resetTopicsPattern(final Set<String> resetTopics,
                                   final Set<Pattern> resetPatterns) {
    return buildPatternForOffsetResetTopics(maybeDecorateInternalSourceTopics(resetTopics), resetPatterns);
}
// Joins all literal topics and source patterns into one "a|b|c" alternation pattern;
// empty input yields the shared zero-length pattern.
private static Pattern buildPatternForOffsetResetTopics(final Collection<String> sourceTopics,
                                                        final Collection<Pattern> sourcePatterns) {
    final StringBuilder alternation = new StringBuilder();
    for (final String topic : sourceTopics) {
        alternation.append(topic).append("|");
    }
    for (final Pattern sourcePattern : sourcePatterns) {
        alternation.append(sourcePattern.pattern()).append("|");
    }
    if (alternation.length() == 0) {
        return EMPTY_ZERO_LENGTH_PATTERN;
    }
    alternation.setLength(alternation.length() - 1); // drop the trailing "|"
    return Pattern.compile(alternation.toString());
}
// Exposes each store's feeding source topics, with internal topics decorated
// with the application id.
public Map<String, List<String>> stateStoreNameToSourceTopics() {
    final Map<String, List<String>> decorated = new HashMap<>();
    for (final Map.Entry<String, Set<String>> entry : stateStoreNameToSourceTopics.entrySet()) {
        decorated.put(entry.getKey(), maybeDecorateInternalSourceTopics(entry.getValue()));
    }
    return decorated;
}
// Resolves each co-partition group of source-node names into the (decorated) set of
// topics those sources consume; nodes without topic mappings are skipped.
public synchronized Collection<Set<String>> copartitionGroups() {
    final List<Set<String>> groups = new ArrayList<>(copartitionSourceGroups.size());
    for (final Set<String> sourceNodes : copartitionSourceGroups) {
        final Set<String> topicsInGroup = new HashSet<>();
        for (final String sourceNode : sourceNodes) {
            final List<String> sourceTopics = nodeToSourceTopics.get(sourceNode);
            if (sourceTopics != null) {
                topicsInGroup.addAll(maybeDecorateInternalSourceTopics(sourceTopics));
            }
        }
        groups.add(Collections.unmodifiableSet(topicsInGroup));
    }
    return Collections.unmodifiableList(groups);
}
// Returns the topic list with every internal topic replaced by its
// application-id-prefixed name; external topics pass through unchanged.
private List<String> maybeDecorateInternalSourceTopics(final Collection<String> sourceTopics) {
    final List<String> result = new ArrayList<>();
    for (final String topic : sourceTopics) {
        result.add(internalTopicNames.contains(topic) ? decorateTopic(topic) : topic);
    }
    return result;
}
// Namespaces an internal topic with the application id; fails fast when the id
// has not been set yet.
private String decorateTopic(final String topic) {
    if (applicationId != null) {
        return applicationId + "-" + topic;
    }
    throw new TopologyException("there are internal topics and applicationId hasn't been set. Call setApplicationId first");
}
// Package-private accessor for the current subscription updates.
SubscriptionUpdates subscriptionUpdates() {
    return subscriptionUpdates;
}
// Lazily builds and caches one pattern covering every literal source topic
// (sorted, internal topics decorated) plus all pattern subscriptions.
synchronized Pattern sourceTopicPattern() {
    if (topicPattern == null) {
        final List<String> sortedTopics = new ArrayList<>();
        for (final List<String> topics : nodeToSourceTopics.values()) {
            sortedTopics.addAll(maybeDecorateInternalSourceTopics(topics));
        }
        Collections.sort(sortedTopics);
        topicPattern = buildPatternForOffsetResetTopics(sortedTopics, nodeToSourcePatterns.values());
    }
    return topicPattern;
}
// package-private for testing only
// Installs fresh subscription updates, then re-resolves pattern-subscribed source
// nodes and pattern-fed state stores against the new topic set.
synchronized void updateSubscriptions(final SubscriptionUpdates subscriptionUpdates,
                                      final String logPrefix) {
    log.debug("{}updating builder with {} topic(s) with possible matching regex subscription(s)",
            logPrefix, subscriptionUpdates);
    this.subscriptionUpdates = subscriptionUpdates;
    setRegexMatchedTopicsToSourceNodes();
    setRegexMatchedTopicToStateStore();
}
// A node is a global source iff it is a source factory consuming exactly one topic
// and that topic is registered as a global topic.
private boolean isGlobalSource(final String nodeName) {
    final NodeFactory factory = nodeFactories.get(nodeName);
    if (!(factory instanceof SourceNodeFactory)) {
        return false;
    }
    final List<String> topics = ((SourceNodeFactory) factory).topics;
    return topics != null && topics.size() == 1 && globalTopics.contains(topics.get(0));
}
// Builds a human-readable description of the topology: regular node groups become
// subtopologies, global-store groups are described separately.
public TopologyDescription describe() {
    final TopologyDescription description = new TopologyDescription();
    for (final Map.Entry<Integer, Set<String>> nodeGroup : makeNodeGroups().entrySet()) {
        final Set<String> groupNodes = nodeGroup.getValue();
        if (nodeGroupContainsGlobalSourceNode(groupNodes)) {
            describeGlobalStore(description, groupNodes, nodeGroup.getKey());
        } else {
            describeSubtopology(description, nodeGroup.getKey(), groupNodes);
        }
    }
    return description;
}
// Describes one global-store node group. Note: `it.remove()` mutates the passed-in set —
// the source node is removed so the single remaining element is the processor node.
private void describeGlobalStore(final TopologyDescription description,
                                 final Set<String> nodes,
                                 final int id) {
    final Iterator<String> it = nodes.iterator();
    while (it.hasNext()) {
        final String node = it.next();
        if (isGlobalSource(node)) {
            // we found a GlobalStore node group; those contain exactly two node: {sourceNode,processorNode}
            it.remove(); // remove sourceNode from group
            final String processorNode = nodes.iterator().next(); // get remaining processorNode
            description.addGlobalStore(new GlobalStore(
                node,
                processorNode,
                ((ProcessorNodeFactory) nodeFactories.get(processorNode)).stateStoreNames.iterator().next(),
                nodeToSourceTopics.get(node).get(0),
                id
            ));
            break;
        }
    }
}
// True when at least one member of the group is a global-topic source node.
private boolean nodeGroupContainsGlobalSourceNode(final Set<String> allNodesOfGroups) {
    return allNodesOfGroups.stream().anyMatch(this::isGlobalSource);
}
/**
 * Orders description nodes by descending sub-tree size, breaking ties by node name
 * so the ordering is total and stable (required for TreeSet membership).
 */
private static class NodeComparator implements Comparator<TopologyDescription.Node>, Serializable {
    @Override
    public int compare(final TopologyDescription.Node node1,
                       final TopologyDescription.Node node2) {
        final int size1 = ((AbstractNode) node1).size;
        final int size2 = ((AbstractNode) node2).size;
        // it is possible that two nodes have the same sub-tree size (think two nodes connected via state stores)
        // in this case default to processor name string
        if (size1 != size2) {
            // Integer.compare avoids the overflow risk of the subtraction idiom (size2 - size1)
            return Integer.compare(size2, size1);
        } else {
            return node1.name().compareTo(node2.name());
        }
    }
}
// Shared comparator instance used by every TreeSet of description nodes.
private final static NodeComparator NODE_COMPARATOR = new NodeComparator();
// Propagates a sub-tree size change upward through all transitive predecessors.
private static void updateSize(final AbstractNode node,
                               final int delta) {
    node.size += delta;
    for (final TopologyDescription.Node predecessor : node.predecessors()) {
        updateSize((AbstractNode) predecessor, delta);
    }
}
// Builds the description of one subtopology: instantiate description nodes, link them
// along predecessor edges, and accumulate sub-tree sizes used for deterministic ordering.
private void describeSubtopology(final TopologyDescription description,
                                 final Integer subtopologyId,
                                 final Set<String> nodeNames) {
    final Map<String, AbstractNode> nodesByName = new HashMap<>();
    // add all nodes
    for (final String nodeName : nodeNames) {
        nodesByName.put(nodeName, nodeFactories.get(nodeName).describe());
    }
    // connect each node to its predecessors and successors
    for (final AbstractNode node : nodesByName.values()) {
        for (final String predecessorName : nodeFactories.get(node.name()).predecessors) {
            final AbstractNode predecessor = nodesByName.get(predecessorName);
            node.addPredecessor(predecessor);
            predecessor.addSuccessor(node);
            // grow every transitive predecessor's sub-tree size by this node's size
            updateSize(predecessor, node.size);
        }
    }
    description.addSubtopology(new Subtopology(
            subtopologyId,
            new HashSet<>(nodesByName.values())));
}
// Description of a global store: a single source node feeding a single update processor.
public final static class GlobalStore implements TopologyDescription.GlobalStore {
    private final Source source;
    private final Processor processor;
    private final int id;
    public GlobalStore(final String sourceName,
                       final String processorName,
                       final String storeName,
                       final String topicName,
                       final int id) {
        // wire the two-node chain: source --> processor
        source = new Source(sourceName, Collections.singleton(topicName), null);
        processor = new Processor(processorName, Collections.singleton(storeName));
        source.successors.add(processor);
        processor.predecessors.add(source);
        this.id = id;
    }
    @Override
    public int id() {
        return id;
    }
    @Override
    public TopologyDescription.Source source() {
        return source;
    }
    @Override
    public TopologyDescription.Processor processor() {
        return processor;
    }
    @Override
    public String toString() {
        return "Sub-topology: " + id + " for global store (will not generate tasks)\n"
                + "  " + source.toString() + "\n"
                + "  " + processor.toString() + "\n";
    }
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final GlobalStore that = (GlobalStore) o;
        // NOTE(review): id is intentionally(?) omitted from equals/hashCode — two stores with
        // the same source and processor compare equal regardless of id; confirm this is desired.
        return source.equals(that.source)
            && processor.equals(that.processor);
    }
    @Override
    public int hashCode() {
        return Objects.hash(source, processor);
    }
}
// Base class for description nodes, holding the predecessor/successor edges
// (kept sorted by NODE_COMPARATOR for deterministic output).
public abstract static class AbstractNode implements TopologyDescription.Node {
    final String name;
    final Set<TopologyDescription.Node> predecessors = new TreeSet<>(NODE_COMPARATOR);
    final Set<TopologyDescription.Node> successors = new TreeSet<>(NODE_COMPARATOR);
    // size of the sub-topology rooted at this node, including the node itself
    int size;
    AbstractNode(final String name) {
        this.name = name;
        this.size = 1;
    }
    @Override
    public String name() {
        return name;
    }
    @Override
    public Set<TopologyDescription.Node> predecessors() {
        // read-only view; edges are mutated only via addPredecessor/addSuccessor
        return Collections.unmodifiableSet(predecessors);
    }
    @Override
    public Set<TopologyDescription.Node> successors() {
        return Collections.unmodifiableSet(successors);
    }
    public void addPredecessor(final TopologyDescription.Node predecessor) {
        predecessors.add(predecessor);
    }
    public void addSuccessor(final TopologyDescription.Node successor) {
        successors.add(successor);
    }
}
/**
 * Description of a source node. Exactly one of {@code topics}/{@code topicPattern}
 * is non-null, depending on whether the source subscribes by literal topics or by pattern
 * (see the constructor call sites, e.g. GlobalStore passes a null pattern).
 */
public final static class Source extends AbstractNode implements TopologyDescription.Source {
    private final Set<String> topics;
    private final Pattern topicPattern;
    public Source(final String name,
                  final Set<String> topics,
                  final Pattern pattern) {
        super(name);
        this.topics = topics;
        this.topicPattern = pattern;
    }
    @Deprecated
    @Override
    public String topics() {
        // NOTE(review): NPEs for pattern-subscribed sources (topics == null);
        // deprecated in favor of topicSet()/topicPattern()
        return topics.toString();
    }
    @Override
    public Set<String> topicSet() {
        return topics;
    }
    @Override
    public Pattern topicPattern() {
        return topicPattern;
    }
    @Override
    public void addPredecessor(final TopologyDescription.Node predecessor) {
        throw new UnsupportedOperationException("Sources don't have predecessors.");
    }
    @Override
    public String toString() {
        final String topicsString = topics == null ? topicPattern.toString() : topics.toString();
        return "Source: " + name + " (topics: " + topicsString + ")\n      --> " + nodeNames(successors);
    }
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final Source source = (Source) o;
        // omit successor to avoid infinite loops
        // Objects.equals is required: one of topics/topicPattern is always null, so calling
        // .equals() on the fields directly would throw NullPointerException
        return name.equals(source.name)
            && Objects.equals(topics, source.topics)
            && Objects.equals(topicPattern, source.topicPattern);
    }
    @Override
    public int hashCode() {
        // omit successor as it might change and alter the hash code; Objects.hash tolerates nulls
        return Objects.hash(name, topics, topicPattern);
    }
}
/**
 * Describes a processor node together with the names of the state stores
 * connected to it.
 */
public final static class Processor extends AbstractNode implements TopologyDescription.Processor {
    private final Set<String> stores;

    public Processor(final String name,
                     final Set<String> stores) {
        super(name);
        this.stores = stores;
    }

    /** Read-only view of the connected state store names. */
    @Override
    public Set<String> stores() {
        return Collections.unmodifiableSet(stores);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("Processor: ");
        sb.append(name)
          .append(" (stores: ").append(stores).append(")\n --> ")
          .append(nodeNames(successors))
          .append("\n <-- ")
          .append(nodeNames(predecessors));
        return sb.toString();
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        // successors are intentionally excluded to avoid infinite recursion
        final Processor other = (Processor) o;
        return name.equals(other.name)
            && stores.equals(other.stores)
            && predecessors.equals(other.predecessors);
    }

    @Override
    public int hashCode() {
        // neighbor sets are mutable, so they are left out of the hash code
        return Objects.hash(name, stores);
    }
}
/**
 * Describes a sink node. The output topic is either fixed (wrapped in a
 * {@link StaticTopicNameExtractor}) or computed per record by a custom
 * {@link TopicNameExtractor}.
 */
public final static class Sink extends AbstractNode implements TopologyDescription.Sink {
    private final TopicNameExtractor topicNameExtractor;

    public Sink(final String name,
                final TopicNameExtractor topicNameExtractor) {
        super(name);
        this.topicNameExtractor = topicNameExtractor;
    }

    public Sink(final String name,
                final String topic) {
        super(name);
        this.topicNameExtractor = new StaticTopicNameExtractor(topic);
    }

    // true when this sink writes to a single fixed topic
    private boolean hasStaticTopic() {
        return topicNameExtractor instanceof StaticTopicNameExtractor;
    }

    /** The fixed output topic, or {@code null} when a dynamic extractor is used. */
    @Override
    public String topic() {
        return hasStaticTopic()
            ? ((StaticTopicNameExtractor) topicNameExtractor).topicName
            : null;
    }

    /** The dynamic extractor, or {@code null} when the topic is fixed. */
    @Override
    public TopicNameExtractor topicNameExtractor() {
        return hasStaticTopic() ? null : topicNameExtractor;
    }

    @Override
    public void addSuccessor(final TopologyDescription.Node successor) {
        throw new UnsupportedOperationException("Sinks don't have successors.");
    }

    @Override
    public String toString() {
        if (hasStaticTopic()) {
            return "Sink: " + name + " (topic: " + topic() + ")\n <-- " + nodeNames(predecessors);
        }
        return "Sink: " + name + " (extractor class: " + topicNameExtractor + ")\n <-- "
            + nodeNames(predecessors);
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final Sink other = (Sink) o;
        return name.equals(other.name)
            && topicNameExtractor.equals(other.topicNameExtractor)
            && predecessors.equals(other.predecessors);
    }

    @Override
    public int hashCode() {
        // omit predecessors as it might change and alter the hash code
        return Objects.hash(name, topicNameExtractor);
    }
}
/**
 * Describes one sub-topology: an id plus the set of nodes it contains,
 * kept in a stable, comparator-defined order.
 */
public final static class Subtopology implements org.apache.kafka.streams.TopologyDescription.Subtopology {
    private final int id;
    private final Set<TopologyDescription.Node> nodes;

    public Subtopology(final int id, final Set<TopologyDescription.Node> nodes) {
        this.id = id;
        // defensive, sorted copy so iteration/printing order is deterministic
        final TreeSet<TopologyDescription.Node> sorted = new TreeSet<>(NODE_COMPARATOR);
        sorted.addAll(nodes);
        this.nodes = sorted;
    }

    @Override
    public int id() {
        return id;
    }

    /** Read-only view of this sub-topology's nodes. */
    @Override
    public Set<TopologyDescription.Node> nodes() {
        return Collections.unmodifiableSet(nodes);
    }

    // visible for testing
    Iterator<TopologyDescription.Node> nodesInOrder() {
        return nodes.iterator();
    }

    @Override
    public String toString() {
        return "Sub-topology: " + id + "\n" + nodesAsString() + "\n";
    }

    // one line per node, each prefixed by a space
    private String nodesAsString() {
        final StringBuilder builder = new StringBuilder();
        for (final TopologyDescription.Node node : nodes) {
            builder.append(" ").append(node).append('\n');
        }
        return builder.toString();
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final Subtopology other = (Subtopology) o;
        return id == other.id && nodes.equals(other.nodes);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, nodes);
    }
}
/**
 * Groups the topics a sub-topology touches: the topics it writes to, the topics
 * it reads from, and the internal repartition/changelog topics it owns.
 * Equality (and the hash code) is based only on {@code sourceTopics} and
 * {@code stateChangelogTopics}.
 */
public static class TopicsInfo {
    final Set<String> sinkTopics;
    final Set<String> sourceTopics;
    public final Map<String, InternalTopicConfig> stateChangelogTopics;
    public final Map<String, InternalTopicConfig> repartitionSourceTopics;

    TopicsInfo(final Set<String> sinkTopics,
               final Set<String> sourceTopics,
               final Map<String, InternalTopicConfig> repartitionSourceTopics,
               final Map<String, InternalTopicConfig> stateChangelogTopics) {
        this.sinkTopics = sinkTopics;
        this.sourceTopics = sourceTopics;
        this.repartitionSourceTopics = repartitionSourceTopics;
        this.stateChangelogTopics = stateChangelogTopics;
    }

    @Override
    public boolean equals(final Object o) {
        if (!(o instanceof TopicsInfo)) {
            return false;
        }
        final TopicsInfo that = (TopicsInfo) o;
        return sourceTopics.equals(that.sourceTopics)
            && stateChangelogTopics.equals(that.stateChangelogTopics);
    }

    @Override
    public int hashCode() {
        // pack both component hashes into one long, then fold back into an int;
        // uses the same two fields as equals()
        final long packed = ((long) sourceTopics.hashCode() << 32) | (long) stateChangelogTopics.hashCode();
        return (int) (packed % 0xFFFFFFFFL);
    }

    @Override
    public String toString() {
        return "TopicsInfo{" +
            "sinkTopics=" + sinkTopics +
            ", sourceTopics=" + sourceTopics +
            ", repartitionSourceTopics=" + repartitionSourceTopics +
            ", stateChangelogTopics=" + stateChangelogTopics +
            '}';
    }
}
/** Orders global stores by ascending id. */
private static class GlobalStoreComparator implements Comparator<TopologyDescription.GlobalStore>, Serializable {
    @Override
    public int compare(final TopologyDescription.GlobalStore globalStore1,
                       final TopologyDescription.GlobalStore globalStore2) {
        // Integer.compare avoids the overflow that plain subtraction can
        // produce for ids of opposite sign near the int range limits.
        return Integer.compare(globalStore1.id(), globalStore2.id());
    }
}
private final static GlobalStoreComparator GLOBALSTORE_COMPARATOR = new GlobalStoreComparator();
/** Orders sub-topologies by ascending id. */
private static class SubtopologyComparator implements Comparator<TopologyDescription.Subtopology>, Serializable {
    @Override
    public int compare(final TopologyDescription.Subtopology subtopology1,
                       final TopologyDescription.Subtopology subtopology2) {
        // Integer.compare avoids the overflow that plain subtraction can
        // produce for ids of opposite sign near the int range limits.
        return Integer.compare(subtopology1.id(), subtopology2.id());
    }
}
private final static SubtopologyComparator SUBTOPOLOGY_COMPARATOR = new SubtopologyComparator();
/**
 * Aggregates all sub-topologies and global stores of a topology and renders
 * them (in ascending id order) via {@link #toString()}.
 */
public final static class TopologyDescription implements org.apache.kafka.streams.TopologyDescription {
    // both sets are kept sorted by id so printing order is deterministic
    private final TreeSet<TopologyDescription.Subtopology> subtopologies = new TreeSet<>(SUBTOPOLOGY_COMPARATOR);
    private final TreeSet<TopologyDescription.GlobalStore> globalStores = new TreeSet<>(GLOBALSTORE_COMPARATOR);

    public void addSubtopology(final TopologyDescription.Subtopology subtopology) {
        subtopologies.add(subtopology);
    }

    public void addGlobalStore(final TopologyDescription.GlobalStore globalStore) {
        globalStores.add(globalStore);
    }

    @Override
    public Set<TopologyDescription.Subtopology> subtopologies() {
        return Collections.unmodifiableSet(subtopologies);
    }

    @Override
    public Set<TopologyDescription.GlobalStore> globalStores() {
        return Collections.unmodifiableSet(globalStores);
    }

    /**
     * Renders sub-topologies and global stores interleaved by id: the two
     * id-sorted lists are merged, emitting whichever entry owns the next
     * expected id.
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("Topologies:\n ");
        // descendingSet().toArray(...) walked from the back (length-1 .. 0)
        // yields ascending id order
        final TopologyDescription.Subtopology[] sortedSubtopologies =
        subtopologies.descendingSet().toArray(new Subtopology[0]);
        final TopologyDescription.GlobalStore[] sortedGlobalStores =
        globalStores.descendingSet().toArray(new GlobalStore[0]);
        int expectedId = 0;
        int subtopologiesIndex = sortedSubtopologies.length - 1;
        int globalStoresIndex = sortedGlobalStores.length - 1;
        // Merge while both lists still have entries.
        // NOTE(review): only the subtopology's id is compared against
        // expectedId; the global store is emitted by elimination. This assumes
        // subtopology and global-store ids jointly cover 0..n-1 without gaps —
        // confirm against how ids are assigned.
        while (subtopologiesIndex != -1 && globalStoresIndex != -1) {
            sb.append(" ");
            final TopologyDescription.Subtopology subtopology = sortedSubtopologies[subtopologiesIndex];
            final TopologyDescription.GlobalStore globalStore = sortedGlobalStores[globalStoresIndex];
            if (subtopology.id() == expectedId) {
                sb.append(subtopology);
                subtopologiesIndex--;
            } else {
                sb.append(globalStore);
                globalStoresIndex--;
            }
            expectedId++;
        }
        // drain whichever list still has entries
        while (subtopologiesIndex != -1) {
            final TopologyDescription.Subtopology subtopology = sortedSubtopologies[subtopologiesIndex];
            sb.append(" ");
            sb.append(subtopology);
            subtopologiesIndex--;
        }
        while (globalStoresIndex != -1) {
            final TopologyDescription.GlobalStore globalStore = sortedGlobalStores[globalStoresIndex];
            sb.append(" ");
            sb.append(globalStore);
            globalStoresIndex--;
        }
        return sb.toString();
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final TopologyDescription that = (TopologyDescription) o;
        return subtopologies.equals(that.subtopologies)
        && globalStores.equals(that.globalStores);
    }

    @Override
    public int hashCode() {
        return Objects.hash(subtopologies, globalStores);
    }
}
/**
 * Joins the names of the given nodes with {@code ", "}, or returns
 * {@code "none"} for an empty set.
 */
private static String nodeNames(final Set<TopologyDescription.Node> nodes) {
    if (nodes.isEmpty()) {
        return "none";
    }
    final StringBuilder joined = new StringBuilder();
    for (final TopologyDescription.Node node : nodes) {
        joined.append(node.name()).append(", ");
    }
    // drop the trailing ", " separator
    joined.setLength(joined.length() - 2);
    return joined.toString();
}
/**
* Used to capture subscribed topic via Patterns discovered during the
* partition assignment process.
*/
public static class SubscriptionUpdates {

    // latest snapshot of topic names matched via pattern subscription
    private final Set<String> updatedTopicSubscriptions = new HashSet<>();

    // replaces the previous snapshot entirely
    private void updateTopics(final Collection<String> topicNames) {
        updatedTopicSubscriptions.clear();
        updatedTopicSubscriptions.addAll(topicNames);
    }

    /** Read-only view of the current snapshot. */
    public Collection<String> getUpdates() {
        return Collections.unmodifiableSet(updatedTopicSubscriptions);
    }

    boolean hasUpdates() {
        return !updatedTopicSubscriptions.isEmpty();
    }

    @Override
    public String toString() {
        return "SubscriptionUpdates{updatedTopicSubscriptions=" + updatedTopicSubscriptions + "}";
    }
}
/**
 * Records the topic names discovered for regex pattern subscriptions and
 * forwards them to {@code updateSubscriptions}.
 */
void updateSubscribedTopics(final Set<String> topics,
                            final String logPrefix) {
    log.debug("{}found {} topics possibly matching regex", logPrefix, topics);
    // wrap the resolved names and push them into the pattern-subscription state
    final SubscriptionUpdates updates = new SubscriptionUpdates();
    updates.updateTopics(topics);
    updateSubscriptions(updates, logPrefix);
}
// following functions are for test only
// Test-only accessor; returns the internal set directly (not a copy).
public synchronized Set<String> getSourceTopicNames() {
    return sourceTopicNames;
}
// Test-only accessor; returns the internal map directly (not a copy).
public synchronized Map<String, StateStoreFactory> getStateStores() {
    return stateFactories;
}
}
|
package de.polocloud.api.network.protocol.packet.api.gameserver;
import de.polocloud.api.network.protocol.buffer.IPacketBuffer;
import de.polocloud.api.network.protocol.packet.Packet;
import io.netty.buffer.ByteBuf;
import java.io.IOException;
/**
 * Response packet for a game-server copy request: carries the server name,
 * a failure flag and an error message.
 */
public class APIRequestGameServerCopyResponsePacket extends Packet {

    private String gameservername;
    private boolean failed;
    private String errorMessage;

    /** No-arg constructor used when the packet is populated via {@link #read}. */
    public APIRequestGameServerCopyResponsePacket() {
    }

    public APIRequestGameServerCopyResponsePacket(String gameservername, boolean failed, String errorMessage) {
        this.gameservername = gameservername;
        this.failed = failed;
        this.errorMessage = errorMessage;
    }

    @Override
    public void write(IPacketBuffer buf) throws IOException {
        // field order must match read(): name, failed flag, error message
        buf.writeString(gameservername);
        // the flag travels as its string form ("true"/"false")
        buf.writeString(Boolean.toString(failed));
        // NOTE(review): errorMessage may be null on success — confirm that
        // IPacketBuffer#writeString tolerates null
        buf.writeString(errorMessage);
    }

    @Override
    public void read(IPacketBuffer buf) throws IOException {
        gameservername = buf.readString();
        failed = Boolean.parseBoolean(buf.readString());
        errorMessage = buf.readString();
    }

    public String getGameservername() {
        return gameservername;
    }

    public boolean isFailed() {
        return failed;
    }

    public String getErrorMessage() {
        return errorMessage;
    }
}
|
/* Copyright 2016 Urban Airship and Contributors */
package com.urbanairship.util;
import android.content.Context;
import android.content.SharedPreferences;
import android.support.annotation.IntRange;
import com.urbanairship.Logger;
import com.urbanairship.UAirship;
/**
* An auto-incrementing notification ID generator.
*/
public class NotificationIdGenerator {
private static final String SHARED_PREFERENCES_FILE = "com.urbanairship.notificationidgenerator";
private static final String NEXT_ID_KEY = "count";
private static final int MAX_RANGE = 50;
private static int start = 1000;
private static int range = 40; //Android allows a maximum of 50 notifications per package (undocumented)
private static SharedPreferences getPreferences() {
Context appContext = UAirship.getApplicationContext();
return appContext.getSharedPreferences(SHARED_PREFERENCES_FILE, Context.MODE_PRIVATE);
}
private static void putInt(String key, int value) {
SharedPreferences prefs = getPreferences();
SharedPreferences.Editor editor = prefs.edit();
editor.putInt(key, value);
editor.apply();
}
private static int getInt(String key, int defValue) {
return getPreferences().getInt(key, defValue);
}
//implicitly resets the count
/**
* Set the count and start value.
*
* @param value The integer value
*/
public static void setStart(int value) {
putInt(NEXT_ID_KEY, value);
start = value;
}
//implicitly resets the count
/**
* Set the number of notifications to display (max range). Implicitly resets
* the current id to {@link #getStart()}.
*
* @param newRange The number of notifications to display
*/
public static void setRange(@IntRange(from = 0, to = MAX_RANGE) int newRange) {
if (newRange > MAX_RANGE) {
Logger.error("The maximum number of notifications allowed is " + MAX_RANGE + ". Limiting alert id range to conform.");
newRange = MAX_RANGE;
}
putInt(NEXT_ID_KEY, start);
range = newRange;
}
/**
* Get the start value.
*
* @return The int start.
*/
public static int getStart() {
return start;
}
/**
* Get the range.
*
* @return The int range.
*/
public static int getRange() {
return range;
}
/**
* Store the next ID.
*
* @return The int next ID.
*/
public static int nextID() {
//get the next id from the shared prefs
int id = getInt(NEXT_ID_KEY, start);
//and write the next value back out
//store a new next id: increment by one, unless we're already at the maximum
int nextId = ++id;
if (nextId < start + range) {
Logger.verbose("NotificationIdGenerator - Incrementing notification ID count");
putInt(NEXT_ID_KEY, nextId);
}
//in which case, cycle
else {
Logger.verbose("NotificationIdGenerator - Resetting notification ID count");
putInt(NEXT_ID_KEY, start);
}
Logger.verbose("NotificationIdGenerator - Notification ID: " + id);
return id;
}
}
|
package ru.job4j.patterns.creational.builder;
/**
* BuilderPattern.
*
* Для построения сложных объектов.
*
* @author ifedorenko
* @since 31.08.2018
*/
public class BuilderPattern {
    /**
     * Method main.
     * @param args args
     */
    public static void main(String[] args) {
        // assemble both objects step by step through their builders
        final SportCar car = new SportCar.Builder("Audi")
                .setColor("red")
                .setMaxSpeed(300)
                .build();
        final Account account = Account.initializeRegistration()
                .login("root")
                .password("root")
                .email("root@root.com")
                .createAccount();
        System.out.println(car);
        System.out.println(account);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.