hexsha stringlengths 40 40 | size int64 3 1.05M | ext stringclasses 1 value | lang stringclasses 1 value | max_stars_repo_path stringlengths 5 1.02k | max_stars_repo_name stringlengths 4 126 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses list | max_stars_count float64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 5 1.02k | max_issues_repo_name stringlengths 4 114 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses list | max_issues_count float64 1 92.2k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 5 1.02k | max_forks_repo_name stringlengths 4 136 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses list | max_forks_count float64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | avg_line_length float64 2.55 99.9 | max_line_length int64 3 1k | alphanum_fraction float64 0.25 1 | index int64 0 1M | content stringlengths 3 1.05M |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3e0e717210bf58a63f10c074abce637e35803920 | 3,419 | java | Java | netconf-server-modelnode-fwk/src/test/java/org/broadband_forum/obbaa/netconf/mn/fwk/schema/validation/typevalidators/UnionTypeValidatorTest.java | BroadbandForum/obbaa-netconf-stack | 1895ed252d27863c4a01f2ae1f3ed51a7424ffc4 | [
"Apache-2.0"
] | 2 | 2021-04-26T16:11:56.000Z | 2022-03-01T06:41:29.000Z | netconf-server-modelnode-fwk/src/test/java/org/broadband_forum/obbaa/netconf/mn/fwk/schema/validation/typevalidators/UnionTypeValidatorTest.java | BroadbandForum/obbaa-netconf-stack | 1895ed252d27863c4a01f2ae1f3ed51a7424ffc4 | [
"Apache-2.0"
] | 3 | 2020-03-13T13:10:37.000Z | 2021-03-31T21:08:50.000Z | netconf-server-modelnode-fwk/src/test/java/org/broadband_forum/obbaa/netconf/mn/fwk/schema/validation/typevalidators/UnionTypeValidatorTest.java | BroadbandForum/obbaa-netconf-stack | 1895ed252d27863c4a01f2ae1f3ed51a7424ffc4 | [
"Apache-2.0"
] | 2 | 2019-05-15T05:44:37.000Z | 2021-09-05T07:38:39.000Z | 57.949153 | 260 | 0.742322 | 6,128 | package org.broadband_forum.obbaa.netconf.mn.fwk.schema.validation.typevalidators;
import org.broadband_forum.obbaa.netconf.api.util.NetconfMessageBuilderException;
import org.junit.Test;
/**
 * Validation tests for YANG union-typed leaves: a value passes when at least
 * one member type of the union accepts it, and the failure report combines the
 * errors of every member type.
 */
public class UnionTypeValidatorTest extends AbstractTypeValidatorTest {

    /** A value matching at least one member type of the union is accepted. */
    @Test
    public void testValidUnion() throws NetconfMessageBuilderException {
        testPass("unionvalidator/valid-union.xml");
    }

    /** Values rejected by every member type fail with the combined union error. */
    @Test
    public void testInValidUnion() throws NetconfMessageBuilderException {
        // 9 falls in the gap between the allowed ranges <-128, 0> and <10, 100>.
        testFail("unionvalidator/invalid-union-1.xml",
                "The argument is out of bounds <-128, 0>, <10, 100>, <120, 127> or Value \"9\" is an invalid value. Expected values: [union]", "/validation:validation/validation:type-validation[validation:id='1']/validation:union-type", "range-out-of-specified-bounds");
        // 128 lies above the topmost range <120, 127>.
        testFail("unionvalidator/invalid-union-2.xml",
                "The argument is out of bounds <-128, 0>, <10, 100>, <120, 127> or Value \"128\" is an invalid value. Expected values: [union]", "/validation:validation/validation:type-validation[validation:id='1']/validation:union-type", "range-out-of-specified-bounds");
        // "abc" is neither a number nor the enum value "union".
        testFail("unionvalidator/invalid-union-3.xml",
                "The argument is out of bounds <-128, 0>, <10, 100>, <120, 127> or Value \"abc\" is an invalid value. Expected values: [union]", "/validation:validation/validation:type-validation[validation:id='1']/validation:union-type", "range-out-of-specified-bounds");
        // The empty string matches no member type either.
        testFail("unionvalidator/invalid-union-4.xml",
                "The argument is out of bounds <-128, 0>, <10, 100>, <120, 127> or Value \"\" is an invalid value. Expected values: [union]", "/validation:validation/validation:type-validation[validation:id='1']/validation:union-type", "range-out-of-specified-bounds");
    }

    /** A union with custom error-app-message/error-app-tag reports those on failure. */
    @Test
    public void testCustomUnion() throws NetconfMessageBuilderException {
        // Values accepted by the numeric member type.
        testCustomPass(formRequestString("<custom-union-type>100</custom-union-type>"));
        testCustomPass(formRequestString("<custom-union-type>10</custom-union-type>"));
        // Value accepted by the enumeration member type.
        testCustomPass(formRequestString("<custom-union-type>union</custom-union-type>"));
        // Rejected by both member types -> custom app message/tag surface in the error.
        testCustomFail(formRequestString("<custom-union-type>8</custom-union-type>"),
                "range constraint error-app-message or Value \"8\" is an invalid value. Expected values: [union]",
                "/validation:validation/validation:type-validation[validation:id='1']/validation:custom-union-type", "range constraint error-app-tag");
        testCustomFail(formRequestString("<custom-union-type>test</custom-union-type>"),
                "range constraint error-app-message or Value \"test\" is an invalid value. Expected values: [union]",
                "/validation:validation/validation:type-validation[validation:id='1']/validation:custom-union-type", "range constraint error-app-tag");
    }

    /** Without an error-app-tag on the type, the reported app tag is empty. */
    @Test
    public void testErrorWithoutAppTag() throws NetconfMessageBuilderException {
        testCustomFail(formRequestString("<union-type-without-app-tag>8</union-type-without-app-tag>"),
                "Invalid value. It should be \"true\" or \"false\" instead of \"8\"",
                "/validation:validation/validation:type-validation[validation:id='1']/validation:union-type-without-app-tag", "");
    }
}
|
3e0e723e640488b0584cfd1204a51006eb808449 | 3,370 | java | Java | app-itests/src/itest/java/com/rideaustin/dispatch/womenonly/WOInAirportQueueIT.java | coopersystem-fsd/server | 24354717624c25b5d4faf0b7ea540e2742e8039f | [
"MIT"
] | 13 | 2020-08-20T23:51:13.000Z | 2021-08-23T17:47:14.000Z | app-itests/src/itest/java/com/rideaustin/dispatch/womenonly/WOInAirportQueueIT.java | coopersystem-fsd/server | 24354717624c25b5d4faf0b7ea540e2742e8039f | [
"MIT"
] | 3 | 2020-06-25T18:16:12.000Z | 2021-11-25T21:36:19.000Z | app-itests/src/itest/java/com/rideaustin/dispatch/womenonly/WOInAirportQueueIT.java | coopersystem-fsd/server | 24354717624c25b5d4faf0b7ea540e2742e8039f | [
"MIT"
] | 12 | 2020-06-16T20:42:18.000Z | 2022-03-10T21:45:39.000Z | 37.865169 | 121 | 0.771217 | 6,129 | package com.rideaustin.dispatch.womenonly;
import javax.inject.Inject;
import org.junit.Before;
import org.junit.Test;
import com.rideaustin.model.Area;
import com.rideaustin.model.enums.RideStatus;
import com.rideaustin.model.ride.DriverType;
import com.rideaustin.service.areaqueue.AreaQueueUpdateService;
import com.rideaustin.test.util.TestUtils;
/**
 * Integration tests for women-only (WO) dispatch when drivers wait in the
 * airport queue: a regular driver and a women-only+fingerprinted (WO+FP)
 * driver are queued, a ride is requested with different driver-type filters,
 * and the dispatch order plus the final NO_AVAILABLE_DRIVER outcome are
 * verified. The shared online/request/offline choreography is factored into
 * private helpers; the test method names and behavior are unchanged.
 */
public class WOInAirportQueueIT extends AbstractWomenOnlyDispatchTest {

  @Inject
  private AreaQueueUpdateService queueService;

  private Area airport;

  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    airport = locationProvider.getAirport();
  }

  /** WO+FP request: only the WO+FP driver is dispatched; his decline ends the ride. */
  @Test
  public void test() throws Exception {
    putDriversInAirportQueue();
    final Long ride = requestAirportRide(DriverType.WOMEN_ONLY, DriverType.FINGERPRINTED);
    awaitDispatch(woFPDriver, ride);
    driverAction.declineRide(woFPDriver.getDriver().getEmail(), ride);
    awaitStatus(ride, RideStatus.NO_AVAILABLE_DRIVER);
    takeDriversOffline();
  }

  /** WO-only request: only the WO+FP driver is dispatched; his decline ends the ride. */
  @Test
  public void testA() throws Exception {
    putDriversInAirportQueue();
    final Long ride = requestAirportRide(DriverType.WOMEN_ONLY);
    awaitDispatch(woFPDriver, ride);
    driverAction.declineRide(woFPDriver.getDriver().getEmail(), ride);
    awaitStatus(ride, RideStatus.NO_AVAILABLE_DRIVER);
    takeDriversOffline();
  }

  /** FP-only request: the regular driver is tried first, then the WO+FP driver. */
  @Test
  public void testB() throws Exception {
    putDriversInAirportQueue();
    final Long ride = requestAirportRide(DriverType.FINGERPRINTED);
    awaitDispatch(regularDriver, ride);
    driverAction.declineRide(regularDriver.getDriver().getEmail(), ride);
    awaitDispatch(woFPDriver, ride);
    driverAction.declineRide(woFPDriver.getDriver().getEmail(), ride);
    awaitStatus(ride, RideStatus.NO_AVAILABLE_DRIVER);
    takeDriversOffline();
  }

  /** Brings both drivers online at the airport, refreshing the queue after each. */
  private void putDriversInAirportQueue() throws Exception {
    driverAction.goOnline(regularDriver.getDriver().getEmail(), locationProvider.getAirportLocation());
    queueService.updateStatuses(airport.getId());
    driverAction.goOnline(woFPDriver.getDriver().getEmail(), locationProvider.getAirportLocation());
    queueService.updateStatuses(airport.getId());
  }

  /** Requests a REGULAR-category ride from the airport with the given driver-type filters. */
  private Long requestAirportRide(String... driverTypes) throws Exception {
    return riderAction.requestRide(rider.getEmail(), locationProvider.getAirportLocation(), TestUtils.REGULAR,
        driverTypes);
  }

  /** Takes both drivers offline and refreshes the airport queue state. */
  private void takeDriversOffline() throws Exception {
    driverAction.goOffline(regularDriver.getDriver().getEmail());
    driverAction.goOffline(woFPDriver.getDriver().getEmail());
    queueService.updateStatuses(airport.getId());
  }
}
|
3e0e740eb76bfb3d9c74e8fb2e204b577b192039 | 5,016 | java | Java | hedera-node/src/main/java/com/hedera/services/state/merkle/MerkleEntityAssociation.java | amckay7777/hedera-services | 61c337d926d2f31b05b8dfa0094c88ef0b208db5 | [
"Apache-2.0"
] | null | null | null | hedera-node/src/main/java/com/hedera/services/state/merkle/MerkleEntityAssociation.java | amckay7777/hedera-services | 61c337d926d2f31b05b8dfa0094c88ef0b208db5 | [
"Apache-2.0"
] | null | null | null | hedera-node/src/main/java/com/hedera/services/state/merkle/MerkleEntityAssociation.java | amckay7777/hedera-services | 61c337d926d2f31b05b8dfa0094c88ef0b208db5 | [
"Apache-2.0"
] | null | null | null | 29.680473 | 102 | 0.729067 | 6,130 | package com.hedera.services.state.merkle;
/*-
*
* Hedera Services Node
*
* Copyright (C) 2018 - 2021 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import com.google.common.base.MoreObjects;
import com.hedera.services.store.models.TokenRelationship;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.TokenID;
import com.swirlds.common.io.SerializableDataInputStream;
import com.swirlds.common.io.SerializableDataOutputStream;
import com.swirlds.common.merkle.utility.AbstractMerkleLeaf;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import java.io.IOException;
/**
 * Merkle leaf giving the association between two entities — in the factory
 * methods below, an account ("from" side) and a token ("to" side) — each
 * identified by its (shard, realm, num) triple.
 */
public class MerkleEntityAssociation extends AbstractMerkleLeaf {
	/* Serialization version written/read by serialize()/deserialize(). */
	static final int MERKLE_VERSION = 1;
	/* Unique id used by the Swirlds constructable registry. */
	static final long RUNTIME_CONSTRUCTABLE_ID = 0xce8d38caab2e51dcL;
	/* The "from" entity id (the account in fromAccountTokenRel/fromModelRel). */
	private long fromShard, fromRealm, fromNum;
	/* The "to" entity id (the token in fromAccountTokenRel/fromModelRel). */
	private long toShard, toRealm, toNum;
	/* No-arg constructor used during deserialization. */
	public MerkleEntityAssociation() {
	}
	public MerkleEntityAssociation(
			long fromShard, long fromRealm, long fromNum,
			long toShard, long toRealm, long toNum
	) {
		this.fromShard = fromShard;
		this.fromRealm = fromRealm;
		this.fromNum = fromNum;
		this.toShard = toShard;
		this.toRealm = toRealm;
		this.toNum = toNum;
	}
	/**
	 * Builds an association from a model-layer token relationship, taking the
	 * relationship's account id as the "from" side and token id as the "to" side.
	 */
	public static MerkleEntityAssociation fromModelRel(TokenRelationship tokenRelationship) {
		final var accountId = tokenRelationship.getAccount().getId();
		final var tokenId = tokenRelationship.getToken().getId();
		return new MerkleEntityAssociation(
				accountId.getShard(), accountId.getRealm(), accountId.getNum(),
				tokenId.getShard(), tokenId.getRealm(), tokenId.getNum());
	}
	/** Convenience overload unpacking an (account, token) pair. */
	public static MerkleEntityAssociation fromAccountTokenRel(Pair<AccountID, TokenID> rel) {
		return fromAccountTokenRel(rel.getLeft(), rel.getRight());
	}
	/** Builds an association whose "from" side is the account and "to" side the token. */
	public static MerkleEntityAssociation fromAccountTokenRel(AccountID account, TokenID token) {
		return new MerkleEntityAssociation(
				account.getShardNum(), account.getRealmNum(), account.getAccountNum(),
				token.getShardNum(), token.getRealmNum(), token.getTokenNum());
	}
	/** Reconstructs the (account, token) gRPC id pair this association represents. */
	public Pair<AccountID, TokenID> asAccountTokenRel() {
		return Pair.of(
				AccountID.newBuilder()
						.setShardNum(fromShard)
						.setRealmNum(fromRealm)
						.setAccountNum(fromNum)
						.build(),
				TokenID.newBuilder()
						.setShardNum(toShard)
						.setRealmNum(toRealm)
						.setTokenNum(toNum)
						.build());
	}
	/* --- MerkleLeaf --- */
	@Override
	public long getClassId() {
		return RUNTIME_CONSTRUCTABLE_ID;
	}
	@Override
	public int getVersion() {
		return MERKLE_VERSION;
	}
	/* Fields must be read in exactly the order serialize() writes them. */
	@Override
	public void deserialize(SerializableDataInputStream in, int version) throws IOException {
		fromShard = in.readLong();
		fromRealm = in.readLong();
		fromNum = in.readLong();
		toShard = in.readLong();
		toRealm = in.readLong();
		toNum = in.readLong();
	}
	@Override
	public void serialize(SerializableDataOutputStream out) throws IOException {
		out.writeLong(fromShard);
		out.writeLong(fromRealm);
		out.writeLong(fromNum);
		out.writeLong(toShard);
		out.writeLong(toRealm);
		out.writeLong(toNum);
	}
	/* --- Object --- */
	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || MerkleEntityAssociation.class != o.getClass()) {
			return false;
		}
		var that = (MerkleEntityAssociation) o;
		return new EqualsBuilder()
				.append(fromShard, that.fromShard).append(fromRealm, that.fromRealm).append(fromNum, that.fromNum)
				.append(toShard, that.toShard).append(toRealm, that.toRealm).append(toNum, that.toNum)
				.isEquals();
	}
	@Override
	public int hashCode() {
		return new HashCodeBuilder(17, 37)
				.append(fromShard).append(fromRealm).append(fromNum)
				.append(toShard).append(toRealm).append(toNum)
				.toHashCode();
	}
	/* --- FastCopyable --- */
	/* Returns a fresh instance with identical fields; the receiver is left unchanged. */
	@Override
	public MerkleEntityAssociation copy() {
		return new MerkleEntityAssociation(fromShard, fromRealm, fromNum, toShard, toRealm, toNum);
	}
	/* --- Bean --- */
	@Override
	public String toString() {
		return MoreObjects.toStringHelper(this)
				.add("fromShard", fromShard).add("fromRealm", fromRealm).add("fromNum", fromNum)
				.add("toShard", toShard).add("toRealm", toRealm).add("toNum", toNum)
				.toString();
	}
	/** Renders the association as "fromShard.fromRealm.fromNum <-> toShard.toRealm.toNum". */
	public String toAbbrevString() {
		return String.format(
				"%d.%d.%d <-> %d.%d.%d",
				fromShard, fromRealm, fromNum,
				toShard, toRealm, toNum);
	}
}
|
3e0e74167e0781db63c267163a9c37c8c4ff1e27 | 366 | java | Java | axis1-ws-security/src/main/java/com/redhat/www/samples/ws/OrderService/OrderService_PortType.java | rmarting/java-samples | 47c5d6ae473bb8201ac928706b5d408214b6761e | [
"Unlicense"
] | 2 | 2015-10-08T07:52:16.000Z | 2016-03-30T22:22:34.000Z | axis1-ws-security/src/main/java/com/redhat/www/samples/ws/OrderService/OrderService_PortType.java | rmarting/java-samples | 47c5d6ae473bb8201ac928706b5d408214b6761e | [
"Unlicense"
] | null | null | null | axis1-ws-security/src/main/java/com/redhat/www/samples/ws/OrderService/OrderService_PortType.java | rmarting/java-samples | 47c5d6ae473bb8201ac928706b5d408214b6761e | [
"Unlicense"
] | null | null | null | 28.153846 | 92 | 0.759563 | 6,131 | /**
* OrderService_PortType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Apr 22, 2006 (06:55:48 PDT) WSDL2Java emitter.
*/
package com.redhat.www.samples.ws.OrderService;
public interface OrderService_PortType extends java.rmi.Remote {
    /**
     * Places an order via the remote OrderService endpoint.
     *
     * <p>Auto-generated by Apache Axis 1.4 WSDL2Java; do not edit by hand.
     *
     * @param in the order request payload (presumably per the service WSDL — confirm)
     * @return the service's response string
     * @throws java.rmi.RemoteException if the remote invocation fails
     */
    public java.lang.String placeOrder(java.lang.String in) throws java.rmi.RemoteException;
}
|
3e0e7428db3e599f1b9cab7108e1c57ebea9f92e | 4,330 | java | Java | src/test/java/io/github/pureza/warbots/weaponry/ArtilleryTest.java | pureza/warbots | 25289be5ffa876eefbd95489d51cedc33d172afd | [
"MIT"
] | null | null | null | src/test/java/io/github/pureza/warbots/weaponry/ArtilleryTest.java | pureza/warbots | 25289be5ffa876eefbd95489d51cedc33d172afd | [
"MIT"
] | null | null | null | src/test/java/io/github/pureza/warbots/weaponry/ArtilleryTest.java | pureza/warbots | 25289be5ffa876eefbd95489d51cedc33d172afd | [
"MIT"
] | null | null | null | 29.657534 | 127 | 0.67806 | 6,132 | package io.github.pureza.warbots.weaponry;
import io.github.pureza.warbots.Tests;
import io.github.pureza.warbots.entities.Bot;
import io.github.pureza.warbots.geometry.Point;
import io.github.pureza.warbots.geometry.Vector;
import org.hamcrest.core.Is;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsCollectionContaining.hasItem;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.when;
import static io.github.pureza.warbots.Tests.mockBot;
/**
 * Unit tests for {@link Artillery}: weapon acquisition, aggregate weapon
 * strength, weapon selection against an enemy, and firing behaviour.
 */
public class ArtilleryTest {

    private Bot bot;

    private Artillery artillery;

    @Before
    public void setUp() {
        bot = mockBot();
        artillery = new Artillery(bot);
        // Every artillery in these tests starts out holding a single hand gun.
        artillery.acquire(Tests.buildHandGun(bot));
    }

    /*
     * acquire(Weapon weapon)
     */

    @Test
    public void acquireAcquiresNewWeapon() {
        Bot owner = mockBot();
        artillery.acquire(Tests.buildLaserGun(owner));
        assertThat(artillery.getWeapons().keySet(), hasItem(Weapon.WeaponType.LASER_GUN));
    }

    @Test
    public void acquireSwitchesToNewWeapon() {
        LaserGun newlyAcquired = Tests.buildLaserGun(mockBot());
        artillery.acquire(newlyAcquired);
        assertThat(artillery.getCurrentWeapon(), is(newlyAcquired));
    }

    @Test
    public void acquireTakesAmmoFromExistingWeapon() {
        int ammoBefore = artillery.getCurrentWeapon().getRemainingAmmo();
        // Picking up a second hand gun merges its ammo into the one already held.
        artillery.acquire(Tests.buildHandGun(mockBot()));
        assertThat(artillery.getCurrentWeapon().getRemainingAmmo(), is(greaterThan(ammoBefore)));
    }

    /*
     * totalWeaponStrength()
     */

    @Test
    public void totalWeaponStrengthAveragesIndividualWeaponStrength() {
        Weapon current = artillery.getCurrentWeapon();
        assertThat(artillery.totalWeaponStrength(),
                is(current.individualWeaponStrength() / Weapon.WeaponType.values().length));
    }

    /*
     * chooseWeapon(Bot other)
     */

    @Test
    public void chooseWeaponSelectsMostAppropriateWeapon() {
        // A distant enemy makes the hand gun the preferred choice over the laser gun.
        Bot target = mockBot(bot.getLocation().displace(20, 10), Vector.vec(0, 1), 0.3, 0);
        Weapon handGun = artillery.getCurrentWeapon();
        artillery.acquire(Tests.buildLaserGun(bot));
        assertThat(artillery.chooseWeapon(target), is(handGun));
    }

    @Test
    public void chooseWeaponDoesntChoosesUnloadedWeaponWhenPossible() {
        // One bullet left in the hand gun...
        HandGun nearlyEmptyHandGun = new HandGun(bot, 1, 10, 1, null);
        // ...and no rockets at all in the launcher.
        RocketLauncher emptyRocketLauncher = new RocketLauncher(bot, 0, 10, 1, null);
        Artillery loadout = new Artillery(bot);
        loadout.acquire(nearlyEmptyHandGun);
        loadout.acquire(emptyRocketLauncher);
        // Even with the enemy almost touching us, the empty rocket launcher is skipped.
        Bot target = mockBot(bot.getLocation().displace(0.01, 0.01));
        assertThat(loadout.chooseWeapon(target), is(nearlyEmptyHandGun));
    }

    /*
     * fireAt(Bot enemy)
     */

    @Test
    public void fireAtIgnoresEnemiesAtMyBack() {
        // Shooter at (0, 0) facing (1, 0): an enemy at (0, 1) is outside the field of vision.
        Bot target = mockBot(Point.pt(0, 1), Vector.vec(0, 1), 0.3, 0);
        when(bot.rotateFacing(anyObject(), anyLong())).thenReturn(false);
        assertThat(artillery.fireAt(target, 1000), is(nullValue()));
    }

    @Test
    public void fireAtSelectsWeaponAndFiresAtEnemiesInFront() {
        // Shooter at (0, 0) facing (1, 0): an enemy at (2, 0) is straight ahead.
        Bot target = mockBot(Point.pt(2, 0), Vector.vec(0, 1), 0.3, 0);
        when(bot.rotateFacing(anyObject(), anyLong())).thenReturn(true);
        assertThat(artillery.fireAt(target, 1000), is(not(nullValue())));
    }
}
|
3e0e742e8d6eaefd495db2bba8f5edb9a26f5b90 | 4,103 | java | Java | src/main/java/com/github/harbby/gadtry/graph/impl/RouteImpl.java | gonBorn/gadtry | b2e099bbeeb8abcf3d13a151683318c4e939814c | [
"Apache-2.0"
] | 39 | 2018-11-28T13:15:11.000Z | 2021-09-14T15:26:38.000Z | src/main/java/com/github/harbby/gadtry/graph/impl/RouteImpl.java | gonBorn/gadtry | b2e099bbeeb8abcf3d13a151683318c4e939814c | [
"Apache-2.0"
] | 14 | 2018-12-01T07:47:39.000Z | 2021-08-20T05:29:58.000Z | src/main/java/com/github/harbby/gadtry/graph/impl/RouteImpl.java | gonBorn/gadtry | b2e099bbeeb8abcf3d13a151683318c4e939814c | [
"Apache-2.0"
] | 11 | 2018-12-06T02:32:51.000Z | 2022-03-08T12:02:07.000Z | 26.470968 | 103 | 0.593224 | 6,133 | /*
* Copyright (C) 2018 The GadTry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.harbby.gadtry.graph.impl;
import com.github.harbby.gadtry.base.Iterators;
import com.github.harbby.gadtry.base.Lazys;
import com.github.harbby.gadtry.graph.Edge;
import com.github.harbby.gadtry.graph.Node;
import com.github.harbby.gadtry.graph.Route;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.Supplier;
import static com.github.harbby.gadtry.base.MoreObjects.toStringHelper;
/**
 * Default {@link Route} implementation: a begin {@link Node} followed by a
 * chain of {@link Edge}s. Dead-loop detection and the id list are computed
 * lazily and memoized via {@link Lazys#goLazy}.
 *
 * <p>Changes from the original: the raw-type cast in {@code equals} now uses a
 * wildcard, the Javadoc of {@link #findDeadLoop()} is corrected to match the
 * implementation (it returns {@code true} when a loop IS found), and comments
 * are translated to English.
 */
public class RouteImpl<E, R>
        implements Route<E, R>
{
    private final Node<E, R> begin;
    private final Deque<Edge<E, R>> edges;
    private final Supplier<Boolean> findDeadLoop; //memoized: true once some node appears twice on this route
    private final Supplier<List<String>> nodeIds;

    public RouteImpl(Node<E, R> begin, Deque<Edge<E, R>> edges)
    {
        this.begin = begin;
        this.edges = edges;
        this.findDeadLoop = Lazys.goLazy(() -> {
            Edge<E, R> lastEdge = getLastEdge();
            // A loop exists when the last node equals the begin node, or when the last
            // node already appears as the out-node of an earlier edge; once a node shows
            // up twice there is no need to keep searching.
            return begin.getId().equals(lastEdge.getOutNode().getId()) ||
                    edges.stream().anyMatch(erEdge -> erEdge != lastEdge && erEdge.getOutNode().getId()
                            .equals(getLastNode().getId()));
        });
        this.nodeIds = Lazys.goLazy(() -> {
            List<String> list = new ArrayList<>(this.size() + 1);
            list.add(begin.getId());
            this.edges.forEach(erEdge -> {
                list.add(erEdge.getOutNode().getId());
            });
            return list;
        });
    }

    @Override
    public Route.Builder<E, R> copy()
    {
        return Route.builder(begin).addAll(this.edges);
    }

    /** Returns the node ids along the route, beginning with the start node's id. */
    @Override
    public List<String> getIds()
    {
        return nodeIds.get();
    }

    /**
     * Detects a dead (infinite) recursion on this route.
     *
     * @return true when some node appears twice on the route, i.e. a loop was found
     */
    @Override
    public boolean findDeadLoop()
    {
        return findDeadLoop.get();
    }

    @Override
    public Deque<Edge<E, R>> getEdges()
    {
        return edges;
    }

    @Override
    public int size()
    {
        return edges.size();
    }

    /**
     * Returns the node {@code index} steps back from the end of the route;
     * {@code index == size()} yields the begin node.
     *
     * @throws NoSuchElementException if index exceeds the route length
     */
    @Override
    public Node<E, R> getLastNode(int index)
    {
        Iterator<Edge<E, R>> iterator = this.edges.descendingIterator();
        if (this.size() == index) {
            return begin;
        }
        else if (this.size() > index) {
            return Iterators.getFirst(iterator, index).getOutNode();
        }
        else {
            throw new NoSuchElementException(String.valueOf(index));
        }
    }

    @Override
    public Edge<E, R> getLastEdge()
    {
        if (edges.isEmpty()) {
            throw new IllegalStateException("this Route only begin node");
        }
        return edges.getLast();
    }

    @Override
    public int hashCode()
    {
        // NOTE(review): Deque implementations such as ArrayDeque inherit identity
        // hashCode/equals from Object, so two routes with equal content may compare
        // unequal here unless the supplied Deque overrides them -- confirm callers.
        return Objects.hash(begin, edges);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if ((obj == null) || (getClass() != obj.getClass())) {
            return false;
        }
        RouteImpl<?, ?> other = (RouteImpl<?, ?>) obj;  // wildcard instead of raw type
        return Objects.equals(this.begin, other.begin) && Objects.equals(this.edges, other.edges);
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("begin", begin)
                .add("route", String.join("-", getIds()))
                .toString();
    }
}
|
3e0e74bd24097fe33abc1913d07963c1849d1733 | 709 | java | Java | flycat-context/src/main/java/com/github/flycat/module/ModuleType.java | zgqq/flycat | 9bd0e7fba6e852bde4075c513e0143645fd696e3 | [
"Apache-2.0"
] | 5 | 2019-08-15T18:16:53.000Z | 2021-02-22T15:45:32.000Z | flycat-context/src/main/java/com/github/flycat/module/ModuleType.java | zgqq/flycat | 9bd0e7fba6e852bde4075c513e0143645fd696e3 | [
"Apache-2.0"
] | null | null | null | flycat-context/src/main/java/com/github/flycat/module/ModuleType.java | zgqq/flycat | 9bd0e7fba6e852bde4075c513e0143645fd696e3 | [
"Apache-2.0"
] | 1 | 2020-05-23T04:16:09.000Z | 2020-05-23T04:16:09.000Z | 33.761905 | 75 | 0.733427 | 6,134 | /**
* Copyright 2019 zgqq <efpyi@example.com>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.flycat.module;
/**
 * Kind of a flycat module.
 *
 * <p>NOTE(review): semantics inferred from the constant names only — confirm
 * against usages: LOCAL presumably an in-process module, SERVICE a module
 * exposed as a service, and REFERENCE a reference to a module hosted elsewhere.
 */
public enum ModuleType {
    LOCAL, SERVICE, REFERENCE
}
|
3e0e754e9752c086f2eae3b6b9405a64ba526241 | 1,461 | java | Java | membrane-bk/src/main/java/cloud/celldata/membrane/mapper/DataAttributeMapper.java | celldata/Membrane | 68ea599145a1d545b3ce77394482b6023ef282c8 | [
"MIT"
] | 2 | 2020-07-30T03:32:01.000Z | 2020-11-09T03:26:08.000Z | membrane-bk/src/main/java/cloud/celldata/membrane/mapper/DataAttributeMapper.java | celldata/Membrane | 68ea599145a1d545b3ce77394482b6023ef282c8 | [
"MIT"
] | 7 | 2020-06-09T05:49:22.000Z | 2022-02-27T09:10:18.000Z | membrane-bk/src/main/java/cloud/celldata/membrane/mapper/DataAttributeMapper.java | celldata/Membrane | 68ea599145a1d545b3ce77394482b6023ef282c8 | [
"MIT"
] | 2 | 2020-06-09T04:23:14.000Z | 2020-11-09T03:26:11.000Z | 24.35 | 110 | 0.646817 | 6,135 | package cloud.celldata.membrane.mapper;
import cloud.celldata.membrane.pojo.entity.DataAttributeEntity;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* 属性配置 Mapper
*
* @author wyw
* @date 2020-07-16
**/
public interface DataAttributeMapper {
/**
* 添加属性配置实体
*
* @param dataAttributeEntity 数据权限 属性配置实体
* @param userId 用户ID
*/
void addAttribute(@Param("dataAttributeEntity") DataAttributeEntity dataAttributeEntity,
@Param("userId") Integer userId);
/**
* 编辑属性配置实体
*
* @param dataAttributeEntity 数据权限 属性配置实体
* @param userId 用户ID
*/
void updateAttribute(@Param("dataAttributeEntity") DataAttributeEntity dataAttributeEntity,
@Param("userId") Integer userId);
/**
* 根据基础配置ID分页查询属性配置列表
*
* @param dataConfigId 基础配置ID
* @return 同一基础配置下属性配置列表
*/
List<DataAttributeEntity> selectAttributesByDataConfigId(@Param("dataConfigId") Integer dataConfigId);
/**
* 删除属性配置
*
* @param idList 属性配置ID列表
* @param userId 用户ID
*/
void removeAttribute(@Param("idList") List<Integer> idList, @Param("userId") Integer userId);
/**
* 按属性配置名称计数
*
* @param id 属性配置ID
* @param dataAttributeName 属性配置名称
* @return 相同属性配置名称记录条数
*/
Integer countDataAttribute(@Param("id") Integer id, @Param("dataAttributeName") String dataAttributeName);
}
|
3e0e75768954d8627d6358fbd4b1fd8df0556f5d | 31,976 | java | Java | tools/mspsim/se/sics/mspsim/cli/DebugCommands.java | funaquarius24/Contiki-ng-PED | 8c7d2f99c78e6765ab7f5e921e339ed0dc53eccc | [
"BSD-3-Clause"
] | 16 | 2018-01-15T20:37:56.000Z | 2021-11-25T16:20:50.000Z | tools/mspsim/se/sics/mspsim/cli/DebugCommands.java | funaquarius24/Contiki-ng-PED | 8c7d2f99c78e6765ab7f5e921e339ed0dc53eccc | [
"BSD-3-Clause"
] | 2 | 2018-01-16T15:31:43.000Z | 2019-12-09T09:46:48.000Z | tools/mspsim/se/sics/mspsim/cli/DebugCommands.java | funaquarius24/Contiki-ng-PED | 8c7d2f99c78e6765ab7f5e921e339ed0dc53eccc | [
"BSD-3-Clause"
] | 17 | 2018-03-11T16:32:34.000Z | 2021-11-02T09:49:12.000Z | 42.240423 | 164 | 0.492588 | 6,136 | /**
* Copyright (c) 2008, Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Institute nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* This file is part of MSPSim.
*
* -----------------------------------------------------------------
*
* Author : Joakim Eriksson, Niclas Finne
* Created : Mon Feb 11 2008
*/
package se.sics.mspsim.cli;
import se.sics.mspsim.core.DbgInstruction;
import se.sics.mspsim.core.DisAsm;
import se.sics.mspsim.core.EmulationException;
import se.sics.mspsim.core.EmulationLogger.WarningType;
import se.sics.mspsim.core.LogListener;
import se.sics.mspsim.core.Loggable;
import se.sics.mspsim.core.MSP430;
import se.sics.mspsim.core.MSP430Constants;
import se.sics.mspsim.core.Memory;
import se.sics.mspsim.core.Memory.AccessMode;
import se.sics.mspsim.core.Memory.AccessType;
import se.sics.mspsim.core.MemoryMonitor;
import se.sics.mspsim.core.RegisterMonitor;
import se.sics.mspsim.core.TimeEvent;
import se.sics.mspsim.platform.GenericNode;
import se.sics.mspsim.util.ComponentRegistry;
import se.sics.mspsim.util.DebugInfo;
import se.sics.mspsim.util.ELF;
import se.sics.mspsim.util.GDBStubs;
import se.sics.mspsim.util.MapEntry;
import se.sics.mspsim.util.Utils;
/**
 * Registers MSPSim's debugger CLI commands: breakpoints, memory and register
 * watches, single stepping, memory/register dumps and patching, execution
 * tracing, GDB remote stubs and per-unit log control.
 *
 * Commands are only registered when an {@link MSP430} cpu is present in the
 * component registry; CPU run-control commands additionally require a
 * {@link GenericNode} registered under the name "node".
 */
public class DebugCommands implements CommandBundle {
  // State for the "time" command: emulated time and wall-clock time at the
  // previous invocation, used to report deltas and the speed factor.
  private long lastCall = 0;
  private long lastWall = 0;
  private ComponentRegistry registry;

  private ELF getELF() {
    return registry.getComponent(ELF.class);
  }

  public void setupCommands(ComponentRegistry registry, CommandHandler ch) {
    this.registry = registry;
    final MSP430 cpu = registry.getComponent(MSP430.class);
    final GenericNode node = registry.getComponent(GenericNode.class, "node");
    if (cpu != null) {
      // break: stop the CPU when the given address is about to be executed.
      ch.registerCommand("break", new BasicAsyncCommand("add a breakpoint to a given address or symbol",
          "<address or symbol>") {
        private int address;
        private MemoryMonitor monitor;
        public int executeCommand(final CommandContext context) {
          address = context.getArgumentAsAddress(0);
          if (address < 0) {
            context.err.println("unknown symbol: " + context.getArgument(0));
            return 1;
          }
          monitor = new MemoryMonitor.Adapter() {
            private long lastCycles = -1;
            @Override
            public void notifyReadBefore(int address, AccessMode mode, AccessType type) {
              // Only trigger once per cycle so the same instruction fetch
              // does not re-trip the breakpoint.
              if (type == AccessType.EXECUTE && cpu.cycles != lastCycles) {
                context.out.println("*** Break at $" + cpu.getAddressAsString(address));
                cpu.triggBreakpoint();
                lastCycles = cpu.cycles;
              }
            }
          };
          cpu.addWatchPoint(address, monitor);
          context.err.println("Breakpoint set at $" + cpu.getAddressAsString(address));
          return 0;
        }
        public void stopCommand(CommandContext context) {
          cpu.removeWatchPoint(address, monitor);
        }
      });

      // watch: report (or break on) reads/writes of a memory range.
      ch.registerCommand("watch",
          new BasicAsyncCommand("add a write/read watch to a given address or symbol", "<address or symbol> [length] [char | hex | break]") {
        // mode 0 = verbose report, 10 = report + breakpoint,
        // otherwise a Utils formatting mode for raw dumps.
        int mode = 0;
        int address = 0;
        int length = 1;
        MemoryMonitor monitor;
        public int executeCommand(final CommandContext context) {
          address = context.getArgumentAsAddress(0);
          if (address < 0) {
            context.err.println("unknown symbol: " + context.getArgument(0));
            return -1;
          }
          if (context.getArgumentCount() > 1) {
            for (int i = 1; i < context.getArgumentCount(); i++) {
              String modeStr = context.getArgument(i);
              if (Character.isDigit(modeStr.charAt(0))) {
                length = Integer.parseInt(modeStr);
              } else if ("char".equals(modeStr)) {
                mode = Utils.ASCII_UNMODIFIED; // 4
              } else if ("break".equals(modeStr)) {
                mode = 10;
              } else if ("hex".equals(modeStr)) {
                mode = Utils.HEX; // 2
              }
            }
          }
          if (length < 1) {
            context.err.println("please specify a length of at least one byte");
            return -1;
          }
          monitor = new MemoryMonitor.Adapter() {
            private void cpuAction(AccessType type, int adr, int data) {
              if (mode == 0 || mode == 10) {
                int pc = cpu.getPC();
                String adrStr = getSymOrAddr(cpu, context, adr);
                String pcStr = getSymOrAddrELF(cpu, getELF(), pc);
                String op = "op";
                if (type == AccessType.READ) {
                  op = "Read";
                } else if (type == AccessType.WRITE){
                  op = "Write";
                }
                context.out.println("*** " + op + " from " + pcStr +
                    ": " + adrStr + " = 0x" + Utils.hex(data, 4));
                if (mode == 10) {
                  cpu.triggBreakpoint();
                }
              } else {
                // Raw dump mode: print the whole watched range (or just the
                // accessed value when watching a single byte).
                if (length > 1) {
                  Memory mem = cpu.getMemory();
                  for (int i = address; i < address + length; i++) {
                    context.out.print(Utils.toString(mem.get(i, AccessMode.BYTE), Utils.BYTE, mode));
                  }
                  context.out.println();
                } else {
                  context.out.print(Utils.toString(data, Utils.BYTE, mode));
                }
              }
            }
            @Override
            public void notifyReadBefore(int addr, AccessMode mode, AccessType type) {
              cpuAction(AccessType.READ, addr, cpu.getMemory().get(addr, mode));
            }
            @Override
            public void notifyWriteBefore(int dstAddress, int data, AccessMode mode) {
              cpuAction(AccessType.WRITE, dstAddress, data);
            }
          };
          for (int i = 0; i < length; i++) {
            cpu.addWatchPoint(address + i, monitor);
          }
          if (length > 1) {
            context.err.println("Watch set at $" + cpu.getAddressAsString(address) + " - $" + cpu.getAddressAsString(address + length - 1));
          } else {
            context.err.println("Watch set at $" + cpu.getAddressAsString(address));
          }
          return 0;
        }
        public void stopCommand(CommandContext context) {
          for (int i = 0; i < length; i++) {
            cpu.removeWatchPoint(address + i, monitor);
          }
          context.exit(0);
        }
      });

      // watchreg: report writes to a CPU register ("int" prints the raw value only).
      ch.registerCommand("watchreg",
          new BasicAsyncCommand("add a write watch to a given register", "<register> [int]") {
        int watchMode = 0;
        int register = 0;
        RegisterMonitor monitor;
        public int executeCommand(final CommandContext context) {
          register = context.getArgumentAsRegister(0);
          if (register < 0) {
            return -1;
          }
          if (context.getArgumentCount() > 1) {
            String modeStr = context.getArgument(1);
            if ("int".equals(modeStr)) {
              watchMode = 1;
            } else {
              context.err.println("illegal argument: " + modeStr);
              return -1;
            }
          }
          monitor = new RegisterMonitor.Adapter() {
            @Override
            public void notifyWriteBefore(int register, int data, AccessMode mode) {
              if (watchMode == 0) {
                int pc = cpu.getPC();
                String adrStr = getRegisterName(register);
                String pcStr = getSymOrAddrELF(cpu, getELF(), pc);
                context.out.println("*** Write from " + pcStr +
                    ": " + adrStr + " = " + data);
              } else {
                context.out.println(data);
              }
            }
          };
          cpu.addRegisterWriteMonitor(register, monitor);
          context.err.println("Watch set for register " + getRegisterName(register));
          return 0;
        }
        public void stopCommand(CommandContext context) {
          cpu.removeRegisterWriteMonitor(register, monitor);
        }
      });

      // symbol: list map-table symbols matching a regular expression.
      ch.registerCommand("symbol", new BasicCommand("list matching symbols", "<regexp>") {
        public int executeCommand(final CommandContext context) {
          String regExp = context.getArgument(0);
          MapEntry[] entries = context.getMapTable().getEntries(regExp);
          if (entries.length == 0) {
            context.err.println("Could not find any symbols matching '" + regExp + '\'');
          } else {
            for (MapEntry mapEntry : entries) {
              int address = mapEntry.getAddress();
              String file = mapEntry.getFile();
              if (file == null) {
                file = "(unspecified)";
              }
              context.out.println(" " + mapEntry.getName() + " at $"
                  + cpu.getAddressAsString(address) + " ($"
                  + Utils.hex8(cpu.getMemory().get(address, AccessMode.BYTE))
                  + ' ' + Utils.hex8(cpu.getMemory().get(address + 1, AccessMode.BYTE)) + ") "
                  + mapEntry.getType() + " in file " + file);
            }
          }
          return 0;
        }
      });

      // debug: toggle/show the CPU debug flag.
      ch.registerCommand("debug", new BasicCommand("set debug to on or off", "[0/1]") {
        public int executeCommand(final CommandContext context) {
          if (context.getArgumentCount() > 0) {
            cpu.setDebug(context.getArgumentAsBoolean(0));
          }
          context.out.println("Debug is set to " + cpu.getDebug());
          return 0;
        }
      });

      // line: map an address/symbol back to source line info via ELF debug data.
      ch.registerCommand("line", new BasicCommand("print line number of address/symbol", "<address or symbol>") {
        public int executeCommand(final CommandContext context) {
          int adr = context.getArgumentAsAddress(0);
          DebugInfo di = getELF().getDebugInfo(adr);
          if (di == null) {
            /* quick hack to test next address too... - since something seems to be off by one sometimes... */
            di = getELF().getDebugInfo(adr + 1);
          }
          if (di != null) {
            di.getLine();
            context.out.println(di);
          } else {
            context.err.println("No line number found for: " + context.getArgument(0));
          }
          return 0;
        }
      });

      if (node != null) {
        ch.registerCommand("stop", new BasicCommand("stop the CPU", "") {
          public int executeCommand(CommandContext context) {
            if (!cpu.isRunning()) {
              context.err.println("CPU is not running");
              return 1;
            }
            node.stop();
            context.out.println("CPU stopped at: $" + cpu.getAddressAsString(cpu.getPC()));
            return 0;
          }
        });

        ch.registerCommand("start", new BasicCommand("start the CPU", "") {
          public int executeCommand(CommandContext context) {
            if (cpu.isRunning()) {
              context.err.println("cpu already running");
              return 1;
            }
            node.start();
            return 0;
          }
        });

        // throw: schedule an EmulationException on the CPU's own event queue
        // so it is raised from within the emulation loop.
        ch.registerCommand("throw", new BasicCommand("throw an Emulation Exception", "[message]") {
          public int executeCommand(CommandContext context) {
            final String msg = context.getArgumentCount() > 0 ? context.getArgument(0) : "by request";
            cpu.scheduleCycleEvent(new TimeEvent(0, "EmulationException") {
              @Override public void execute(long t) {
                throw new EmulationException(msg);
              }}, cpu.cycles);
            return 0;
          }
        });

        ch.registerCommand("step", new BasicCommand("single step the CPU", "[number of instructions]") {
          public int executeCommand(CommandContext context) {
            int nr = context.getArgumentCount() > 0 ? context.getArgumentAsInt(0) : 1;
            long cyc = cpu.cycles;
            if (cpu.isRunning()) {
              context.err.println("Can not single step when emulation is running.");
              return -1;
            }
            try {
              node.step(nr);
            } catch (Exception e) {
              e.printStackTrace(context.out);
            }
            context.out.println("CPU stepped to: $" + cpu.getAddressAsString(cpu.getPC()) +
                " in " + (cpu.cycles - cyc) + " cycles (" + cpu.cycles + ")");
            return 0;
          }
        });

        ch.registerCommand("stepmicro", new BasicCommand("single the CPU specified no micros", "<micro skip> <micro step>") {
          public int executeCommand(CommandContext context) {
            long cyc = cpu.cycles;
            if (cpu.isRunning()) {
              context.err.println("Can not single step when emulation is running.");
              return -1;
            }
            long nxt = 0;
            try {
              nxt = cpu.stepMicros(context.getArgumentAsLong(0), context.getArgumentAsLong(1));
            } catch (Exception e) {
              e.printStackTrace(context.out);
            }
            context.out.println("CPU stepped to: $" + cpu.getAddressAsString(cpu.getPC()) +
                " in " + (cpu.cycles - cyc) + " cycles (" + cpu.cycles + ") - next exec time: " + nxt);
            return 0;
          }
        });

        // stack: stack usage derived from the map table's stack/heap bounds.
        ch.registerCommand("stack", new BasicCommand("show stack info", "") {
          public int executeCommand(CommandContext context) {
            int stackEnd = context.getMapTable().heapStartAddress;
            int stackStart = context.getMapTable().stackStartAddress;
            int current = cpu.getSP();
            context.out.println("Current stack: $" + cpu.getAddressAsString(current) + " (" + (stackStart - current) + " used of " + (stackStart - stackEnd) + ')');
            return 0;
          }
        });

        // print: read a (little-endian) 16-bit value; above the peripheral
        // area ($100) the high byte at adr+1 is included as well.
        ch.registerCommand("print", new BasicCommand("print value of an address or symbol", "<address or symbol>") {
          public int executeCommand(CommandContext context) {
            int adr = context.getArgumentAsAddress(0);
            if (adr >= 0) {
              int value = cpu.memory[adr];
              if (adr >= 0x100 && adr + 1 < cpu.MAX_MEM) {
                value |= cpu.memory[adr + 1] << 8;
              }
              context.out.println(context.getArgument(0) + " = $" + Utils.hex16(value));
              return 0;
            }
            context.err.println("unknown symbol: " + context.getArgument(0));
            return 1;
          }
        });

        ch.registerCommand("printreg", new BasicCommand("print value of an register", "[register]") {
          public int executeCommand(CommandContext context) {
            if (context.getArgumentCount() > 0) {
              for (int i = 0, n = context.getArgumentCount(); i < n; i++) {
                int register = context.getArgumentAsRegister(i);
                if (i > 0) {
                  context.out.print((i % 6) == 0 ? "\n" : " ");
                }
                if (register >= 0) {
                  // Fix: label with the parsed register number, not the
                  // argument index (previously "printreg r15" printed "R0=...").
                  context.out.print(getRegisterName(register) + "=$" + Utils.hex(cpu.getRegister(register), 4));
                } else {
                  context.out.print(context.getArgument(i) + "=<not a register>");
                }
              }
            } else {
              // No arguments: dump all 16 registers, six per line.
              for (int i = 0; i < 16; i++) {
                if (i > 0) {
                  context.out.print((i % 6) == 0 ? "\n" : " ");
                }
                context.out.print(getRegisterName(i) + "=$" + Utils.hex(cpu.getRegister(i), 4));
              }
            }
            context.out.println();
            return 0;
          }
        });

        ch.registerCommand("reset", new BasicCommand("reset the CPU", "") {
          public int executeCommand(CommandContext context) {
            cpu.reset();
            return 0;
          }
        });

        // time: emulated vs wall-clock time since the last invocation.
        ch.registerCommand("time", new BasicCommand("print the elapse time and cycles", "") {
          public int executeCommand(CommandContext context) {
            long time = (long)cpu.getTimeMillis();
            long wallDiff = System.currentTimeMillis() - lastWall;
            context.out.println("Emulated time elapsed: " + time + "(ms) since last: " + (time - lastCall) + " ms" + " wallTime: " +
                wallDiff + " ms speed factor: " +
                (wallDiff == 0 ? "N/A" : "" + (time - lastCall) / wallDiff));
            lastCall = time;
            lastWall = System.currentTimeMillis();
            return 0;
          }
        });

        // mem: dump memory as bytes/words, ASCII, or disassembly.
        ch.registerCommand("mem", new BasicCommand("dump memory", "<start address> <num_entries> [type] [hex|char|dis]") {
          public int executeCommand(final CommandContext context) {
            int start = context.getArgumentAsAddress(0);
            if (start < 0) {
              context.err.println("Illegal start address: "
                  + context.getArgument(0));
              return 1;
            }
            int count = context.getArgumentAsInt(1);
            int mode = Utils.DEC;
            int type = Utils.UBYTE;
            boolean signed = false;
            if (context.getArgumentCount() > 2) {
              int pos = 2;
              int acount = context.getArgumentCount();
              if (acount > 4) acount = 4;
              while (pos < acount) {
                String tS = context.getArgument(pos++);
                if ("ubyte".equals(tS)) {
                  // default type - nothing to change
                } else if ("byte".equals(tS)) {
                  type = Utils.BYTE;
                } else if ("word".equals(tS)) {
                  type = Utils.WORD;
                } else if ("uword".equals(tS)) {
                  type = Utils.UWORD;
                } else if ("hex".equals(tS)) {
                  mode = Utils.HEX;
                } else if ("char".equals(tS)) {
                  mode = Utils.ASCII;
                  type = Utils.BYTE;
                } else if ("dis".equals(tS)) {
                  mode = Utils.DIS_ASM;
                  type = Utils.WORD;
                }
              }
            }
            // Does not yet handle signed data...
            DisAsm disAsm = cpu.getDisAsm();
            for (int i = 0; i < count; i++) {
              if (mode == Utils.DIS_ASM) {
                DbgInstruction dbg = disAsm.disassemble(start, cpu.memory, cpu.reg, new DbgInstruction(),
                    0);
                String fkn;
                if ((fkn = dbg.getFunction()) != null) {
                  context.out.println("//// " + fkn);
                }
                context.out.println(dbg.getASMLine(false));
                start += dbg.getSize();
              } else {
                int data = 0;
                data = cpu.memory[start++];
                if (Utils.size(type) == 2) {
                  // Words are little-endian: low byte first.
                  data = data + (cpu.memory[start++] << 8);
                }
                context.out.print((mode != Utils.ASCII ? " " : "") +
                    Utils.toString(data, type, mode));
              }
            }
            context.out.println();
            return 0;
          }
        });

        // mset: write decimal values or ASCII strings into memory.
        ch.registerCommand("mset", new BasicCommand("set memory", "<address> [type] <value> [value ...]") {
          public int executeCommand(final CommandContext context) {
            int count = context.getArgumentCount();
            int adr = context.getArgumentAsAddress(0);
            String arg2 = context.getArgument(1);
            int type = Utils.BYTE;
            int mode = Utils.DEC;
            boolean typeRead = false;
            if (count > 2) {
              if ("char".equals(arg2)) {
                mode = Utils.ASCII;
                typeRead = true;
              }
              if ("word".equals(arg2)) {
                type = Utils.WORD;
                typeRead = true;
              }
            }
            for (int i = typeRead ? 2 : 1; i < count; i++) {
              if (mode == Utils.DEC) {
                int val = context.getArgumentAsInt(i);
                // Promote to a word write when requested or when the value
                // does not fit in one byte.
                AccessMode accessMode = Utils.size(type) == 2 || val > 0xff ? AccessMode.WORD : AccessMode.BYTE;
                try {
                  cpu.getMemory().set(adr, val, accessMode);
                  adr += accessMode.bytes;
                } catch (EmulationException e) {
                  e.printStackTrace(context.out);
                }
              } else if (mode == Utils.ASCII) {
                String data = context.getArgument(i);
                Memory mem = cpu.getMemory();
                for (int j = 0; j < data.length(); j++) {
                  mem.set(adr++, data.charAt(j), AccessMode.BYTE);
                }
              }
            }
            return 0;
          }});

        /******************************************************
         * handle external memory (flash, etc).
         ******************************************************/
        ch.registerCommand("xmem", new BasicCommand("dump flash memory", "<start address> <num_entries> [type]") {
          public int executeCommand(final CommandContext context) {
            se.sics.mspsim.chip.Memory xmem = DebugCommands.this.registry.getComponent(se.sics.mspsim.chip.Memory.class, "xmem");
            if (xmem == null) {
              context.err.println("No xmem component registered");
              return 0;
            }
            int start = context.getArgumentAsAddress(0);
            int count = context.getArgumentAsInt(1);
            int size = 1; // unsigned byte
            boolean signed = false;
            if (context.getArgumentCount() > 2) {
              String tS = context.getArgument(2);
              if ("byte".equals(tS)) {
                signed = true;
              } else if ("word".equals(tS)) {
                signed = true;
                size = 2;
              } else if ("uword".equals(tS)) {
                size = 2;
              }
            }
            // Does not yet handle signed data...
            for (int i = 0; i < count; i++) {
              int data = 0;
              data = xmem.readByte(start++);
              if (size == 2) {
                data = data + (xmem.readByte(start++) << 8);
              }
              context.out.print(" " + data);
            }
            context.out.println();
            return 0;
          }
        });

        ch.registerCommand("xmset", new BasicCommand("set memory", "<address> <value> [type]") {
          public int executeCommand(final CommandContext context) {
            se.sics.mspsim.chip.Memory xmem = DebugCommands.this.registry.getComponent(se.sics.mspsim.chip.Memory.class, "xmem");
            if (xmem == null) {
              context.err.println("No xmem component registered");
              return 0;
            }
            int adr = context.getArgumentAsAddress(0);
            int val = context.getArgumentAsInt(1);
            boolean word = val > 0xff;
            if (word) {
              xmem.writeByte(adr, val >> 8);
              val = val & 0xff;
              adr++;
            }
            xmem.writeByte(adr, val & 0xff);
            return 0;
          }});

        ch.registerCommand("gdbstubs", new BasicCommand("open up a gdb stubs server for GDB remote debugging", "port") {
          private GDBStubs stubs = null;
          public int executeCommand(CommandContext context) {
            if (stubs != null) {
              context.err.println("GDBStubs already open");
            } else {
              int port = context.getArgumentAsInt(0);
              stubs = new GDBStubs();
              stubs.setupServer(cpu, port);
            }
            return 0;
          }
        });

        // log: without arguments lists loggables; with arguments raises their
        // log level to DEBUG and streams their output until the command stops.
        ch.registerCommand("log", new BasicAsyncCommand("log a loggable object", "[loggable...]" ) {
          private Loggable[] logs;
          private int[] logLevels;
          private LogListener logListener;
          @Override
          public int executeCommand(final CommandContext context) {
            if (context.getArgumentCount() == 0) {
              Loggable[] loggable = cpu.getLoggables();
              for (Loggable unit : loggable) {
                String id = unit.getID();
                String name = unit.getName();
                // Fix: compare String content, not references ("==").
                if (id.equals(name)) {
                  context.out.println(" " + id);
                } else {
                  context.out.println(" " + id + " (" + name + ')');
                }
              }
              context.exit(0);
              return 0;
            }
            final Loggable[] logs = new Loggable[context.getArgumentCount()];
            for(int i = 0, n = context.getArgumentCount(); i < n; i++) {
              logs[i] = cpu.getLoggable(context.getArgument(i));
              if (logs[i] == null) {
                context.err.println("Can not find loggable '" + context.getArgument(i) + '\'');
                return 1;
              }
            }
            logListener = new LogListener() {
              boolean isLogging(Loggable source) {
                for(Loggable log : logs) {
                  if (source == log) {
                    return true;
                  }
                }
                return false;
              }
              @Override
              public void log(Loggable source, String message) {
                if (isLogging(source)) {
                  context.out.println(source.getID() + ": " + message);
                }
              }
              @Override
              public void logw(Loggable source, WarningType type,
                  String message) throws EmulationException {
                if (isLogging(source)) {
                  context.out.println("# " + source.getID() + "[" + type + "]: " + message);
                }
              }
            };
            this.logs = logs;
            cpu.getLogger().addLogListener(logListener);
            // Remember the previous log levels so stopCommand can restore them.
            logLevels = new int[logs.length];
            int i = 0;
            for(Loggable log : logs) {
              logLevels[i++] = log.getLogLevel();
              log.setLogLevel(Loggable.DEBUG);
            }
            return 0;
          }
          public void stopCommand(CommandContext context) {
            if (logListener != null) {
              cpu.getLogger().removeLogListener(logListener);
              logListener = null;
            }
            if (logs != null) {
              int i = 0;
              for(Loggable log : logs) {
                // Only restore levels we raised; leave any user changes alone.
                if (log.getLogLevel() == Loggable.DEBUG) {
                  log.setLogLevel(logLevels[i]);
                }
                i++;
              }
            }
          }
        });

        // trace: configure the back-trace buffer or disassemble its contents.
        ch.registerCommand("trace", new BasicCommand("store a trace of execution positions.", "[trace size | show]") {
          @Override
          public int executeCommand(CommandContext context) {
            if (context.getArgumentCount() > 0) {
              if ("show".equals(context.getArgument(0))) {
                int size = cpu.getTraceSize();
                if (size > 0) {
                  DisAsm disAsm = cpu.getDisAsm();
                  for (int i = 0; i < size; i++) {
                    // Oldest entry first.
                    int pc = cpu.getBackTrace(size - 1 - i);
                    DbgInstruction inst = disAsm.getDbgInstruction(pc, cpu);
                    inst.setPos(pc);
                    context.out.println(inst.getASMLine(false));
                  }
                  return 0;
                }
              } else {
                int newSize = context.getArgumentAsInt(0, -1);
                if (newSize < 0) {
                  return 1;
                }
                cpu.setTrace(newSize);
              }
            }
            context.out.println("Trace size is set to " + cpu.getTraceSize() + " positions.");
            return 0;
          }
        });

        ch.registerCommand("events", new BasicCommand("print event queues", "") {
          @Override
          public int executeCommand(CommandContext context) {
            cpu.printEventQueues(context.out);
            return 0;
          }
        });
      }
    }
  }

  /** Symbol name for an address via the map table, or "$<hex>" as fallback. */
  private static String getSymOrAddr(MSP430 cpu, CommandContext context, int adr) {
    MapEntry me = context.getMapTable().getEntry(adr);
    if (me != null) {
      return me.getName();
    }
    return '$' + cpu.getAddressAsString(adr);
  }

  /** Source location for an address via ELF debug info, or "$<hex>" as fallback. */
  private static String getSymOrAddrELF(MSP430 cpu, ELF elf, int adr) {
    DebugInfo me = elf.getDebugInfo(adr);
    if (me != null) {
      return me.toString();
    }
    return '$' + cpu.getAddressAsString(adr);
  }

  /** Conventional name for a register number, or "R<n>" when out of range. */
  private static String getRegisterName(int register) {
    if (register >= 0 && register < MSP430Constants.REGISTER_NAMES.length) {
      return MSP430Constants.REGISTER_NAMES[register];
    }
    return "R" + register;
  }
}
|
3e0e7602ff825dd0cfb4e8189fb54fcc3ce950cf | 1,366 | java | Java | IloveSeoul_source_from_JADX/gnu/mapping/KeyPair.java | MobileSeoul/2017seoul-55 | 55f8f8a97bf954b47753e2d750dac723354bcbab | [
"MIT"
] | 5 | 2019-05-23T01:00:37.000Z | 2019-05-24T08:16:54.000Z | IloveSeoul_source_from_JADX/gnu/mapping/KeyPair.java | MobileSeoul/2017seoul-55 | 55f8f8a97bf954b47753e2d750dac723354bcbab | [
"MIT"
] | null | null | null | IloveSeoul_source_from_JADX/gnu/mapping/KeyPair.java | MobileSeoul/2017seoul-55 | 55f8f8a97bf954b47753e2d750dac723354bcbab | [
"MIT"
] | 1 | 2019-05-23T01:00:38.000Z | 2019-05-23T01:00:38.000Z | 25.296296 | 101 | 0.56735 | 6,137 | package gnu.mapping;
/**
 * An {@link EnvironmentKey} pairing a {@link Symbol} name with a property
 * object. The name is compared by equality (and may be null); the property
 * is always compared by identity.
 */
public class KeyPair implements EnvironmentKey {
    Symbol name;
    Object property;

    public KeyPair(Symbol name, Object property) {
        this.name = name;
        this.property = property;
    }

    public Symbol getKeySymbol() {
        return this.name;
    }

    public Object getKeyProperty() {
        return this.property;
    }

    public final boolean matches(EnvironmentKey key) {
        return Symbol.equals(key.getKeySymbol(), this.name) && key.getKeyProperty() == this.property;
    }

    public final boolean matches(Symbol symbol, Object property) {
        return Symbol.equals(symbol, this.name) && property == this.property;
    }

    @Override
    public boolean equals(Object x) {
        if (!(x instanceof KeyPair)) {
            return false;
        }
        KeyPair other = (KeyPair) x;
        // Property compared by identity; name by equality, tolerating null.
        return this.property == other.property
            && (this.name == null ? other.name == null : this.name.equals(other.name));
    }

    @Override
    public int hashCode() {
        // Null-safe: equals() accepts a null name, so hashCode() must not
        // throw NullPointerException for the same object (hashCode/equals
        // contract). Property hashed by identity, matching equals().
        return (this.name == null ? 0 : this.name.hashCode())
            ^ System.identityHashCode(this.property);
    }

    @Override
    public String toString() {
        return "KeyPair[sym:" + this.name + " prop:" + this.property + "]";
    }
}
|
3e0e7608ba99328f4d9d38382159e9734c340d91 | 2,245 | java | Java | src/main/java/dev/nocalhost/plugin/intellij/ui/action/workload/ResetAction.java | fatjyc/nocalhost-intellij-plugin | 5b190e3d0ba993fbfc545df7d3ee5e5b2af519c3 | [
"Apache-2.0"
] | null | null | null | src/main/java/dev/nocalhost/plugin/intellij/ui/action/workload/ResetAction.java | fatjyc/nocalhost-intellij-plugin | 5b190e3d0ba993fbfc545df7d3ee5e5b2af519c3 | [
"Apache-2.0"
] | null | null | null | src/main/java/dev/nocalhost/plugin/intellij/ui/action/workload/ResetAction.java | fatjyc/nocalhost-intellij-plugin | 5b190e3d0ba993fbfc545df7d3ee5e5b2af519c3 | [
"Apache-2.0"
] | null | null | null | 40.089286 | 118 | 0.722494 | 6,138 | package dev.nocalhost.plugin.intellij.ui.action.workload;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import dev.nocalhost.plugin.intellij.commands.NhctlCommand;
import dev.nocalhost.plugin.intellij.commands.data.NhctlResetOptions;
import dev.nocalhost.plugin.intellij.exception.NocalhostExecuteCmdException;
import dev.nocalhost.plugin.intellij.exception.NocalhostNotifier;
import dev.nocalhost.plugin.intellij.ui.tree.node.ResourceNode;
/**
 * Tree context-menu action that resets a workload's development state by
 * running {@code nhctl reset} for the node's deployment in a background task.
 */
public class ResetAction extends AnAction {
    private static final Logger LOG = Logger.getInstance(ResetAction.class);

    private final Project project;
    private final ResourceNode node;

    public ResetAction(Project project, ResourceNode node) {
        super("Reset", "", AllIcons.General.Reset);
        this.project = project;
        this.node = node;
    }

    @Override
    public void actionPerformed(@NotNull AnActionEvent event) {
        // Attach the background task to the current project (was null) so its
        // progress indicator appears in the right project window.
        ProgressManager.getInstance().run(new Task.Backgroundable(project, "Resetting " + node.resourceName(), false) {
            @Override
            public void run(@NotNull ProgressIndicator indicator) {
                final NhctlCommand nhctlCommand = ServiceManager.getService(NhctlCommand.class);
                NhctlResetOptions opts = new NhctlResetOptions(node.devSpace());
                opts.setDeployment(node.resourceName());
                try {
                    nhctlCommand.reset(node.applicationName(), opts);
                    NocalhostNotifier.getInstance(project).notifySuccess(node.resourceName() + " reset complete", "");
                } catch (IOException | InterruptedException | NocalhostExecuteCmdException e) {
                    if (e instanceof InterruptedException) {
                        // Restore the interrupt flag so the platform can
                        // observe the cancellation.
                        Thread.currentThread().interrupt();
                    }
                    LOG.error("error occurred while resetting workload", e);
                }
            }
        });
    }
}
|
3e0e762322354a250597b32e8c4fd112d8c42c20 | 4,577 | java | Java | src/main/java/com/bytatech/ayoos/consultation/client/activiti/model/TaskActionRequest.java | BYTA-TECH/CONSULTATION-MICROSERVICE | 843894551193ac8852a26772e191067e74dea30f | [
"Apache-2.0"
] | null | null | null | src/main/java/com/bytatech/ayoos/consultation/client/activiti/model/TaskActionRequest.java | BYTA-TECH/CONSULTATION-MICROSERVICE | 843894551193ac8852a26772e191067e74dea30f | [
"Apache-2.0"
] | 1 | 2021-06-10T17:33:11.000Z | 2021-06-10T17:33:11.000Z | src/main/java/com/bytatech/ayoos/consultation/client/activiti/model/TaskActionRequest.java | BYTA-TECH/consultation | d111ec450cff31cc7712a026a17bfcc1f039141a | [
"Apache-2.0"
] | 1 | 2019-10-04T11:04:01.000Z | 2019-10-04T11:04:01.000Z | 25.427778 | 141 | 0.701988 | 6,139 | package com.bytatech.ayoos.consultation.client.activiti.model;
import java.util.Objects;
import com.bytatech.ayoos.consultation.client.activiti.model.RestVariable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
import javax.validation.constraints.*;
/**
 * TaskActionRequest
 *
 * Generated OpenAPI model for an Activiti REST task-action request
 * (e.g. complete/claim). Do not hand-edit logic: regenerate from the
 * API specification instead. Carries the action name, an optional
 * assignee, and regular/transient process variables.
 */
@Validated
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.SpringCodegen", date = "2020-01-24T11:45:50.209+05:30[Asia/Colombo]")
public class TaskActionRequest {
    // Name of the task action to perform (as defined by the Activiti REST API).
    @JsonProperty("action")
    private String action = null;

    // Optional assignee for actions that (re)assign the task.
    @JsonProperty("assignee")
    private String assignee = null;

    // Process variables to set when executing the action; lazily initialized.
    @JsonProperty("variables")
    @Valid
    private List<RestVariable> variables = null;

    // Transient variables (not persisted) to pass to the action; lazily initialized.
    @JsonProperty("transientVariables")
    @Valid
    private List<RestVariable> transientVariables = null;

    // Fluent setter: returns this for call chaining.
    public TaskActionRequest action(String action) {
        this.action = action;
        return this;
    }

    /**
     * Get action
     * @return action
     **/
    @ApiModelProperty(value = "")
    public String getAction() {
        return action;
    }

    public void setAction(String action) {
        this.action = action;
    }

    // Fluent setter: returns this for call chaining.
    public TaskActionRequest assignee(String assignee) {
        this.assignee = assignee;
        return this;
    }

    /**
     * Get assignee
     * @return assignee
     **/
    @ApiModelProperty(value = "")
    public String getAssignee() {
        return assignee;
    }

    public void setAssignee(String assignee) {
        this.assignee = assignee;
    }

    // Fluent setter: returns this for call chaining.
    public TaskActionRequest variables(List<RestVariable> variables) {
        this.variables = variables;
        return this;
    }

    // Fluent add: creates the list on first use, then appends.
    public TaskActionRequest addVariablesItem(RestVariable variablesItem) {
        if (this.variables == null) {
            this.variables = new ArrayList<RestVariable>();
        }
        this.variables.add(variablesItem);
        return this;
    }

    /**
     * Get variables
     * @return variables
     **/
    @ApiModelProperty(value = "")
    @Valid
    public List<RestVariable> getVariables() {
        return variables;
    }

    public void setVariables(List<RestVariable> variables) {
        this.variables = variables;
    }

    // Fluent setter: returns this for call chaining.
    public TaskActionRequest transientVariables(List<RestVariable> transientVariables) {
        this.transientVariables = transientVariables;
        return this;
    }

    // Fluent add: creates the list on first use, then appends.
    public TaskActionRequest addTransientVariablesItem(RestVariable transientVariablesItem) {
        if (this.transientVariables == null) {
            this.transientVariables = new ArrayList<RestVariable>();
        }
        this.transientVariables.add(transientVariablesItem);
        return this;
    }

    /**
     * Get transientVariables
     * @return transientVariables
     **/
    @ApiModelProperty(value = "")
    @Valid
    public List<RestVariable> getTransientVariables() {
        return transientVariables;
    }

    public void setTransientVariables(List<RestVariable> transientVariables) {
        this.transientVariables = transientVariables;
    }

    // Field-by-field equality over all four properties (null-safe).
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TaskActionRequest taskActionRequest = (TaskActionRequest) o;
        return Objects.equals(this.action, taskActionRequest.action) &&
            Objects.equals(this.assignee, taskActionRequest.assignee) &&
            Objects.equals(this.variables, taskActionRequest.variables) &&
            Objects.equals(this.transientVariables, taskActionRequest.transientVariables);
    }

    @Override
    public int hashCode() {
        return Objects.hash(action, assignee, variables, transientVariables);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class TaskActionRequest {\n");
        sb.append("    action: ").append(toIndentedString(action)).append("\n");
        sb.append("    assignee: ").append(toIndentedString(assignee)).append("\n");
        sb.append("    variables: ").append(toIndentedString(variables)).append("\n");
        sb.append("    transientVariables: ").append(toIndentedString(transientVariables)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
3e0e78f544546a6f6f24ae3211f9b9befbefd084 | 1,040 | java | Java | src/main/java/hotchemi/com/github/Cache.java | hotchemi/lrucache | 410539213c44b8794ca4111f678714a1c2395749 | [
"Apache-2.0"
] | 86 | 2015-01-12T02:21:48.000Z | 2021-12-16T07:27:36.000Z | src/main/java/hotchemi/com/github/Cache.java | withyou/LruCache | 410539213c44b8794ca4111f678714a1c2395749 | [
"Apache-2.0"
] | 2 | 2019-09-01T06:52:00.000Z | 2019-10-12T01:44:37.000Z | src/main/java/hotchemi/com/github/Cache.java | withyou/LruCache | 410539213c44b8794ca4111f678714a1c2395749 | [
"Apache-2.0"
] | 38 | 2015-05-04T03:04:06.000Z | 2022-01-01T12:16:36.000Z | 19.259259 | 77 | 0.544231 | 6,140 | package hotchemi.com.github;
/**
 * A memory cache interface mapping keys of type {@code K} to values of
 * type {@code V}, with explicit memory-size accounting.
 *
 * @author Shintaro Katafuchi
 */
public interface Cache<K, V> {
    /**
     * Gets the value for the specified {@code key}, or returns {@code null}
     * if the key is not present.
     *
     * @param key key
     * @return the cached value, or {@code null} if absent.
     */
    V get(K key);

    /**
     * Puts a value in the cache for the specified {@code key}.
     *
     * @param key   key
     * @param value value to cache
     * @return the previous value associated with {@code key}, or {@code null}.
     */
    V put(K key, V value);

    /**
     * Removes the entry for {@code key} if it exists, or returns {@code null}.
     *
     * @param key key
     * @return the previous value, or {@code null} if there was none.
     */
    V remove(K key);

    /**
     * Clears all the entries in the cache.
     */
    void clear();

    /**
     * Returns the max memory size of the cache.
     *
     * @return max memory size.
     */
    int getMaxMemorySize();

    /**
     * Returns the current memory size of the cache.
     *
     * @return current memory size.
     */
    int getMemorySize();
}
|
3e0e79c9a4625f2fdb28192d5d0b37e2ca567c9e | 1,344 | java | Java | src/main/java/org/soulwing/credo/service/crypto/bc/BcCertificationRequestBuilderFactory.java | soulwing/credo | 261d79a32d57ca6482cd75371a552d745ca6e430 | [
"Apache-2.0"
] | null | null | null | src/main/java/org/soulwing/credo/service/crypto/bc/BcCertificationRequestBuilderFactory.java | soulwing/credo | 261d79a32d57ca6482cd75371a552d745ca6e430 | [
"Apache-2.0"
] | 24 | 2016-03-02T15:27:12.000Z | 2016-09-23T20:44:09.000Z | src/main/java/org/soulwing/credo/service/crypto/bc/BcCertificationRequestBuilderFactory.java | soulwing/credo | 261d79a32d57ca6482cd75371a552d745ca6e430 | [
"Apache-2.0"
] | null | null | null | 29.866667 | 76 | 0.763393 | 6,141 | /*
* File created on Mar 21, 2014
*
* Copyright (c) 2014 Virginia Polytechnic Institute and State University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.soulwing.credo.service.crypto.bc;
import javax.enterprise.context.ApplicationScoped;
import org.soulwing.credo.service.crypto.CertificationRequestBuilder;
import org.soulwing.credo.service.crypto.CertificationRequestBuilderFactory;
/**
 * A {@link CertificationRequestBuilderFactory} that produces
 * {@link BcCertificationRequestBuilder} objects.
 * <p>
 * Application-scoped CDI bean; each call creates a fresh Bouncy Castle
 * backed builder, so the factory itself holds no mutable state.
 *
 * @author Carl Harris
 */
@ApplicationScoped
public class BcCertificationRequestBuilderFactory
    implements CertificationRequestBuilderFactory {

    /**
     * {@inheritDoc}
     * <p>
     * Returns a new, independent {@link BcCertificationRequestBuilder}
     * on every invocation.
     */
    @Override
    public CertificationRequestBuilder newBuilder() {
        return new BcCertificationRequestBuilder();
    }
}
3e0e7a0a3f31e65904f8219ef37b2b5fe6ded8c3 | 510 | java | Java | src/main/java/com/mvm/webflux/usecase/PlaylistSearchAllImpl.java | marcosmariano/webflux | 60f90d2e4f93c64871a292effee10164daede194 | [
"MIT"
] | 1 | 2022-03-01T07:06:11.000Z | 2022-03-01T07:06:11.000Z | src/main/java/com/mvm/webflux/usecase/PlaylistSearchAllImpl.java | marcosmariano/webflux | 60f90d2e4f93c64871a292effee10164daede194 | [
"MIT"
] | null | null | null | src/main/java/com/mvm/webflux/usecase/PlaylistSearchAllImpl.java | marcosmariano/webflux | 60f90d2e4f93c64871a292effee10164daede194 | [
"MIT"
] | null | null | null | 26.842105 | 64 | 0.792157 | 6,142 | package com.mvm.webflux.usecase;
import com.mvm.webflux.domain.Playlist;
import com.mvm.webflux.infra.service.PlaylistService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Flux;
@Component
public class PlaylistSearchAllImpl implements PlaylistSearchAll{

    // NOTE(review): field injection; constructor injection would ease testing,
    // but adding a constructor removes the implicit no-arg one — confirm no
    // caller instantiates this class directly before changing.
    @Autowired
    PlaylistService playlistService;

    /**
     * Streams every stored playlist by delegating to the playlist service.
     *
     * @return a reactive {@link Flux} emitting all playlists
     */
    @Override
    public Flux<Playlist> searchAll() {
        return playlistService.findAll();
    }
}
|
3e0e7b22f490df17ae29b731cbd557d967f22f9e | 2,329 | java | Java | src/main/java/com/microsoft/graph/requests/CloudPcAuditEventGetAuditActivityTypesCollectionPage.java | microsoftgraph/msgraph-beta-sdk-java | 2d88d8e031a209cc2152834fd76151e99ea07fad | [
"MIT"
] | 11 | 2020-05-20T08:44:09.000Z | 2022-02-23T21:45:27.000Z | src/main/java/com/microsoft/graph/requests/CloudPcAuditEventGetAuditActivityTypesCollectionPage.java | microsoftgraph/msgraph-beta-sdk-java | 2d88d8e031a209cc2152834fd76151e99ea07fad | [
"MIT"
] | 85 | 2019-12-18T17:57:37.000Z | 2022-03-29T12:49:01.000Z | src/main/java/com/microsoft/graph/requests/CloudPcAuditEventGetAuditActivityTypesCollectionPage.java | microsoftgraph/msgraph-beta-sdk-java | 2d88d8e031a209cc2152834fd76151e99ea07fad | [
"MIT"
] | 7 | 2020-04-30T20:17:37.000Z | 2022-02-10T08:25:26.000Z | 48.520833 | 234 | 0.756548 | 6,143 | // Template Source: BaseMethodCollectionPage.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.requests;
import com.microsoft.graph.http.IRequestBuilder;
import com.microsoft.graph.core.ClientException;
import java.util.Arrays;
import java.util.EnumSet;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
import com.microsoft.graph.http.BaseCollectionPage;
import com.microsoft.graph.requests.CloudPcAuditEventGetAuditActivityTypesCollectionRequestBuilder;
import com.microsoft.graph.requests.CloudPcAuditEventGetAuditActivityTypesCollectionPage;
import com.microsoft.graph.requests.CloudPcAuditEventGetAuditActivityTypesCollectionResponse;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Cloud Pc Audit Event Get Audit Activity Types Collection Page.
*/
// NOTE: tool-generated type (see file header) — edits here are overwritten on
// regeneration. Pages hold plain String elements (audit activity type names),
// as fixed by the BaseCollectionPage<String, ...> type arguments.
public class CloudPcAuditEventGetAuditActivityTypesCollectionPage extends BaseCollectionPage<String, CloudPcAuditEventGetAuditActivityTypesCollectionRequestBuilder> {

    /**
     * A collection page for String.
     *
     * @param response The serialized CloudPcAuditEventGetAuditActivityTypesCollectionResponse from the service
     * @param builder The request builder for the next collection page
     */
    public CloudPcAuditEventGetAuditActivityTypesCollectionPage(@Nonnull final CloudPcAuditEventGetAuditActivityTypesCollectionResponse response, @Nonnull final CloudPcAuditEventGetAuditActivityTypesCollectionRequestBuilder builder) {
        super(response, builder);
    }

    /**
     * Creates the collection page for CloudPcAuditEventGetAuditActivityTypes
     *
     * @param pageContents the contents of this page
     * @param nextRequestBuilder the request builder for the next page; null when this is the last page
     */
    public CloudPcAuditEventGetAuditActivityTypesCollectionPage(@Nonnull final java.util.List<String> pageContents, @Nullable final CloudPcAuditEventGetAuditActivityTypesCollectionRequestBuilder nextRequestBuilder) {
        super(pageContents, nextRequestBuilder);
    }
}
|
3e0e7b2a84c96e4701f239849a1b69af8358222f | 27,907 | java | Java | seatunnel-config/src/main/java/org/apache/seatunnel/config/impl/ConfigParser.java | wntp/seatunnel | cbfdb7d30e012736da05e3067b94711d7fa64dd2 | [
"Apache-2.0"
] | 1,771 | 2017-08-06T08:40:47.000Z | 2021-09-16T13:59:07.000Z | seatunnel-config/src/main/java/org/apache/seatunnel/config/impl/ConfigParser.java | wntp/seatunnel | cbfdb7d30e012736da05e3067b94711d7fa64dd2 | [
"Apache-2.0"
] | 340 | 2017-08-15T06:33:06.000Z | 2021-09-16T15:59:42.000Z | seatunnel-config/src/main/java/org/apache/seatunnel/config/impl/ConfigParser.java | wntp/seatunnel | cbfdb7d30e012736da05e3067b94711d7fa64dd2 | [
"Apache-2.0"
] | 581 | 2017-08-14T05:56:19.000Z | 2021-09-15T01:46:31.000Z | 47.3 | 154 | 0.508869 | 6,144 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seatunnel.config.impl;
import org.apache.seatunnel.config.ConfigException;
import org.apache.seatunnel.config.ConfigException.BugOrBroken;
import org.apache.seatunnel.config.ConfigIncludeContext;
import org.apache.seatunnel.config.ConfigOrigin;
import org.apache.seatunnel.config.ConfigParseOptions;
import org.apache.seatunnel.config.ConfigSyntax;
import org.apache.seatunnel.config.ConfigValueFactory;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
final class ConfigParser {
/**
 * Entry point: converts a parsed node-tree document into a config value tree.
 *
 * @param document the node representation of the parsed input
 * @param origin origin (source description / line info) attached to produced values
 * @param options parse options; the syntax flavor and configured includer are used
 * @param includeContext context handed to the includer when resolving includes
 * @return the root value produced from the document
 */
static AbstractConfigValue parse(ConfigNodeRoot document,
                                 ConfigOrigin origin, ConfigParseOptions options,
                                 ConfigIncludeContext includeContext) {
    ParseContext context = new ParseContext(options.getSyntax(), origin, document,
            SimpleIncluder.makeFull(options.getIncluder()), includeContext);
    return context.parse();
}
private static final class ParseContext {
private int lineNumber;
private final ConfigNodeRoot document;
private final FullIncluder includer;
private final ConfigIncludeContext includeContext;
private final ConfigSyntax flavor;
private final ConfigOrigin baseOrigin;
private final LinkedList<Path> pathStack;
// the number of lists we are inside; this is used to detect the "cannot
// generate a reference to a list element" problem, and once we fix that
// problem we should be able to get rid of this variable.
int arrayCount;
// Builds a single-use parse context; parse() is then called once to walk
// the document. lineNumber starts at 1 and is advanced on newline tokens.
ParseContext(ConfigSyntax flavor, ConfigOrigin origin, ConfigNodeRoot document,
        FullIncluder includer, ConfigIncludeContext includeContext) {
    lineNumber = 1;
    this.document = document;
    this.flavor = flavor;
    this.baseOrigin = origin;
    this.includer = includer;
    this.includeContext = includeContext;
    // stack of keys from root to the setting currently being parsed
    this.pathStack = new LinkedList<Path>();
    this.arrayCount = 0;
}
// merge a bunch of adjacent values into one
// value; change unquoted text into a string
// value.
// merge a bunch of adjacent values into one
// value; change unquoted text into a string
// value.
private AbstractConfigValue parseConcatenation(ConfigNodeConcatenation n) {
    // this trick is not done in JSON
    if (flavor == ConfigSyntax.JSON) {
        throw new BugOrBroken("Found a concatenation node in JSON");
    }

    List<AbstractConfigValue> values = new ArrayList<AbstractConfigValue>();

    for (AbstractConfigNode node : n.children()) {
        AbstractConfigValue v = null;
        // non-value children (e.g. whitespace tokens) are skipped
        if (node instanceof AbstractConfigNodeValue) {
            v = parseValue((AbstractConfigNodeValue) node, null);
            values.add(v);
        }
    }

    return ConfigConcatenation.concatenate(values);
}
// Origin pinned to the line currently being parsed.
private SimpleConfigOrigin lineOrigin() {
    return ((SimpleConfigOrigin) baseOrigin).withLineNumber(lineNumber);
}
// Convenience overload: parse error with no underlying cause.
private ConfigException parseError(String message) {
    return parseError(message, null);
}
// Builds a ConfigException.Parse anchored at the current line's origin,
// preserving the cause for diagnostics.
private ConfigException parseError(String message, Throwable cause) {
    return new ConfigException.Parse(lineOrigin(), message, cause);
}
// Joins the key stack into the absolute path of the setting currently
// being parsed; only valid while inside at least one field.
private Path fullCurrentPath() {
    // pathStack has top of stack at front
    if (pathStack.isEmpty()) {
        throw new ConfigException.BugOrBroken("Bug in parser; tried to get current path when at root");
    }
    return new Path(pathStack.descendingIterator());
}
/**
 * Converts one value node into an AbstractConfigValue, dispatching on node
 * type. Objects that sit directly under a seatunnel top-level section key
 * (input/filter/output/source/transform/sink) are parsed with the
 * seatunnel-specific rules that turn them into a list of plugin configs.
 *
 * @param n the value node to convert
 * @param comments pending comments to prepend to the value's origin; cleared once consumed (may be null)
 * @return the parsed value
 */
private AbstractConfigValue parseValue(AbstractConfigNodeValue n, List<String> comments) {
    AbstractConfigValue v;
    // snapshot to verify array-nesting bookkeeping stays balanced
    int startingArrayCount = arrayCount;
    if (n instanceof ConfigNodeSimpleValue) {
        v = ((ConfigNodeSimpleValue) n).value();
    } else if (n instanceof ConfigNodeObject) {
        // the key we are currently under decides which object parser to use
        Path path = pathStack.peekFirst();
        if (path != null
                && ("input".equals(path.first())
                || "filter".equals(path.first())
                || "output".equals(path.first())
                || "source".equals(path.first())
                || "transform".equals(path.first())
                || "sink".equals(path.first()))) {
            v = parseObjectForSeatunnel((ConfigNodeObject) n);
        } else {
            v = parseObject((ConfigNodeObject) n);
        }
    } else if (n instanceof ConfigNodeArray) {
        v = parseArray((ConfigNodeArray) n);
    } else if (n instanceof ConfigNodeConcatenation) {
        v = parseConcatenation((ConfigNodeConcatenation) n);
    } else {
        throw parseError("Expecting a value but got wrong node type: " + n.getClass());
    }

    if (comments != null && !comments.isEmpty()) {
        v = v.withOrigin(v.origin().prependComments(new ArrayList<String>(comments)));
        comments.clear();
    }

    if (arrayCount != startingArrayCount) {
        throw new ConfigException.BugOrBroken("Bug in config parser: unbalanced array count");
    }

    return v;
}
/**
 * Wraps a value in nested single-entry objects so that a multi-element key
 * like "foo.bar" yields { "foo" : { "bar" : value } }.
 *
 * @param path the (possibly multi-element) path under which to place the value
 * @param value the leaf value
 * @return the outermost wrapping object
 */
private static AbstractConfigObject createValueUnderPath(Path path,
        AbstractConfigValue value) {
    // for path foo.bar, we are creating
    // { "foo" : { "bar" : value } }
    List<String> keys = new ArrayList<String>();

    String key = path.first();
    Path remaining = path.remainder();
    while (key != null) {
        keys.add(key);
        if (remaining == null) {
            break;
        } else {
            key = remaining.first();
            remaining = remaining.remainder();
        }
    }

    // the withComments(null) is to ensure comments are only
    // on the exact leaf node they apply to.
    // a comment before "foo.bar" applies to the full setting
    // "foo.bar" not also to "foo"
    ListIterator<String> i = keys.listIterator(keys.size());
    String deepest = i.previous();
    AbstractConfigObject o = new SimpleConfigObject(value.origin().withComments(null),
            Collections.<String, AbstractConfigValue>singletonMap(
                    deepest, value));
    // wrap from the innermost key outward
    while (i.hasPrevious()) {
        Map<String, AbstractConfigValue> m = Collections.<String, AbstractConfigValue>singletonMap(
                i.previous(), o);
        o = new SimpleConfigObject(value.origin().withComments(null), m);
    }

    return o;
}
/**
 * Resolves an include statement and merges the included object's entries
 * into {@code values}; an existing entry for a key becomes the fallback of
 * the newly included value.
 *
 * @param values mutable map of the object currently being built
 * @param n the include node (url()/file()/classpath()/heuristic form)
 */
private void parseInclude(Map<String, AbstractConfigValue> values, ConfigNodeInclude n) {
    boolean isRequired = n.isRequired();
    // required includes must not be "allow missing"
    ConfigIncludeContext cic = includeContext.setParseOptions(includeContext.parseOptions().setAllowMissing(!isRequired));

    AbstractConfigObject obj;
    switch (n.kind()) {
        case URL:
            URL url;
            try {
                url = new URL(n.name());
            } catch (MalformedURLException e) {
                throw parseError("include url() specifies an invalid URL: " + n.name(), e);
            }
            obj = (AbstractConfigObject) includer.includeURL(cic, url);
            break;

        case FILE:
            obj = (AbstractConfigObject) includer.includeFile(cic,
                    new File(n.name()));
            break;

        case CLASSPATH:
            obj = (AbstractConfigObject) includer.includeResources(cic, n.name());
            break;

        case HEURISTIC:
            obj = (AbstractConfigObject) includer
                    .include(cic, n.name());
            break;

        default:
            throw new ConfigException.BugOrBroken("should not be reached");
    }

    // we really should make this work, but for now throwing an
    // exception is better than producing an incorrect result.
    // See https://github.com/lightbend/config/issues/160
    if (arrayCount > 0 && obj.resolveStatus() != ResolveStatus.RESOLVED) {
        throw parseError("Due to current limitations of the config parser, when an include statement is nested inside a list value, "
                + "${} substitutions inside the included file cannot be resolved correctly. Either move the include outside of the list value or "
                + "remove the ${} statements from the included file.");
    }

    // included paths are relative to the position of the include statement
    if (!pathStack.isEmpty()) {
        Path prefix = fullCurrentPath();
        obj = obj.relativized(prefix);
    }

    for (String key : obj.keySet()) {
        AbstractConfigValue v = obj.get(key);
        AbstractConfigValue existing = values.get(key);
        if (existing != null) {
            values.put(key, v.withFallback(existing));
        } else {
            values.put(key, v);
        }
    }
}
/**
 * Seatunnel-specific object parsing: instead of returning an object, returns
 * a LIST of the object's values, where each value gets a fallback entry
 * {@code plugin_name -> <its key>}. Used for the input/filter/output/
 * source/transform/sink sections (see parseValue).
 *
 * NOTE(review): this is a near-copy of parseObject below; the two differ only
 * in the key/value handling at the end — consider extracting the shared loop
 * if both are ever touched again.
 *
 * @param n the object node to parse
 * @return list of plugin config values, each carrying its plugin_name
 */
private SimpleConfigList parseObjectForSeatunnel(ConfigNodeObject n) {
    Map<String, AbstractConfigValue> values = new LinkedHashMap<String, AbstractConfigValue>();
    List<AbstractConfigValue> valuesList = new ArrayList<AbstractConfigValue>();
    SimpleConfigOrigin objectOrigin = lineOrigin();

    boolean lastWasNewline = false;

    ArrayList<AbstractConfigNode> nodes = new ArrayList<AbstractConfigNode>(n.children());
    List<String> comments = new ArrayList<String>();
    for (int i = 0; i < nodes.size(); i++) {
        AbstractConfigNode node = nodes.get(i);
        if (node instanceof ConfigNodeComment) {
            lastWasNewline = false;
            comments.add(((ConfigNodeComment) node).commentText());
        } else if (node instanceof ConfigNodeSingleToken
                && Tokens.isNewline(((ConfigNodeSingleToken) node).token())) {
            lineNumber++;
            if (lastWasNewline) {
                // Drop all comments if there was a blank line and start a new comment block
                comments.clear();
            }
            lastWasNewline = true;
        } else if (flavor != ConfigSyntax.JSON && node instanceof ConfigNodeInclude) {
            parseInclude(values, (ConfigNodeInclude) node);
            lastWasNewline = false;
        } else if (node instanceof ConfigNodeField) {
            lastWasNewline = false;
            Path path = ((ConfigNodeField) node).path().value();
            comments.addAll(((ConfigNodeField) node).comments());

            // path must be on-stack while we parse the value
            pathStack.push(path);
            if (((ConfigNodeField) node).separator() == Tokens.PLUS_EQUALS) {
                // we really should make this work, but for now throwing
                // an exception is better than producing an incorrect
                // result. See
                // https://github.com/lightbend/config/issues/160
                if (arrayCount > 0) {
                    throw parseError("Due to current limitations of the config parser, += does not work nested inside a list. "
                            + "+= expands to a ${} substitution and the path in ${} cannot currently refer to list elements. "
                            + "You might be able to move the += outside of the list and then refer to it from inside the list with ${}.");
                }
                // because we will put it in an array after the fact so
                // we want this to be incremented during the parseValue
                // below in order to throw the above exception.
                arrayCount += 1;
            }

            AbstractConfigNodeValue valueNode;
            AbstractConfigValue newValue;

            valueNode = ((ConfigNodeField) node).value();

            // comments from the key token go to the value token
            newValue = parseValue(valueNode, comments);

            if (((ConfigNodeField) node).separator() == Tokens.PLUS_EQUALS) {
                arrayCount -= 1;

                // a += b desugars to a = ${?a} [b]
                List<AbstractConfigValue> concat = new ArrayList<AbstractConfigValue>(2);
                AbstractConfigValue previousRef = new ConfigReference(newValue.origin(),
                        new SubstitutionExpression(fullCurrentPath(), true /* optional */));
                AbstractConfigValue list = new SimpleConfigList(newValue.origin(),
                        Collections.singletonList(newValue));
                concat.add(previousRef);
                concat.add(list);
                newValue = ConfigConcatenation.concatenate(concat);
            }

            // Grab any trailing comments on the same line
            // (note: mutates the loop index i to skip consumed nodes)
            if (i < nodes.size() - 1) {
                i++;
                while (i < nodes.size()) {
                    if (nodes.get(i) instanceof ConfigNodeComment) {
                        ConfigNodeComment comment = (ConfigNodeComment) nodes.get(i);
                        newValue = newValue.withOrigin(newValue.origin().appendComments(
                                Collections.singletonList(comment.commentText())));
                        break;
                    } else if (nodes.get(i) instanceof ConfigNodeSingleToken) {
                        ConfigNodeSingleToken curr = (ConfigNodeSingleToken) nodes.get(i);
                        if (curr.token() == Tokens.COMMA || Tokens.isIgnoredWhitespace(curr.token())) {
                            // keep searching, as there could still be a comment
                        } else {
                            i--;
                            break;
                        }
                    } else {
                        i--;
                        break;
                    }
                    i++;
                }
            }

            pathStack.pop();

            String key = path.first();
            Path remaining = path.remainder();

            if (remaining == null) {
                // attach the plugin's key as its plugin_name fallback
                Map<String, String> m = Collections.singletonMap("plugin_name", key);
                newValue = newValue.withFallback(ConfigValueFactory.fromMap(m));
                values.put(key, newValue);
                valuesList.add(newValue);
            } else {
                if (flavor == ConfigSyntax.JSON) {
                    throw new ConfigException.BugOrBroken(
                            "somehow got multi-element path in JSON mode");
                }
                AbstractConfigObject obj = createValueUnderPath(
                        remaining, newValue);
                Map<String, String> m = Collections.singletonMap("plugin_name", key);
                obj = obj.withFallback(ConfigValueFactory.fromMap(m));
                values.put(key, obj);
                valuesList.add(obj);
            }
        }
    }
    return new SimpleConfigList(objectOrigin, valuesList);
}
/**
 * Parses an object node into a SimpleConfigObject. Handles comment
 * attachment, includes, += desugaring, trailing same-line comments, and
 * duplicate keys (error in JSON mode; merged via withFallback otherwise).
 *
 * @param n the object node to parse
 * @return the parsed object
 */
private AbstractConfigObject parseObject(ConfigNodeObject n) {
    Map<String, AbstractConfigValue> values = new LinkedHashMap<String, AbstractConfigValue>();
    SimpleConfigOrigin objectOrigin = lineOrigin();

    boolean lastWasNewline = false;

    ArrayList<AbstractConfigNode> nodes = new ArrayList<AbstractConfigNode>(n.children());
    List<String> comments = new ArrayList<String>();
    for (int i = 0; i < nodes.size(); i++) {
        AbstractConfigNode node = nodes.get(i);
        if (node instanceof ConfigNodeComment) {
            lastWasNewline = false;
            comments.add(((ConfigNodeComment) node).commentText());
        } else if (node instanceof ConfigNodeSingleToken
                && Tokens.isNewline(((ConfigNodeSingleToken) node).token())) {
            lineNumber++;
            if (lastWasNewline) {
                // Drop all comments if there was a blank line and start a new comment block
                comments.clear();
            }
            lastWasNewline = true;
        } else if (flavor != ConfigSyntax.JSON && node instanceof ConfigNodeInclude) {
            parseInclude(values, (ConfigNodeInclude) node);
            lastWasNewline = false;
        } else if (node instanceof ConfigNodeField) {
            lastWasNewline = false;
            Path path = ((ConfigNodeField) node).path().value();
            comments.addAll(((ConfigNodeField) node).comments());

            // path must be on-stack while we parse the value
            pathStack.push(path);
            if (((ConfigNodeField) node).separator() == Tokens.PLUS_EQUALS) {
                // we really should make this work, but for now throwing
                // an exception is better than producing an incorrect
                // result. See
                // https://github.com/lightbend/config/issues/160
                if (arrayCount > 0) {
                    throw parseError("Due to current limitations of the config parser, += does not work nested inside a list. "
                            + "+= expands to a ${} substitution and the path in ${} cannot currently refer to list elements. "
                            + "You might be able to move the += outside of the list and then refer to it from inside the list with ${}.");
                }
                // because we will put it in an array after the fact so
                // we want this to be incremented during the parseValue
                // below in order to throw the above exception.
                arrayCount += 1;
            }

            AbstractConfigNodeValue valueNode;
            AbstractConfigValue newValue;

            valueNode = ((ConfigNodeField) node).value();

            // comments from the key token go to the value token
            newValue = parseValue(valueNode, comments);

            if (((ConfigNodeField) node).separator() == Tokens.PLUS_EQUALS) {
                arrayCount -= 1;

                // a += b desugars to a = ${?a} [b]
                List<AbstractConfigValue> concat = new ArrayList<AbstractConfigValue>(2);
                AbstractConfigValue previousRef = new ConfigReference(newValue.origin(),
                        new SubstitutionExpression(fullCurrentPath(), true /* optional */));
                AbstractConfigValue list = new SimpleConfigList(newValue.origin(),
                        Collections.singletonList(newValue));
                concat.add(previousRef);
                concat.add(list);
                newValue = ConfigConcatenation.concatenate(concat);
            }

            // Grab any trailing comments on the same line
            // (note: mutates the loop index i to skip consumed nodes)
            if (i < nodes.size() - 1) {
                i++;
                while (i < nodes.size()) {
                    if (nodes.get(i) instanceof ConfigNodeComment) {
                        ConfigNodeComment comment = (ConfigNodeComment) nodes.get(i);
                        newValue = newValue.withOrigin(newValue.origin().appendComments(
                                Collections.singletonList(comment.commentText())));
                        break;
                    } else if (nodes.get(i) instanceof ConfigNodeSingleToken) {
                        ConfigNodeSingleToken curr = (ConfigNodeSingleToken) nodes.get(i);
                        if (curr.token() == Tokens.COMMA || Tokens.isIgnoredWhitespace(curr.token())) {
                            // keep searching, as there could still be a comment
                        } else {
                            i--;
                            break;
                        }
                    } else {
                        i--;
                        break;
                    }
                    i++;
                }
            }

            pathStack.pop();

            String key = path.first();
            Path remaining = path.remainder();

            if (remaining == null) {
                AbstractConfigValue existing = values.get(key);
                if (existing != null) {
                    // In strict JSON, dups should be an error; while in
                    // our custom config language, they should be merged
                    // if the value is an object (or substitution that
                    // could become an object).
                    if (flavor == ConfigSyntax.JSON) {
                        throw parseError("JSON does not allow duplicate fields: '"
                                + key + "' was already seen at "
                                + existing.origin().description());
                    } else {
                        newValue = newValue.withFallback(existing);
                    }
                }
                values.put(key, newValue);
            } else {
                if (flavor == ConfigSyntax.JSON) {
                    throw new ConfigException.BugOrBroken(
                            "somehow got multi-element path in JSON mode");
                }
                AbstractConfigObject obj = createValueUnderPath(
                        remaining, newValue);
                AbstractConfigValue existing = values.get(key);
                if (existing != null) {
                    obj = obj.withFallback(existing);
                }
                values.put(key, obj);
            }
        }
    }

    return new SimpleConfigObject(objectOrigin, values);
}
/**
 * Parses an array node into a SimpleConfigList. A pending element value is
 * held in {@code v} and only committed (with its accumulated comments) when
 * the following newline or next element is seen.
 *
 * @param n the array node
 * @return the parsed list
 */
private SimpleConfigList parseArray(ConfigNodeArray n) {
    // track list nesting; checked by parseValue and the += handling
    arrayCount += 1;

    SimpleConfigOrigin arrayOrigin = lineOrigin();
    List<AbstractConfigValue> values = new ArrayList<AbstractConfigValue>();

    boolean lastWasNewLine = false;
    List<String> comments = new ArrayList<String>();

    AbstractConfigValue v = null;

    for (AbstractConfigNode node : n.children()) {
        if (node instanceof ConfigNodeComment) {
            comments.add(((ConfigNodeComment) node).commentText());
            lastWasNewLine = false;
        } else if (node instanceof ConfigNodeSingleToken
                && Tokens.isNewline(((ConfigNodeSingleToken) node).token())) {
            lineNumber++;
            if (lastWasNewLine && v == null) {
                // blank line before any value: discard the comment block
                comments.clear();
            } else if (v != null) {
                // newline terminates the pending element; attach its comments
                values.add(v.withOrigin(v.origin().appendComments(new ArrayList<String>(comments))));
                comments.clear();
                v = null;
            }
            lastWasNewLine = true;
        } else if (node instanceof AbstractConfigNodeValue) {
            lastWasNewLine = false;
            if (v != null) {
                // a new value on the same line commits the previous one
                values.add(v.withOrigin(v.origin().appendComments(new ArrayList<String>(comments))));
                comments.clear();
            }
            v = parseValue((AbstractConfigNodeValue) node, comments);
        }
    }
    // There shouldn't be any comments at this point, but add them just in case
    if (v != null) {
        values.add(v.withOrigin(v.origin().appendComments(new ArrayList<String>(comments))));
    }
    arrayCount -= 1;
    return new SimpleConfigList(arrayOrigin, values);
}
/**
 * Walks the top-level document: skips leading comments/newlines, parses the
 * single root complex value, then attaches any trailing same-line comments.
 * Stops at the first newline after the root value has been produced.
 *
 * @return the root value, or null if the document contained no complex value
 */
AbstractConfigValue parse() {
    AbstractConfigValue result = null;
    ArrayList<String> comments = new ArrayList<String>();
    boolean lastWasNewLine = false;

    for (AbstractConfigNode node : document.children()) {
        if (node instanceof ConfigNodeComment) {
            comments.add(((ConfigNodeComment) node).commentText());
            lastWasNewLine = false;
        } else if (node instanceof ConfigNodeSingleToken) {
            Token t = ((ConfigNodeSingleToken) node).token();
            if (Tokens.isNewline(t)) {
                lineNumber++;
                if (lastWasNewLine && result == null) {
                    // blank line before the root value: drop pending comments
                    comments.clear();
                } else if (result != null) {
                    // first newline after the value: attach comments, done
                    result = result.withOrigin(result.origin().appendComments(new ArrayList<String>(comments)));
                    comments.clear();
                    break;
                }
                lastWasNewLine = true;
            }
        } else if (node instanceof ConfigNodeComplexValue) {
            result = parseValue((ConfigNodeComplexValue) node, comments);
            lastWasNewLine = false;
        }
    }

    return result;
}
}
}
|
3e0e7c09295aedc00d6fae1f35301bfc7ceb6537 | 8,209 | java | Java | EZFM/src/main/java/com/shareworx/ezfm/worktask/areadetails/fileUpload/FilesUpload.java | xiaotian1210/frist-xiaotian | 58468408192c872e24341a73bc69b624439d6918 | [
"Apache-2.0"
] | null | null | null | EZFM/src/main/java/com/shareworx/ezfm/worktask/areadetails/fileUpload/FilesUpload.java | xiaotian1210/frist-xiaotian | 58468408192c872e24341a73bc69b624439d6918 | [
"Apache-2.0"
] | null | null | null | EZFM/src/main/java/com/shareworx/ezfm/worktask/areadetails/fileUpload/FilesUpload.java | xiaotian1210/frist-xiaotian | 58468408192c872e24341a73bc69b624439d6918 | [
"Apache-2.0"
] | null | null | null | 29.007067 | 107 | 0.71044 | 6,145 | package com.shareworx.ezfm.worktask.areadetails.fileUpload;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import org.apache.commons.fileupload.disk.DiskFileItem;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.commons.CommonsMultipartFile;
import com.aliyun.oss.ClientException;
import com.aliyun.oss.OSSClient;
import com.aliyun.oss.OSSException;
import com.aliyun.oss.model.CannedAccessControlList;
import com.aliyun.oss.model.CreateBucketRequest;
import com.shareworx.ezfm.problem.file.model.ProblemFileModel;
import com.shareworx.ezfm.utils.UserUtil;
public class FilesUpload {

    // SECURITY(review): live Aliyun OSS credentials are hard-coded in source.
    // They belong in external configuration / a secrets store and should be
    // rotated. Values kept unchanged here to preserve existing behavior.

    // OSS endpoint (Beijing region)
    String endpoint = "oss-cn-beijing.aliyuncs.com";
    // accessKey pair; see https://ak-console.aliyun.com/#/
    String accessKeyId = "LTAIl2i8v7vXITug";
    String accessKeySecret = "mHJvj1ppuBnvbxGFFHi4GU04wU8VuE";
    // BucketName
    String bucketName = "ezfmcloud";
    // remote upload root directory
    String filePath = "upload/";
    // public URL prefix of the bucket
    String serverPath = "http://ezfmcloud.oss-cn-beijing.aliyuncs.com/";

    /*
     * Private: only the owner may read and write; other users have no access.
     */
    CannedAccessControlList acl_private = CannedAccessControlList.Private;

    /*
     * Public read/write: owner and other users may read and write.
     */
    CannedAccessControlList acl_pub_readwrite = CannedAccessControlList.PublicReadWrite;

    /*
     * Public read: only the owner may write, but anyone may read.
     */
    CannedAccessControlList acl_pub_red = CannedAccessControlList.PublicRead;

    /**
     * Derives a collision-resistant remote file name: current millis plus the
     * original file extension.
     *
     * BUG FIX: the original used substring(indexOf(".") - 1), which kept one
     * extra character of the base name and picked the FIRST dot, producing
     * wrong extensions for names like "a.b.png". lastIndexOf is used instead.
     */
    private static String timestampName(String originalName) {
        int dot = originalName == null ? -1 : originalName.lastIndexOf('.');
        String ext = dot >= 0 ? originalName.substring(dot) : "";
        return System.currentTimeMillis() + ext;
    }

    /**
     * Uploads a stream to the given object path, creating the bucket first if
     * it does not exist. The client is always shut down, even on failure (the
     * original leaked it when putObject threw). Errors are logged and
     * swallowed, matching the original best-effort behavior.
     */
    private void putToOss(String objectPath, java.io.InputStream content) {
        OSSClient ossClient = new OSSClient(endpoint, accessKeyId, accessKeySecret);
        try {
            if (!ossClient.doesBucketExist(bucketName)) {
                // create the bucket with public read/write, as before
                createBucket(ossClient, bucketName, CannedAccessControlList.PublicReadWrite);
            }
            ossClient.putObject(bucketName, objectPath, content);
        } catch (OSSException | ClientException e) {
            e.printStackTrace();
        } finally {
            ossClient.shutdown();
        }
    }

    /** Builds the ProblemFileModel row describing an uploaded attachment. */
    private ProblemFileModel buildFileModel(MultipartFile file, String fullUrl, String tabName,
            String record_id, String create_user, String create_time, String crop) {
        ProblemFileModel fileModel = new ProblemFileModel();
        // NOTE(review): stores file.getName() (the form field name), not the
        // original filename — preserved from the original code; confirm intent.
        fileModel.setFile_name(file.getName());
        fileModel.setFile_path(fullUrl);
        fileModel.setFile_type("1");
        fileModel.setFile_size(file.getSize() + "");
        fileModel.setTable_name(tabName);
        fileModel.setRecord_id(record_id);
        fileModel.setCreate_user(create_user);
        fileModel.setCreate_time(create_time);
        fileModel.setPk_crop(crop);
        return fileModel;
    }

    /**
     * Uploads a work-order attachment to OSS and returns its file record.
     *
     * @param file the uploaded file
     * @param tabName name of the table the attachment belongs to
     * @param record_id id of the related row
     * @param create_time creation timestamp to record
     * @param crop owning company
     * @return a populated {@link ProblemFileModel} (created by the current user)
     */
    public ProblemFileModel upload(MultipartFile file, String tabName, String record_id, String create_time,
            String crop) {
        String newPath = filePath + tabName + "/" + record_id + "/" + timestampName(file.getOriginalFilename());
        try {
            putToOss(newPath, file.getInputStream());
        } catch (IOException e) {
            e.printStackTrace();
        }
        return buildFileModel(file, serverPath + newPath, tabName, record_id,
                UserUtil.getCurrentUserPk(), create_time, crop);
    }

    /**
     * Copies an existing OSS object (identified by its public URL) to a new
     * timestamped key under tabName/pk and returns the new public URL.
     */
    public String copyUpload(String tabName, String pk, String url) {
        String[] segments = url.split("/");
        // strip the public prefix to recover the source object key
        String oldPath = url.replaceFirst(serverPath, "");
        String newPath = filePath + tabName + "/" + pk + "/" + timestampName(segments[segments.length - 1]);
        OSSClient ossClient = new OSSClient(endpoint, accessKeyId, accessKeySecret);
        try {
            if (!ossClient.doesBucketExist(bucketName)) {
                createBucket(ossClient, bucketName, CannedAccessControlList.PublicReadWrite);
            }
            // stream the existing object's content into the new key
            ossClient.putObject(bucketName, newPath, ossClient.getObject(bucketName, oldPath).getObjectContent());
        } catch (OSSException | ClientException e) {
            e.printStackTrace();
        } finally {
            // always release the client (the original leaked it on failure)
            ossClient.shutdown();
        }
        return serverPath + newPath;
    }

    /**
     * Uploads a file under tabName/pk and returns its public URL.
     */
    public String upload(MultipartFile file, String tabName, String pk) {
        String newPath = filePath + tabName + "/" + pk + "/" + timestampName(file.getOriginalFilename());
        try {
            putToOss(newPath, file.getInputStream());
        } catch (IOException e) {
            e.printStackTrace();
        }
        return serverPath + newPath;
    }

    /**
     * Uploads an attachment on behalf of an explicit user. This overload
     * always stores the object as "&lt;millis&gt;.png", as the original did.
     *
     * @param pk_user id recorded as the creating user
     * @return a populated {@link ProblemFileModel}
     */
    public ProblemFileModel upload(MultipartFile file, String tabName, String record_id, String create_time,
            String crop, String pk_user) {
        String newPath = filePath + tabName + "/" + record_id + "/" + System.currentTimeMillis() + ".png";
        try {
            putToOss(newPath, file.getInputStream());
        } catch (IOException e) {
            e.printStackTrace();
        }
        return buildFileModel(file, serverPath + newPath, tabName, record_id,
                pk_user, create_time, crop);
    }

    /**
     * Returns true if the uploaded file is a readable image.
     *
     * BUG FIX: the original initialized {@code flag = false} and never set it
     * to true, so every file — including valid images — was reported as not
     * an image.
     *
     * @param mFile the uploaded file (must be a CommonsMultipartFile)
     * @return true when ImageIO has a reader for the file's content
     */
    public boolean isImage(MultipartFile mFile) {
        CommonsMultipartFile cf = (CommonsMultipartFile) mFile;
        DiskFileItem fi = (DiskFileItem) cf.getFileItem();
        File file = fi.getStoreLocation();
        try {
            ImageInputStream is = ImageIO.createImageInputStream(file);
            if (is == null) {
                return false;
            }
            try {
                return ImageIO.getImageReaders(is).hasNext();
            } finally {
                is.close();
            }
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * Creates a bucket with the given name and canned ACL.
     *
     * @param client an open OSS client
     * @param bucketName bucket to create
     * @param acl access control to apply
     */
    public static void createBucket(OSSClient client, String bucketName, CannedAccessControlList acl) {
        CreateBucketRequest bucketObj = new CreateBucketRequest(null); // name set explicitly below
        bucketObj.setBucketName(bucketName);
        bucketObj.setCannedACL(acl);
        client.createBucket(bucketObj);
    }
}
|
3e0e7e0a1293c209dc19b3af4d7ffec0b2a905ce | 2,646 | java | Java | main/plugins/org.talend.core.repository/src/main/java/org/talend/core/repository/ui/actions/RestoreFolderUtil.java | coheigea/tcommon-studio-se | 681d9a8240b120f5633d751590ac09d31ea8879b | [
"Apache-2.0"
] | 75 | 2015-01-29T03:23:32.000Z | 2022-02-26T07:05:40.000Z | main/plugins/org.talend.core.repository/src/main/java/org/talend/core/repository/ui/actions/RestoreFolderUtil.java | coheigea/tcommon-studio-se | 681d9a8240b120f5633d751590ac09d31ea8879b | [
"Apache-2.0"
] | 813 | 2015-01-21T09:36:31.000Z | 2022-03-30T01:15:29.000Z | main/plugins/org.talend.core.repository/src/main/java/org/talend/core/repository/ui/actions/RestoreFolderUtil.java | coheigea/tcommon-studio-se | 681d9a8240b120f5633d751590ac09d31ea8879b | [
"Apache-2.0"
] | 272 | 2015-01-08T06:47:46.000Z | 2022-02-09T23:22:27.000Z | 35.28 | 122 | 0.654951 | 6,146 | // ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.repository.ui.actions;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.repository.model.IProxyRepositoryFactory;
import org.talend.repository.model.RepositoryNode;
/**
*
* Helper class for restoring folders.
*/
public class RestoreFolderUtil {

    Map<ERepositoryObjectType, Set<String>> foldersMap = new HashMap<ERepositoryObjectType, Set<String>>();

    IProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();

    /**
     * Recreates the folder hierarchy recorded in the node's original item path,
     * creating any missing segments, and returns that path.
     */
    IPath restoreFolderIfNotExists(ERepositoryObjectType type, RepositoryNode node) throws PersistenceException {
        Set<String> knownFolders = getFolders(type);
        String originalPath = node.getObject().getProperty().getItem().getState().getPath();
        return createFolders(knownFolders, new Path(originalPath), type);
    }

    /**
     * Recursively ensures every segment of {@code path} exists, creating
     * parents before children. Returns the (now existing) path, or an empty
     * path once the root is reached.
     */
    private IPath createFolders(Set<String> folders, IPath path, ERepositoryObjectType type) throws PersistenceException {
        if (folders.contains(path.toString())) {
            // Already created during this run (or pre-existing) -- nothing to do.
            return path;
        }
        String lastSegment = path.lastSegment();
        if (lastSegment == null) {
            // Reached the root of the path: recursion stops here.
            return new Path(""); //$NON-NLS-1$
        }
        // Make sure the parent exists before creating this segment.
        IPath parentPath = createFolders(folders, path.removeLastSegments(1), type);
        factory.createFolder(type, parentPath, lastSegment);
        folders.add(path.toString());
        return path;
    }

    /**
     * Returns the cached folder set for {@code type}, loading it from the
     * repository on first access.
     */
    public Set<String> getFolders(ERepositoryObjectType type) throws PersistenceException {
        Set<String> cached = foldersMap.get(type);
        if (cached != null) {
            return cached;
        }
        cached = new HashSet<String>(factory.getFolders(type));
        foldersMap.put(type, cached);
        return cached;
    }

    /** Clears all cached folder sets. */
    public void clear() {
        foldersMap.clear();
    }
}
|
3e0e7f2b478379f51d1dfff3054b6ef0b2b9d275 | 234 | java | Java | spring-framework-demo/src/main/java/com/ant/context/dao/IndexDao1.java | juntao1108/spring-framework-5.0.X | b70d921c407fca88fac918c7251b2c67572d0b58 | [
"Apache-2.0"
] | 13 | 2019-02-18T00:52:02.000Z | 2020-11-17T14:23:14.000Z | spring-framework-demo/src/main/java/com/ant/context/dao/IndexDao1.java | juntao1108/spring-framework-5.0.X | b70d921c407fca88fac918c7251b2c67572d0b58 | [
"Apache-2.0"
] | null | null | null | spring-framework-demo/src/main/java/com/ant/context/dao/IndexDao1.java | juntao1108/spring-framework-5.0.X | b70d921c407fca88fac918c7251b2c67572d0b58 | [
"Apache-2.0"
] | 5 | 2019-08-29T05:45:06.000Z | 2021-03-16T03:44:26.000Z | 15.6 | 34 | 0.65812 | 6,147 | package com.ant.context.dao;
/**
 * Minimal DAO bean used to observe Spring container instantiation: the
 * constructor prints a marker line when the container creates the bean.
 *
 * @author Ant
 * @since 1.0
 */
public class IndexDao1 {

    /** Prints a marker so construction by the container is visible. */
    public IndexDao1() {
        System.out.println("dao1-init");
    }
}
|
3e0e7f438f48a4fc07b0b96788602e310f89b12c | 1,719 | java | Java | src/main/java/com/example/rest/Employee.java | AaronWitter/EmployeeTestApp | b7b6ad17b3b62973229e7bffe1ceec9f2c5d699d | [
"MIT"
] | null | null | null | src/main/java/com/example/rest/Employee.java | AaronWitter/EmployeeTestApp | b7b6ad17b3b62973229e7bffe1ceec9f2c5d699d | [
"MIT"
] | null | null | null | src/main/java/com/example/rest/Employee.java | AaronWitter/EmployeeTestApp | b7b6ad17b3b62973229e7bffe1ceec9f2c5d699d | [
"MIT"
] | null | null | null | 20.464286 | 135 | 0.586969 | 6,148 | /* Copyright © 2017 Oracle and/or its affiliates. All rights reserved. */
package com.example.rest;
public class Employee {

    // Immutable employee attributes, fixed at construction time.
    private final long id;
    private final String firstName;
    private final String lastName;
    private final String email;
    private final String phone;
    private final String birthDate;
    private final String title;
    private final String dept;

    /** Creates an empty employee record (id 0, all text fields blank). */
    public Employee() {
        this(0, "", "", "", "", "", "", "");
    }

    /**
     * Creates a fully populated employee record.
     *
     * @param id        unique employee id
     * @param firstName given name
     * @param lastName  family name
     * @param email     contact email address
     * @param phone     contact phone number
     * @param birthDate birth date (as supplied, not validated here)
     * @param title     job title
     * @param dept      department name
     */
    public Employee(long id, String firstName, String lastName, String email, String phone,
            String birthDate, String title, String dept) {
        this.id = id;
        this.firstName = firstName;
        this.lastName = lastName;
        this.email = email;
        this.phone = phone;
        this.birthDate = birthDate;
        this.title = title;
        this.dept = dept;
    }

    /** @return the unique employee id */
    public long getId() {
        return id;
    }

    /** @return the given name */
    public String getFirstName() {
        return firstName;
    }

    /** @return the family name */
    public String getLastName() {
        return lastName;
    }

    /** @return the contact email address */
    public String getEmail() {
        return email;
    }

    /** @return the contact phone number */
    public String getPhone() {
        return phone;
    }

    /** @return the birth date string */
    public String getBirthDate() {
        return birthDate;
    }

    /** @return the job title */
    public String getTitle() {
        return title;
    }

    /** @return the department name */
    public String getDept() {
        return dept;
    }

    @Override
    public String toString() {
        // Must keep the exact historical field labels and spacing.
        return String.format(
                "ID: %d First Name: %s Last Name: %s EMail: %s Phone: %s Birth Date: %s Title: %s Department: %s",
                id, firstName, lastName, email, phone, birthDate, title, dept);
    }
}
|
3e0e7f90fc3b88044d0f6392329f00ab571375aa | 216 | java | Java | src/main/java/pro/akvel/spring/converter/generator/param/ConstructIndexParam.java | mikheyrojo/spring-xml-to-java-converter | 1ef0578c5e97dd827961e2a0808bed1722116feb | [
"MIT"
] | null | null | null | src/main/java/pro/akvel/spring/converter/generator/param/ConstructIndexParam.java | mikheyrojo/spring-xml-to-java-converter | 1ef0578c5e97dd827961e2a0808bed1722116feb | [
"MIT"
] | null | null | null | src/main/java/pro/akvel/spring/converter/generator/param/ConstructIndexParam.java | mikheyrojo/spring-xml-to-java-converter | 1ef0578c5e97dd827961e2a0808bed1722116feb | [
"MIT"
] | 1 | 2021-09-20T11:03:48.000Z | 2021-09-20T11:03:48.000Z | 16.615385 | 63 | 0.736111 | 6,149 | package pro.akvel.spring.converter.generator.param;
/**
* ConstructIndexParam
*
* @author akvel
* @since 14.08.2020
*/
public interface ConstructIndexParam extends ConstructorParam {

    /**
     * Returns the explicit position of this constructor argument.
     * NOTE(review): the boxed return type suggests {@code null} means
     * "no index configured" -- confirm against callers.
     */
    Integer getIndex();
}
|
3e0e8003bf4cee4668e3f0bd2dda89f76123f69a | 1,729 | java | Java | xap-core/xap-datagrid/src/main/java/com/gigaspaces/internal/utils/Singletons.java | InsightEdge/xap | 25fb3f6de1b25fa2abeefd73ef30fe4eaef07cf6 | [
"Apache-2.0"
] | 2 | 2020-02-05T16:03:24.000Z | 2021-07-26T19:56:54.000Z | xap-core/xap-datagrid/src/main/java/com/gigaspaces/internal/utils/Singletons.java | InsightEdge/xap | 25fb3f6de1b25fa2abeefd73ef30fe4eaef07cf6 | [
"Apache-2.0"
] | null | null | null | xap-core/xap-datagrid/src/main/java/com/gigaspaces/internal/utils/Singletons.java | InsightEdge/xap | 25fb3f6de1b25fa2abeefd73ef30fe4eaef07cf6 | [
"Apache-2.0"
] | 1 | 2021-07-26T19:56:55.000Z | 2021-07-26T19:56:55.000Z | 28.816667 | 88 | 0.665703 | 6,150 | /*
* Copyright (c) 2008-2016, GigaSpaces Technologies, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gigaspaces.internal.utils;
import java.util.HashMap;
import java.util.Map;
/**
* @author Niv Ingberg
* @since 10.0
*/
@com.gigaspaces.api.InternalApi
public class Singletons {
    /** Registry of named singleton instances; all access is guarded by {@code _lock}. */
    private static final Map<String, Object> _instances = new HashMap<String, Object>();
    private static final Object _lock = new Object();

    // static member to prevent gc of class.
    private static final Singletons _instance = new Singletons();

    /**
     * Private ctor: this class is a static registry and is never instantiated
     * by clients.
     */
    private Singletons() {
    }

    /** Returns the instance registered under {@code name}, or {@code null} if absent. */
    public static Object get(String name) {
        synchronized (_lock) {
            return _instances.get(name);
        }
    }

    /**
     * Registers {@code newValue} under {@code name} unless a mapping already
     * exists. Returns the value that is registered after the call.
     */
    public static Object putIfAbsent(String name, Object newValue) {
        synchronized (_lock) {
            Object existing = _instances.get(name);
            if (existing != null || _instances.containsKey(name)) {
                return existing;
            }
            _instances.put(name, newValue);
            return newValue;
        }
    }

    /** Removes the instance registered under {@code name}, if any. */
    public static void remove(String name) {
        synchronized (_lock) {
            _instances.remove(name);
        }
    }
}
|
3e0e8026de69720a203d2561cde308f4c53a869d | 1,911 | java | Java | src/noaa/coastwatch/util/ResamplingMapFactory.java | phollemans/cwutils | b555d096edd284818fa2aa15a73f703693cd23d9 | [
"Unlicense"
] | 1 | 2019-09-09T01:38:45.000Z | 2019-09-09T01:38:45.000Z | src/noaa/coastwatch/util/ResamplingMapFactory.java | phollemans/cwutils | b555d096edd284818fa2aa15a73f703693cd23d9 | [
"Unlicense"
] | 2 | 2019-07-16T01:45:28.000Z | 2019-09-27T04:21:19.000Z | src/noaa/coastwatch/util/ResamplingMapFactory.java | phollemans/cwutils | b555d096edd284818fa2aa15a73f703693cd23d9 | [
"Unlicense"
] | null | null | null | 28.954545 | 82 | 0.580324 | 6,151 | ////////////////////////////////////////////////////////////////////////
/*
File: ResamplingMapFactory.java
Author: Peter Hollemans
Date: 2019/02/04
CoastWatch Software Library and Utilities
Copyright (c) 2019 National Oceanic and Atmospheric Administration
All rights reserved.
Developed by: CoastWatch / OceanWatch
Center for Satellite Applications and Research
http://coastwatch.noaa.gov
For conditions of distribution and use, see the accompanying
license.txt file.
*/
////////////////////////////////////////////////////////////////////////
// Package
// -------
package noaa.coastwatch.util;
// Imports
// -------
import noaa.coastwatch.util.ResamplingMap;
/**
* A <code>ResamplingMapFactory</code> is an interfaace for objects that
* create {@link ResamplingMap} objects on-demand, based on the coordinates
* that need to be remapped. A factory instance should be thread-safe so
* that map objects can be created simultaneously from multiple threads
* if needed.
*
* @author Peter Hollemans
* @since 3.5.0
*/
public interface ResamplingMapFactory {

  ////////////////////////////////////////////////////////////

  /**
   * Creates a new resampling map that covers a specified set of destination
   * coordinates.
   *
   * @param start the starting rectangle coordinates in the destination
   * space as [row, col].
   * @param length the size of the rectangle in destination space as [rows, cols].
   *
   * @return a resampling object that covers the specified destination rectangle
   * or null if the destination rectangle has no corresponding coordinates
   * in the source space.
   */
  public ResamplingMap create (
    int[] start,
    int[] length
  );

  ////////////////////////////////////////////////////////////

} // ResamplingMapFactory interface
////////////////////////////////////////////////////////////////////////
|
3e0e80cfb54c49c401eba6db3aed3763b8e714c4 | 2,883 | java | Java | de.fhdo.ddmm.operationdsl.ui/xtend-gen/de/fhdo/ddmm/operationdsl/ui/highlighting/HighlightingCalculator.java | icsa2019-author1/ICSA2019 | ca32c65f816dc3bf6a46f98ea0e7ac81f4e1f8d1 | [
"MIT"
] | null | null | null | de.fhdo.ddmm.operationdsl.ui/xtend-gen/de/fhdo/ddmm/operationdsl/ui/highlighting/HighlightingCalculator.java | icsa2019-author1/ICSA2019 | ca32c65f816dc3bf6a46f98ea0e7ac81f4e1f8d1 | [
"MIT"
] | null | null | null | de.fhdo.ddmm.operationdsl.ui/xtend-gen/de/fhdo/ddmm/operationdsl/ui/highlighting/HighlightingCalculator.java | icsa2019-author1/ICSA2019 | ca32c65f816dc3bf6a46f98ea0e7ac81f4e1f8d1 | [
"MIT"
] | null | null | null | 42.397059 | 152 | 0.73153 | 6,152 | package de.fhdo.ddmm.operationdsl.ui.highlighting;
import com.google.common.base.Objects;
import de.fhdo.ddmm.operation.OperationNode;
import de.fhdo.ddmm.operation.OperationPackage;
import de.fhdo.ddmm.operationdsl.ui.highlighting.HighlightingConfiguration;
import java.util.List;
import java.util.function.Consumer;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.ide.editor.syntaxcoloring.IHighlightedPositionAcceptor;
import org.eclipse.xtext.ide.editor.syntaxcoloring.ISemanticHighlightingCalculator;
import org.eclipse.xtext.nodemodel.INode;
import org.eclipse.xtext.nodemodel.util.NodeModelUtils;
import org.eclipse.xtext.resource.XtextResource;
import org.eclipse.xtext.util.CancelIndicator;
import org.eclipse.xtext.xbase.lib.Functions.Function1;
import org.eclipse.xtext.xbase.lib.IteratorExtensions;
import org.eclipse.xtext.xbase.lib.Procedures.Procedure1;
/**
* Provide custom syntax highlighting for certain elements.
*/
@SuppressWarnings("all")
public class HighlightingCalculator implements ISemanticHighlightingCalculator {
  /**
   * Entry point invoked by Xtext; delegates to the annotation highlighter.
   * (This class appears to be generated from Xtend source -- prefer editing
   * the .xtend file if one exists.)
   */
  @Override
  public void provideHighlightingFor(final XtextResource resource, final IHighlightedPositionAcceptor acceptor, final CancelIndicator cancelIndicator) {
    this.provideHighlightingForAnnotations(resource, acceptor);
  }

  /**
   * Highlights annotation tokens (the "@..." preceding a node's technologies)
   * for every OperationNode in the resource.
   */
  private void provideHighlightingForAnnotations(final XtextResource resource, final IHighlightedPositionAcceptor acceptor) {
    // Predicate selecting only OperationNode model elements.
    final Function1<EObject, Boolean> _function = (EObject it) -> {
      return Boolean.valueOf((it instanceof OperationNode));
    };
    final Procedure1<EObject> _function_1 = (EObject it) -> {
      // Parse-tree nodes corresponding to the node's "technologies" feature.
      final List<INode> nodes = NodeModelUtils.findNodesForFeature(it,
        OperationPackage.Literals.OPERATION_NODE__TECHNOLOGIES);
      final Consumer<INode> _function_2 = (INode it_1) -> {
        // Walk backwards through previous siblings until the "@" token that
        // opens the annotation is reached, highlighting each token on the way
        // except "(".
        INode currentSibling = it_1.getPreviousSibling();
        boolean annotationBeginReached = false;
        while (((currentSibling != null) && (!annotationBeginReached))) {
          {
            String _text = currentSibling.getText();
            boolean _equals = Objects.equal(_text, "@");
            if (_equals) {
              // "@" marks the start of the annotation; stop after this token.
              annotationBeginReached = true;
            }
            String _text_1 = currentSibling.getText();
            boolean _notEquals = (!Objects.equal(_text_1, "("));
            if (_notEquals) {
              acceptor.addPosition(currentSibling.getOffset(), currentSibling.getLength(),
                HighlightingConfiguration.ANNOTATION_ID);
            }
            currentSibling = currentSibling.getPreviousSibling();
          }
        }
      };
      nodes.forEach(_function_2);
    };
    // Apply the highlighter to every matching element in the resource.
    IteratorExtensions.<EObject>forEach(IteratorExtensions.<EObject>filter(resource.getAllContents(), _function), _function_1);
  }
}
|
3e0e80f6d5bbb2b4742e4b4b8361dccee5ecebd3 | 659 | java | Java | spring-cloud/spring-cloud-netflix-feign/src/main/java/com/baeldung/cloud/netflix/feign/config/CustomErrorDecoder.java | eas5/tutorials | 4b460a9e25f6f0b0292e98144add0ce631a9e05e | [
"MIT"
] | 32,544 | 2015-01-02T16:59:22.000Z | 2022-03-31T21:04:05.000Z | spring-cloud/spring-cloud-netflix-feign/src/main/java/com/baeldung/cloud/netflix/feign/config/CustomErrorDecoder.java | eas5/tutorials | 4b460a9e25f6f0b0292e98144add0ce631a9e05e | [
"MIT"
] | 1,577 | 2015-02-21T17:47:03.000Z | 2022-03-31T14:25:58.000Z | spring-cloud/spring-cloud-netflix-feign/src/main/java/com/baeldung/cloud/netflix/feign/config/CustomErrorDecoder.java | eas5/tutorials | 4b460a9e25f6f0b0292e98144add0ce631a9e05e | [
"MIT"
] | 55,853 | 2015-01-01T07:52:09.000Z | 2022-03-31T21:08:15.000Z | 29.954545 | 70 | 0.676783 | 6,153 | package com.baeldung.cloud.netflix.feign.config;
import com.baeldung.cloud.netflix.feign.exception.BadRequestException;
import com.baeldung.cloud.netflix.feign.exception.NotFoundException;
import feign.Response;
import feign.codec.ErrorDecoder;
public class CustomErrorDecoder implements ErrorDecoder {

    /**
     * Maps an HTTP error response to a client-side exception:
     * 400 becomes {@code BadRequestException}, 404 becomes
     * {@code NotFoundException}, and any other status yields a generic
     * {@code Exception}.
     */
    @Override
    public Exception decode(String methodKey, Response response) {
        int status = response.status();
        if (status == 400) {
            return new BadRequestException();
        }
        if (status == 404) {
            return new NotFoundException();
        }
        return new Exception("Generic error");
    }
}
|
3e0e8133484c4bb912ed96551fbcd60233460d87 | 1,295 | java | Java | yidao-common/src/main/java/com/ruoyi/common/oss/OSSStaticPeram.java | zd938780519/YiDaoProject | 0bcf381e39baa17377979f97b95d9b98b6ee57d6 | [
"MIT"
] | null | null | null | yidao-common/src/main/java/com/ruoyi/common/oss/OSSStaticPeram.java | zd938780519/YiDaoProject | 0bcf381e39baa17377979f97b95d9b98b6ee57d6 | [
"MIT"
] | 1 | 2021-09-20T20:53:20.000Z | 2021-09-20T20:53:20.000Z | yidao-common/src/main/java/com/ruoyi/common/oss/OSSStaticPeram.java | zd938780519/YiDaoProject | 0bcf381e39baa17377979f97b95d9b98b6ee57d6 | [
"MIT"
] | null | null | null | 47.962963 | 136 | 0.756757 | 6,154 | package com.ruoyi.common.oss;
/**
* 阿里云OSS配置
*/
public class OSSStaticPeram {
    // ENDPOINT is the OSS access domain for the region in use. If a bucket was
    // already created in the console, its region domain can be looked up there.
    // The format is "http://oss-cn-<region>.aliyuncs.com" WITHOUT a bucket
    // prefix; "http://<bucket>.oss-cn-<region>.aliyuncs.com" is NOT a valid
    // endpoint here. See:
    // https://help.aliyun.com/document_detail/oss/user_guide/oss_concept/endpoint.html
    public static final String ENDPOINT = "http://oss-cn-shanghai.aliyuncs.com";
    // ACCESS_KEY_ID / ACCESS_KEY_SECRET are the OSS access credentials, created
    // and viewed in the Aliyun console (https://ak-console.aliyun.com/#/).
    // Note: neither value may contain leading/trailing whitespace.
    // SECURITY NOTE(review): real-looking credentials are committed to source
    // control here (an older, superseded pair was previously left in comments
    // and has been removed). These keys should be rotated and loaded from
    // external configuration instead of being hard-coded.
    public static final String ACCESS_KEY_ID = "LTAImyrf18KHUtlP";
    public static final String ACCESS_KEY_SECRET = "kV6XSClCVQA6fYcqd6Mfh8uzhRHcAk";
    // BUCKET_NAME identifies the storage space holding the uploaded objects.
    // Bucket naming rules: lowercase letters, digits and hyphens (-) only; must
    // start with a lowercase letter or digit; length between 3 and 63 bytes.
    public static final String BUCKET_NAME = "yd-2019-test";
}
|
3e0e81dbafcfcd3ad007c628271b5ec8f970c991 | 4,462 | java | Java | ale/src/main/java/org/bytedeco/ale/presets/ale.java | javyxx/javacpp-presets | 5c3297f006d7d1ce31c36181a2bc4016a9a9e8f1 | [
"Apache-2.0"
] | 2 | 2019-05-08T18:56:11.000Z | 2019-12-05T13:17:42.000Z | ale/src/main/java/org/bytedeco/ale/presets/ale.java | javyxx/javacpp-presets | 5c3297f006d7d1ce31c36181a2bc4016a9a9e8f1 | [
"Apache-2.0"
] | null | null | null | ale/src/main/java/org/bytedeco/ale/presets/ale.java | javyxx/javacpp-presets | 5c3297f006d7d1ce31c36181a2bc4016a9a9e8f1 | [
"Apache-2.0"
] | null | null | null | 64.666667 | 158 | 0.680861 | 6,155 | /*
* Copyright (C) 2017 Samuel Audet
*
* Licensed either under the Apache License, Version 2.0, or (at your option)
* under the terms of the GNU General Public License as published by
* the Free Software Foundation (subject to the "Classpath" exception),
* either version 2, or any later version (collectively, the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.gnu.org/licenses/
* http://www.gnu.org/software/classpath/license.html
*
* or as provided in the LICENSE.txt file that accompanied this code.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bytedeco.ale.presets;
import java.nio.ByteBuffer;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.Pointer;
import org.bytedeco.javacpp.annotation.Cast;
import org.bytedeco.javacpp.annotation.Platform;
import org.bytedeco.javacpp.annotation.Properties;
import org.bytedeco.javacpp.tools.Info;
import org.bytedeco.javacpp.tools.InfoMap;
import org.bytedeco.javacpp.tools.InfoMapper;
/**
*
* @author Samuel Audet
*/
@Properties(
value = {
@Platform(value = {"linux-x86", "macosx", "windows"}, compiler = "cpp11", define = "UNIQUE_PTR_NAMESPACE std", link = "ale",
include = {"emucore/m6502/src/bspf/src/bspf.hxx", "emucore/m6502/src/Device.hxx", "emucore/Control.hxx", "emucore/Event.hxx",
"emucore/Random.hxx", "common/Constants.h", "common/Array.hxx", "common/display_screen.h", "emucore/M6532.hxx",
"emucore/Cart.hxx", "emucore/Console.hxx", "emucore/Sound.hxx", "emucore/Settings.hxx", "emucore/OSystem.hxx",
"common/ColourPalette.hpp", "common/ScreenExporter.hpp", "environment/ale_ram.hpp", "environment/ale_screen.hpp",
"environment/ale_state.hpp", "environment/stella_environment_wrapper.hpp", "environment/stella_environment.hpp", "ale_interface.hpp"}),
@Platform(value = "linux-x86", preload = "SDL-1.2@.0", preloadpath = {"/usr/lib32/", "/usr/lib/"}),
@Platform(value = "linux-x86_64", preload = "SDL-1.2@.0", preloadpath = {"/usr/lib64/", "/usr/lib/"}),
@Platform(value = "macosx-x86_64", preload = "SDL-1.2@.0", preloadpath = "/usr/local/lib/"),
@Platform(value = "windows-x86", preload = {"SDL", "libale"}, preloadpath = "/mingw32/bin/"),
@Platform(value = "windows-x86_64", preload = {"SDL", "libale"}, preloadpath = "/mingw64/bin")},
target = "org.bytedeco.ale", global = "org.bytedeco.ale.global.ale")
public class ale implements InfoMapper {
    /**
     * Configures JavaCPP parser mappings for the ALE (Arcade Learning
     * Environment) native headers listed in the class-level @Properties.
     */
    public void map(InfoMap infoMap) {
        // Treat these build-time feature macros as false so guarded code is skipped,
        // and map the BSPF_* helpers as C++-only types (no Java peers generated).
        infoMap.put(new Info("DEBUGGER_SUPPORT", "CHEATCODE_SUPPORT").define(false))
                .put(new Info("BSPF_strcasecmp", "BSPF_strncasecmp", "BSPF_snprintf", "BSPF_vsnprintf").cppTypes())
                // Expose the Common::Array<Resolution> template instance as "ResolutionList".
                .put(new Info("Common::Array<Resolution>").pointerTypes("ResolutionList").define())
                // Hand-written accessor: expose the wrapper's environment reference.
                .put(new Info("StellaEnvironmentWrapper::m_environment").javaText("public native @MemberGetter @ByRef StellaEnvironment m_environment();"))
                // The following members are smart pointers in C++; the generated Java
                // accessors call ".get" to return the raw pointee.
                .put(new Info("StellaEnvironment::getWrapper").javaText("public native @Name(\"getWrapper().get\") StellaEnvironmentWrapper getWrapper();"))
                .put(new Info("ALEInterface::theOSystem").javaText("public native @Name(\"theOSystem.get\") OSystem theOSystem();"))
                .put(new Info("ALEInterface::theSettings").javaText("public native @Name(\"theSettings.get\") Settings theSettings();"))
                .put(new Info("ALEInterface::romSettings").javaText("public native @Name(\"romSettings.get\") RomSettings romSettings();"))
                .put(new Info("ALEInterface::environment").javaText("public native @Name(\"environment.get\") StellaEnvironment environment();"))
                // Skip symbols that are unavailable or unwanted in the Java binding.
                .put(new Info("AtariVox", "Common::Array<Resolution>::contains", "ALEState::reset", "CheatManager", "CommandMenu", "Debugger",
                        "GameController", "Launcher", "Menu", "Properties", "PropertiesSet", "VideoDialog").skip());
    }
}
|
3e0e81f8ffe77f8cfe0113161664d210ca398c2f | 3,298 | java | Java | src/main/java/com/hujiang/project/zhgd/vehicleImg/controller/VehicleImgController.java | xieyagit/zhgd | 45ac870a62c7323a1bb40b99fb8f0c93e38aed0d | [
"MIT"
] | null | null | null | src/main/java/com/hujiang/project/zhgd/vehicleImg/controller/VehicleImgController.java | xieyagit/zhgd | 45ac870a62c7323a1bb40b99fb8f0c93e38aed0d | [
"MIT"
] | null | null | null | src/main/java/com/hujiang/project/zhgd/vehicleImg/controller/VehicleImgController.java | xieyagit/zhgd | 45ac870a62c7323a1bb40b99fb8f0c93e38aed0d | [
"MIT"
] | 1 | 2021-12-06T09:51:17.000Z | 2021-12-06T09:51:17.000Z | 26.384 | 84 | 0.715282 | 6,156 | package com.hujiang.project.zhgd.vehicleImg.controller;
import com.hujiang.common.utils.poi.ExcelUtil;
import com.hujiang.framework.aspectj.lang.annotation.Log;
import com.hujiang.framework.aspectj.lang.enums.BusinessType;
import com.hujiang.framework.web.controller.BaseController;
import com.hujiang.framework.web.domain.AjaxResult;
import com.hujiang.framework.web.page.TableDataInfo;
import com.hujiang.project.zhgd.vehicleImg.domain.VehicleImg;
import com.hujiang.project.zhgd.vehicleImg.service.IVehicleImgService;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * Controller handling CRUD operations for vehicle license-plate photos
 * (original comment: 车牌照片 信息操作处理).
 *
 * @author hujiang
 * @date 2019-06-19
 */
@Controller
@RequestMapping("/moredian/vehicleImg")
public class VehicleImgController extends BaseController
{
    // View-template prefix shared by all pages of this controller.
    private String prefix = "moredian/vehicleImg";

    @Autowired
    private IVehicleImgService vehicleImgService;

    /** Renders the vehicle-photo list page. */
    @RequiresPermissions("moredian:vehicleImg:view")
    @GetMapping()
    public String vehicleImg()
    {
        return prefix + "/vehicleImg";
    }

    /**
     * Queries the paged list of vehicle license-plate photos
     * (original: 查询车牌照片列表).
     */
    @RequiresPermissions("moredian:vehicleImg:list")
    @PostMapping("/list")
    @ResponseBody
    public TableDataInfo list(VehicleImg vehicleImg)
    {
        startPage();
        List<VehicleImg> list = vehicleImgService.selectVehicleImgList(vehicleImg);
        return getDataTable(list);
    }

    /**
     * Exports the vehicle-photo list to an Excel file
     * (original: 导出车牌照片列表).
     */
    @RequiresPermissions("moredian:vehicleImg:export")
    @PostMapping("/export")
    @ResponseBody
    public AjaxResult export(VehicleImg vehicleImg)
    {
        List<VehicleImg> list = vehicleImgService.selectVehicleImgList(vehicleImg);
        ExcelUtil<VehicleImg> util = new ExcelUtil<VehicleImg>(VehicleImg.class);
        return util.exportExcel(list, "vehicleImg");
    }

    /** Renders the "add vehicle photo" page (original: 新增车牌照片). */
    @GetMapping("/add")
    public String add()
    {
        return prefix + "/add";
    }

    /** Persists a new vehicle-photo record (original: 新增保存车牌照片). */
    @RequiresPermissions("moredian:vehicleImg:add")
    @Log(title = "车牌照片", businessType = BusinessType.INSERT)
    @PostMapping("/add")
    @ResponseBody
    public AjaxResult addSave(VehicleImg vehicleImg)
    {
        return toAjax(vehicleImgService.insertVehicleImg(vehicleImg));
    }

    /** Renders the edit page for an existing photo (original: 修改车牌照片). */
    @GetMapping("/edit/{id}")
    public String edit(@PathVariable("id") Integer id, ModelMap mmap)
    {
        VehicleImg vehicleImg = vehicleImgService.selectVehicleImgById(id);
        mmap.put("vehicleImg", vehicleImg);
        return prefix + "/edit";
    }

    /** Persists changes to a vehicle-photo record (original: 修改保存车牌照片). */
    @RequiresPermissions("moredian:vehicleImg:edit")
    @Log(title = "车牌照片", businessType = BusinessType.UPDATE)
    @PostMapping("/edit")
    @ResponseBody
    public AjaxResult editSave(VehicleImg vehicleImg)
    {
        return toAjax(vehicleImgService.updateVehicleImg(vehicleImg));
    }

    /** Deletes vehicle-photo records by id list (original: 删除车牌照片). */
    @RequiresPermissions("moredian:vehicleImg:remove")
    @Log(title = "车牌照片", businessType = BusinessType.DELETE)
    // NOTE(review): "/rmoeve" looks like a typo for "/remove". Left unchanged
    // because existing clients may already call this URL -- confirm before fixing.
    @PostMapping( "/rmoeve")
    @ResponseBody
    public AjaxResult remove(String ids)
    {
        return toAjax(vehicleImgService.deleteVehicleImgByIds(ids));
    }
}
|
3e0e81fff3475f6d8a49a0aaa16340e0df06ee52 | 425 | java | Java | gulimall-sms/src/main/java/com/atguigu/gulimall/sms/dao/CouponSpuCategoryRelationDao.java | ygx12130/gulimail | e78e11c74a7fb2af1f3a51b7b5a1f49aaafb7c70 | [
"Apache-2.0"
] | 1 | 2019-08-05T03:55:52.000Z | 2019-08-05T03:55:52.000Z | gulimall-sms/src/main/java/com/atguigu/gulimall/sms/dao/CouponSpuCategoryRelationDao.java | ygx12130/gulimail | e78e11c74a7fb2af1f3a51b7b5a1f49aaafb7c70 | [
"Apache-2.0"
] | 2 | 2021-04-22T16:55:30.000Z | 2021-09-20T20:52:03.000Z | gulimall-sms/src/main/java/com/atguigu/gulimall/sms/dao/CouponSpuCategoryRelationDao.java | ygx12130/gulimail | e78e11c74a7fb2af1f3a51b7b5a1f49aaafb7c70 | [
"Apache-2.0"
] | null | null | null | 23.5 | 99 | 0.791962 | 6,157 | package com.atguigu.gulimall.sms.dao;
import com.atguigu.gulimall.sms.entity.CouponSpuCategoryRelationEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis mapper for the coupon / SPU-category relation table
 * (original: 优惠券分类关联).
 *
 * @author ygx12130
 * @email kenaa@example.com
 * @date 2019-09-04 17:59:26
 */
@Mapper
public interface CouponSpuCategoryRelationDao extends BaseMapper<CouponSpuCategoryRelationEntity> {
    // All CRUD operations are inherited from the MyBatis-Plus BaseMapper.
}
|
3e0e8296f61a9923423c40881bdac66e46fdc1ec | 54,039 | java | Java | hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java | enis/hbase | c8e9a295c133ef9507a84ab9c70d18563e2c22ad | [
"Apache-2.0"
] | 1 | 2019-01-23T11:13:57.000Z | 2019-01-23T11:13:57.000Z | hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java | enis/hbase | c8e9a295c133ef9507a84ab9c70d18563e2c22ad | [
"Apache-2.0"
] | null | null | null | hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java | enis/hbase | c8e9a295c133ef9507a84ab9c70d18563e2c22ad | [
"Apache-2.0"
] | null | null | null | 44.771334 | 100 | 0.678788 | 6,158 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.wal;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.annotations.VisibleForTesting;
import com.lmax.disruptor.BlockingWaitStrategy;
import com.lmax.disruptor.EventHandler;
import com.lmax.disruptor.ExceptionHandler;
import com.lmax.disruptor.LifecycleAware;
import com.lmax.disruptor.TimeoutException;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.FSHLogProvider;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALPrettyPrinter;
import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.htrace.NullScope;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
/**
* The default implementation of FSWAL.
*/
@InterfaceAudience.Private
public class FSHLog extends AbstractFSWAL<Writer> {
// IMPLEMENTATION NOTES:
//
// At the core is a ring buffer. Our ring buffer is the LMAX Disruptor. It tries to
// minimize synchronizations and volatile writes when multiple contending threads as is the case
// here appending and syncing on a single WAL. The Disruptor is configured to handle multiple
// producers but it has one consumer only (the producers in HBase are IPC Handlers calling append
// and then sync). The single consumer/writer pulls the appends and syncs off the ring buffer.
// When a handler calls sync, it is given back a future. The producer 'blocks' on the future so
// it does not return until the sync completes. The future is passed over the ring buffer from
// the producer/handler to the consumer thread where it does its best to batch up the producer
// syncs so one WAL sync actually spans multiple producer sync invocations. How well the
// batching works depends on the write rate; i.e. we tend to batch more in times of
// high writes/syncs.
//
// Calls to append now also wait until the append has been done on the consumer side of the
// disruptor. We used to not wait but it makes the implementation easier to grok if we have
// the region edit/sequence id after the append returns.
//
// TODO: Handlers need to coordinate appending AND syncing. Can we have the threads contend
// once only? Probably hard given syncs take way longer than an append.
//
// The consumer threads pass the syncs off to multiple syncing threads in a round robin fashion
// to ensure we keep up back-to-back FS sync calls (FS sync calls are the long poll writing the
// WAL). The consumer thread passes the futures to the sync threads for it to complete
// the futures when done.
//
// The 'sequence' in the below is the sequence of the append/sync on the ringbuffer. It
// acts as a sort-of transaction id. It is always incrementing.
//
// The RingBufferEventHandler class hosts the ring buffer consuming code. The threads that
// do the actual FS sync are implementations of SyncRunner. SafePointZigZagLatch is a
// synchronization class used to halt the consumer at a safe point -- just after all outstanding
// syncs and appends have completed -- so the log roller can swap the WAL out under it.
//
// We use ring buffer sequence as txid of FSWALEntry and SyncFuture.
  private static final Log LOG = LogFactory.getLog(FSHLog.class);
  /**
   * The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends
   * and syncs are each put on the ring which means handlers need to smash up against the ring twice
   * (can we make it once only? ... maybe not since time to append is so different from time to sync
   * and sometimes we don't want to sync or we want to async the sync). The ring is where we make
   * sure of our ordering and it is also where we do batching up of handler sync calls.
   */
  private final Disruptor<RingBufferTruck> disruptor;
  /**
   * An executorservice that runs the disruptor AppendEventHandler append executor.
   * Single-threaded: there is exactly one ring buffer consumer (see class comment).
   */
  private final ExecutorService appendExecutor;
  /**
   * This fellow is run by the above appendExecutor service but it is all about batching up appends
   * and syncs; it may shutdown without cleaning out the last few appends or syncs. To guard against
   * this, keep a reference to this handler and do explicit close on way out to make sure all
   * flushed out before we exit.
   */
  private final RingBufferEventHandler ringBufferEventHandler;
  /**
   * FSDataOutputStream associated with the current SequenceFile.writer.
   * NOTE(review): written in doReplaceWriter and read elsewhere without synchronization;
   * presumably visibility is acceptable because rolls are rare -- confirm.
   */
  private FSDataOutputStream hdfs_out;
  // All about log rolling if not enough replicas outstanding.
  // Minimum tolerable replicas, if the actual value is lower than it, rollWriter will be triggered
  private final int minTolerableReplication;
  // If live datanode count is lower than the default replicas value,
  // RollWriter will be triggered in each sync(So the RollWriter will be
  // triggered one by one in a short time). Using it as a workaround to slow
  // down the roll frequency triggered by checkLowReplication().
  private final AtomicInteger consecutiveLogRolls = new AtomicInteger(0);
  private final int lowReplicationRollLimit;
  // If consecutiveLogRolls is larger than lowReplicationRollLimit,
  // then disable the rolling in checkLowReplication().
  // Enable it if the replications recover.
  private volatile boolean lowReplicationRollEnabled = true;
  /** Number of log close errors tolerated before we abort */
  private final int closeErrorsTolerated;
  // Running count of consecutive close errors; reset to 0 on a successful close.
  private final AtomicInteger closeErrorCount = new AtomicInteger();
/**
* Exception handler to pass the disruptor ringbuffer. Same as native implementation only it logs
* using our logger instead of java native logger.
*/
static class RingBufferExceptionHandler implements ExceptionHandler {
@Override
public void handleEventException(Throwable ex, long sequence, Object event) {
LOG.error("Sequence=" + sequence + ", event=" + event, ex);
throw new RuntimeException(ex);
}
@Override
public void handleOnStartException(Throwable ex) {
LOG.error(ex);
throw new RuntimeException(ex);
}
@Override
public void handleOnShutdownException(Throwable ex) {
LOG.error(ex);
throw new RuntimeException(ex);
}
}
  /**
   * Constructor. Delegates to the full constructor using the default archive dir
   * ({@link HConstants#HREGION_OLDLOGDIR_NAME}), no listeners, failIfWALExists=true and the
   * default "wal" prefix with no suffix.
   * @param fs filesystem handle
   * @param root path for stored and archived wals
   * @param logDir dir where wals are stored
   * @param conf configuration to use
   */
  public FSHLog(final FileSystem fs, final Path root, final String logDir, final Configuration conf)
      throws IOException {
    this(fs, root, logDir, HConstants.HREGION_OLDLOGDIR_NAME, conf, null, true, null, null);
  }
/**
* Create an edit log at the given <code>dir</code> location. You should never have to load an
* existing log. If there is a log at startup, it should have already been processed and deleted
* by the time the WAL object is started up.
* @param fs filesystem handle
* @param rootDir path to where logs and oldlogs
* @param logDir dir where wals are stored
* @param archiveDir dir where wals are archived
* @param conf configuration to use
* @param listeners Listeners on WAL events. Listeners passed here will be registered before we do
* anything else; e.g. the Constructor {@link #rollWriter()}.
* @param failIfWALExists If true IOException will be thrown if files related to this wal already
* exist.
* @param prefix should always be hostname and port in distributed env and it will be URL encoded
* before being used. If prefix is null, "wal" will be used
* @param suffix will be url encoded. null is treated as empty. non-empty must start with
* {@link org.apache.hadoop.hbase.wal.AbstractFSWALProvider#WAL_FILE_NAME_DELIMITER}
*/
  public FSHLog(final FileSystem fs, final Path rootDir, final String logDir,
      final String archiveDir, final Configuration conf, final List<WALActionsListener> listeners,
      final boolean failIfWALExists, final String prefix, final String suffix) throws IOException {
    super(fs, rootDir, logDir, archiveDir, conf, listeners, failIfWALExists, prefix, suffix);
    this.minTolerableReplication = conf.getInt("hbase.regionserver.hlog.tolerable.lowreplication",
      FSUtils.getDefaultReplication(fs, this.walDir));
    this.lowReplicationRollLimit = conf.getInt("hbase.regionserver.hlog.lowreplication.rolllimit",
      5);
    this.closeErrorsTolerated = conf.getInt("hbase.regionserver.logroll.errors.tolerated", 0);
    // rollWriter sets this.hdfs_out if it can. Must run before the disruptor starts so the
    // first WAL file exists before any append is accepted.
    rollWriter();
    // This is the 'writer' -- a single threaded executor. This single thread 'consumes' what is
    // put on the ring buffer.
    String hostingThreadName = Thread.currentThread().getName();
    this.appendExecutor = Executors
        .newSingleThreadExecutor(Threads.getNamedThreadFactory(hostingThreadName + ".append"));
    // Preallocate objects to use on the ring buffer. The way that appends and syncs work, we will
    // be stuck and make no progress if the buffer is filled with appends only and there is no
    // sync. If no sync, then the handlers will be outstanding just waiting on sync completion
    // before they return.
    final int preallocatedEventCount = this.conf
        .getInt("hbase.regionserver.wal.disruptor.event.count", 1024 * 16);
    // Using BlockingWaitStrategy. Stuff that is going on here takes so long it makes no sense
    // spinning as other strategies do.
    this.disruptor = new Disruptor<RingBufferTruck>(RingBufferTruck.EVENT_FACTORY,
        preallocatedEventCount, this.appendExecutor, ProducerType.MULTI,
        new BlockingWaitStrategy());
    // Advance the ring buffer sequence so that it starts from 1 instead of 0,
    // because SyncFuture.NOT_DONE = 0.
    this.disruptor.getRingBuffer().next();
    int maxHandlersCount = conf.getInt(HConstants.REGION_SERVER_HANDLER_COUNT, 200);
    this.ringBufferEventHandler = new RingBufferEventHandler(
        conf.getInt("hbase.regionserver.hlog.syncer.count", 5), maxHandlersCount);
    // Exception handler must be registered before the event handler per disruptor contract.
    this.disruptor.handleExceptionsWith(new RingBufferExceptionHandler());
    this.disruptor.handleEventsWith(new RingBufferEventHandler[] { this.ringBufferEventHandler });
    // Starting up threads in constructor is a no no; Interface should have an init call.
    this.disruptor.start();
  }
/**
* Currently, we need to expose the writer's OutputStream to tests so that they can manipulate the
* default behavior (such as setting the maxRecoveryErrorCount value for example (see
* {@link AbstractTestWALReplay#testReplayEditsWrittenIntoWAL()}). This is done using reflection
* on the underlying HDFS OutputStream. NOTE: This could be removed once Hadoop1 support is
* removed.
* @return null if underlying stream is not ready.
*/
@VisibleForTesting
OutputStream getOutputStream() {
FSDataOutputStream fsdos = this.hdfs_out;
return fsdos != null ? fsdos.getWrappedStream() : null;
}
  /**
   * Run a sync after opening to set up the pipeline. This is purely an optimization:
   * a failure here is logged and swallowed rather than aborting the roll.
   * @param nextWriter freshly opened writer to pre-sync
   */
  private void preemptiveSync(final ProtobufLogWriter nextWriter) {
    long startTimeNanos = System.nanoTime();
    try {
      nextWriter.sync();
      // Record the sync latency; zero handlers were released by this sync.
      postSync(System.nanoTime() - startTimeNanos, 0);
    } catch (IOException e) {
      // optimization failed, no need to abort here.
      LOG.warn("pre-sync failed but an optimization so keep going", e);
    }
  }
/**
* This method allows subclasses to inject different writers without having to extend other
* methods like rollWriter().
* @return Writer instance
*/
protected Writer createWriterInstance(final Path path) throws IOException {
Writer writer = FSHLogProvider.createWriter(conf, fs, path, false);
if (writer instanceof ProtobufLogWriter) {
preemptiveSync((ProtobufLogWriter) writer);
}
return writer;
}
  /**
   * Test hook run immediately after the zigzag latch is created in doReplaceWriter. Used to
   * manufacture race condition reliably. No-op in production. For testing only.
   * @see #beforeWaitOnSafePoint()
   */
  @VisibleForTesting
  protected void afterCreatingZigZagLatch() {
  }
/**
* @see #afterCreatingZigZagLatch()
*/
@VisibleForTesting
protected void beforeWaitOnSafePoint() {
};
  /**
   * Hand the entry to the current writer. Per the class comment, appends are consumed by the
   * single disruptor consumer thread, which is why no synchronization appears here.
   */
  @Override
  protected void doAppend(Writer writer, FSWALEntry entry) throws IOException {
    writer.append(entry);
  }
  /**
   * Swap the current writer for <code>nextWriter</code> while the ring buffer consumer is
   * parked at a safe point. Returns the length of the old file (0 if there was none).
   * The zigzag latch dance and the ordering of sequence acquisition vs. latch creation are
   * deadlock-sensitive; see inline comments before changing anything here.
   */
  @Override
  protected long doReplaceWriter(Path oldPath, Path newPath, Writer nextWriter) throws IOException {
    // Ask the ring buffer writer to pause at a safe point. Once we do this, the writer
    // thread will eventually pause. An error hereafter needs to release the writer thread
    // regardless -- hence the finally block below. Note, this method is called from the FSHLog
    // constructor BEFORE the ring buffer is set running so it is null on first time through
    // here; allow for that.
    SyncFuture syncFuture = null;
    SafePointZigZagLatch zigzagLatch = null;
    long sequence = -1L;
    if (this.ringBufferEventHandler != null) {
      // Get sequence first to avoid dead lock when ring buffer is full
      // Considering below sequence
      // 1. replaceWriter is called and zigzagLatch is initialized
      // 2. ringBufferEventHandler#onEvent is called and arrives at #attainSafePoint(long) then wait
      // on safePointReleasedLatch
      // 3. Since ring buffer is full, if we get sequence when publish sync, the replaceWriter
      // thread will wait for the ring buffer to be consumed, but the only consumer is waiting
      // replaceWriter thread to release safePointReleasedLatch, which causes a deadlock
      sequence = getSequenceOnRingBuffer();
      zigzagLatch = this.ringBufferEventHandler.attainSafePoint();
    }
    afterCreatingZigZagLatch();
    long oldFileLen = 0L;
    try {
      // Wait on the safe point to be achieved. Send in a sync in case nothing has hit the
      // ring buffer between the above notification of writer that we want it to go to
      // 'safe point' and then here where we are waiting on it to attain safe point. Use
      // 'sendSync' instead of 'sync' because we do not want this thread to block waiting on it
      // to come back. Cleanup this syncFuture down below after we are ready to run again.
      try {
        if (zigzagLatch != null) {
          // use assert to make sure no change breaks the logic that
          // sequence and zigzagLatch will be set together
          assert sequence > 0L : "Failed to get sequence from ring buffer";
          Trace.addTimelineAnnotation("awaiting safepoint");
          syncFuture = zigzagLatch.waitSafePoint(publishSyncOnRingBuffer(sequence));
        }
      } catch (FailedSyncBeforeLogCloseException e) {
        // If unflushed/unsynced entries on close, it is reason to abort.
        if (isUnflushedEntries()) {
          throw e;
        }
        LOG.warn(
          "Failed sync-before-close but no outstanding appends; closing WAL" + e.getMessage());
      }
      // It is at the safe point. Swap out writer from under the blocked writer thread.
      // TODO: This is close is inline with critical section. Should happen in background?
      if (this.writer != null) {
        oldFileLen = this.writer.getLength();
        try {
          Trace.addTimelineAnnotation("closing writer");
          this.writer.close();
          Trace.addTimelineAnnotation("writer closed");
          this.closeErrorCount.set(0);
        } catch (IOException ioe) {
          // Tolerate a bounded number of close failures when all edits are synced; the old
          // file is left unclosed but its contents are safe.
          int errors = closeErrorCount.incrementAndGet();
          if (!isUnflushedEntries() && (errors <= this.closeErrorsTolerated)) {
            LOG.warn("Riding over failed WAL close of " + oldPath + ", cause=\"" + ioe.getMessage()
                + "\", errors=" + errors
                + "; THIS FILE WAS NOT CLOSED BUT ALL EDITS SYNCED SO SHOULD BE OK");
          } else {
            throw ioe;
          }
        }
      }
      this.writer = nextWriter;
      if (nextWriter != null && nextWriter instanceof ProtobufLogWriter) {
        this.hdfs_out = ((ProtobufLogWriter) nextWriter).getStream();
      } else {
        this.hdfs_out = null;
      }
    } catch (InterruptedException ie) {
      // Perpetuate the interrupt
      Thread.currentThread().interrupt();
    } catch (IOException e) {
      long count = getUnflushedEntriesCount();
      LOG.error("Failed close of WAL writer " + oldPath + ", unflushedEntries=" + count, e);
      throw new FailedLogCloseException(oldPath + ", unflushedEntries=" + count, e);
    } finally {
      // Let the writer thread go regardless, whether error or not.
      if (zigzagLatch != null) {
        zigzagLatch.releaseSafePoint();
        // syncFuture will be null if we failed our wait on safe point above. Otherwise, if
        // latch was obtained successfully, the sync we threw in either trigger the latch or it
        // got stamped with an exception because the WAL was damaged and we could not sync. Now
        // the write pipeline has been opened up again by releasing the safe point, process the
        // syncFuture we got above. This is probably a noop but it may be stale exception from
        // when old WAL was in place. Catch it if so.
        if (syncFuture != null) {
          try {
            blockOnSync(syncFuture);
          } catch (IOException ioe) {
            if (LOG.isTraceEnabled()) {
              LOG.trace("Stale sync exception", ioe);
            }
          }
        }
      }
    }
    return oldFileLen;
  }
  /**
   * Shut down in dependency order: disruptor first (so no new appends flow), then the append
   * executor that ran it, then close the writer. Callers must have stopped incoming appends
   * before invoking this or the disruptor will not drain.
   */
  @Override
  protected void doShutdown() throws IOException {
    // Shutdown the disruptor. Will stop after all entries have been processed. Make sure we
    // have stopped incoming appends before calling this else it will not shutdown. We are
    // conservative below waiting a long time and if not elapsed, then halting.
    if (this.disruptor != null) {
      long timeoutms = conf.getLong("hbase.wal.disruptor.shutdown.timeout.ms", 60000);
      try {
        this.disruptor.shutdown(timeoutms, TimeUnit.MILLISECONDS);
      } catch (TimeoutException e) {
        LOG.warn("Timed out bringing down disruptor after " + timeoutms + "ms; forcing halt "
            + "(It is a problem if this is NOT an ABORT! -- DATALOSS!!!!)");
        this.disruptor.halt();
        this.disruptor.shutdown();
      }
    }
    // With disruptor down, this is safe to let go.
    if (this.appendExecutor != null) {
      this.appendExecutor.shutdown();
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Closing WAL writer in " + FSUtils.getPath(walDir));
    }
    if (this.writer != null) {
      this.writer.close();
      this.writer = null;
    }
  }
  /**
   * Append an edit by publishing it on the disruptor ring buffer. The returned ring buffer
   * sequence doubles as the entry's transaction id. The truck MUST be published in the finally
   * block even on failure, otherwise the ring buffer consumer stalls on the unpublished slot.
   */
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "NP_NULL_ON_SOME_PATH_EXCEPTION",
      justification = "Will never be null")
  @Override
  public long append(final HRegionInfo hri,
      final WALKey key, final WALEdit edits, final boolean inMemstore) throws IOException {
    if (this.closed) {
      throw new IOException("Cannot append; log is closed");
    }
    // Make a trace scope for the append. It is closed on other side of the ring buffer by the
    // single consuming thread. Don't have to worry about it.
    TraceScope scope = Trace.startSpan("FSHLog.append");
    // This is crazy how much it takes to make an edit. Do we need all this stuff!!!!???? We need
    // all this to make a key and then below to append the edit, we need to carry htd, info,
    // etc. all over the ring buffer.
    FSWALEntry entry = null;
    long sequence = this.disruptor.getRingBuffer().next();
    try {
      RingBufferTruck truck = this.disruptor.getRingBuffer().get(sequence);
      // Construction of FSWALEntry sets a latch. The latch is thrown just after we stamp the
      // edit with its edit/sequence id.
      // TODO: reuse FSWALEntry as we do SyncFuture rather create per append.
      entry = new FSWALEntry(sequence, key, edits, hri, inMemstore);
      truck.loadPayload(entry, scope.detach());
    } finally {
      this.disruptor.getRingBuffer().publish(sequence);
    }
    return sequence;
  }
/**
* Thread to runs the hdfs sync call. This call takes a while to complete. This is the longest
* pole adding edits to the WAL and this must complete to be sure all edits persisted. We run
* multiple threads sync'ng rather than one that just syncs in series so we have better latencies;
* otherwise, an edit that arrived just after a sync started, might have to wait almost the length
* of two sync invocations before it is marked done.
* <p>
* When the sync completes, it marks all the passed in futures done. On the other end of the sync
* future is a blocked thread, usually a regionserver Handler. There may be more than one future
* passed in the case where a few threads arrive at about the same time and all invoke 'sync'. In
* this case we'll batch up the invocations and run one filesystem sync only for a batch of
* Handler sync invocations. Do not confuse these Handler SyncFutures with the futures an
* ExecutorService returns when you call submit. We have no use for these in this model. These
* SyncFutures are 'artificial', something to hold the Handler until the filesystem sync
* completes.
*/
  private class SyncRunner extends HasThread {
    // Ring buffer sequence current when the latest batch of futures was offered to us.
    private volatile long sequence;
    // Keep around last exception thrown. Clear on successful sync.
    private final BlockingQueue<SyncFuture> syncFutures;

    /**
     * @param name thread name to run this syncer under.
     * @param maxHandlersCount count of user-level handlers; the internal queue is sized at three
     *          times this value to leave headroom for non-user handlers (region open/close etc.)
     *          that also sync -- see the sizing discussion in the constructor body.
     */
    SyncRunner(final String name, final int maxHandlersCount) {
      super(name);
      // LinkedBlockingQueue because of
      // http://www.javacodegeeks.com/2010/09/java-best-practices-queue-battle-and.html
      // Could use other blockingqueues here or concurrent queues.
      //
      // We could let the capacity be 'open' but bound it so we get alerted in pathological case
      // where we cannot sync and we have a bunch of threads all backed up waiting on their syncs
      // to come in. LinkedBlockingQueue actually shrinks when you remove elements so Q should
      // stay neat and tidy in usual case. Let the max size be three times the maximum handlers.
      // The passed in maxHandlerCount is the user-level handlers which is what we put up most of
      // but HBase has other handlers running too -- opening region handlers which want to write
      // the meta table when succesful (i.e. sync), closing handlers -- etc. These are usually
      // much fewer in number than the user-space handlers so Q-size should be user handlers plus
      // some space for these other handlers. Lets multiply by 3 for good-measure.
      this.syncFutures = new LinkedBlockingQueue<SyncFuture>(maxHandlersCount * 3);
    }

    /**
     * Hand this runner a batch of sync futures to complete once the writer sync covering
     * <code>sequence</code> finishes.
     */
    void offer(final long sequence, final SyncFuture[] syncFutures, final int syncFutureCount) {
      // Set sequence first because the add to the queue will wake the thread if sleeping.
      this.sequence = sequence;
      for (int i = 0; i < syncFutureCount; ++i) {
        this.syncFutures.add(syncFutures[i]);
      }
    }

    /**
     * Release the passed <code>syncFuture</code>
     * @return Returns 1.
     */
    private int releaseSyncFuture(final SyncFuture syncFuture, final long currentSequence,
        final Throwable t) {
      if (!syncFuture.done(currentSequence, t)) {
        throw new IllegalStateException();
      }
      // This function releases one sync future only.
      return 1;
    }

    /**
     * Release all SyncFutures whose sequence is <= <code>currentSequence</code>.
     * @param t May be non-null if we are processing SyncFutures because an exception was thrown.
     * @return Count of SyncFutures we let go.
     */
    private int releaseSyncFutures(final long currentSequence, final Throwable t) {
      int syncCount = 0;
      for (SyncFuture syncFuture; (syncFuture = this.syncFutures.peek()) != null;) {
        if (syncFuture.getTxid() > currentSequence) {
          break;
        }
        releaseSyncFuture(syncFuture, currentSequence, t);
        if (!this.syncFutures.remove(syncFuture)) {
          throw new IllegalStateException(syncFuture.toString());
        }
        syncCount++;
      }
      return syncCount;
    }

    /**
     * @param sequence The sequence we ran the filesystem sync against.
     * @return Current highest synced sequence.
     */
    private long updateHighestSyncedSequence(long sequence) {
      long currentHighestSyncedSequence;
      // Set the highestSyncedSequence IFF our current sequence id is the 'highest'.
      do {
        currentHighestSyncedSequence = highestSyncedTxid.get();
        if (currentHighestSyncedSequence >= sequence) {
          // Set the sync number to current highwater mark; might be able to let go more
          // queued sync futures
          sequence = currentHighestSyncedSequence;
          break;
        }
      } while (!highestSyncedTxid.compareAndSet(currentHighestSyncedSequence, sequence));
      return sequence;
    }

    // Main loop: take futures off the queue, skip those already covered by a completed sync,
    // run one filesystem sync, then release every future the sync covered.
    public void run() {
      long currentSequence;
      while (!isInterrupted()) {
        int syncCount = 0;
        SyncFuture takeSyncFuture;
        try {
          while (true) {
            // We have to process what we 'take' from the queue
            takeSyncFuture = this.syncFutures.take();
            currentSequence = this.sequence;
            long syncFutureSequence = takeSyncFuture.getTxid();
            if (syncFutureSequence > currentSequence) {
              throw new IllegalStateException("currentSequence=" + currentSequence
                  + ", syncFutureSequence=" + syncFutureSequence);
            }
            // See if we can process any syncfutures BEFORE we go sync.
            long currentHighestSyncedSequence = highestSyncedTxid.get();
            if (currentSequence < currentHighestSyncedSequence) {
              syncCount += releaseSyncFuture(takeSyncFuture, currentHighestSyncedSequence, null);
              // Done with the 'take'. Go around again and do a new 'take'.
              continue;
            }
            break;
          }
          // I got something. Lets run. Save off current sequence number in case it changes
          // while we run.
          TraceScope scope = Trace.continueSpan(takeSyncFuture.getSpan());
          long start = System.nanoTime();
          Throwable lastException = null;
          try {
            Trace.addTimelineAnnotation("syncing writer");
            writer.sync();
            Trace.addTimelineAnnotation("writer synced");
            currentSequence = updateHighestSyncedSequence(currentSequence);
          } catch (IOException e) {
            LOG.error("Error syncing, request close of WAL", e);
            lastException = e;
          } catch (Exception e) {
            LOG.warn("UNEXPECTED", e);
            lastException = e;
          } finally {
            // reattach the span to the future before releasing.
            takeSyncFuture.setSpan(scope.detach());
            // First release what we 'took' from the queue.
            syncCount += releaseSyncFuture(takeSyncFuture, currentSequence, lastException);
            // Can we release other syncs?
            syncCount += releaseSyncFutures(currentSequence, lastException);
            if (lastException != null) {
              requestLogRoll();
            } else {
              checkLogRoll();
            }
          }
          postSync(System.nanoTime() - start, syncCount);
        } catch (InterruptedException e) {
          // Presume legit interrupt.
          Thread.currentThread().interrupt();
        } catch (Throwable t) {
          LOG.warn("UNEXPECTED, continuing", t);
        }
      }
    }
  }
/**
* Schedule a log roll if needed.
*/
void checkLogRoll() {
// Will return immediately if we are in the middle of a WAL log roll currently.
if (!rollWriterLock.tryLock()) {
return;
}
boolean lowReplication;
try {
lowReplication = checkLowReplication();
} finally {
rollWriterLock.unlock();
}
try {
if (lowReplication || writer != null && writer.getLength() > logrollsize) {
requestLogRoll(lowReplication);
}
} catch (IOException e) {
LOG.warn("Writer.getLength() failed; continuing", e);
}
}
  /**
   * Decide whether a roll is needed because the WAL's replica count has fallen below
   * {@link #minTolerableReplication}. Maintains the consecutive-roll counter that disables the
   * low-replication roller after {@link #lowReplicationRollLimit} back-to-back requests, and
   * re-enables it once replication recovers on a non-fresh writer.
   * @return true if number of replicas for the WAL is lower than threshold and a roll is wanted
   */
  private boolean checkLowReplication() {
    boolean logRollNeeded = false;
    // if the number of replicas in HDFS has fallen below the configured
    // value, then roll logs.
    try {
      int numCurrentReplicas = getLogReplication();
      // 0 means "could not determine" (standalone mode); do not roll on it.
      if (numCurrentReplicas != 0 && numCurrentReplicas < this.minTolerableReplication) {
        if (this.lowReplicationRollEnabled) {
          if (this.consecutiveLogRolls.get() < this.lowReplicationRollLimit) {
            LOG.warn("HDFS pipeline error detected. " + "Found " + numCurrentReplicas
                + " replicas but expecting no less than " + this.minTolerableReplication
                + " replicas. " + " Requesting close of WAL. current pipeline: "
                + Arrays.toString(getPipeline()));
            logRollNeeded = true;
            // If rollWriter is requested, increase consecutiveLogRolls. Once it
            // is larger than lowReplicationRollLimit, disable the
            // LowReplication-Roller
            this.consecutiveLogRolls.getAndIncrement();
          } else {
            LOG.warn("Too many consecutive RollWriter requests, it's a sign of "
                + "the total number of live datanodes is lower than the tolerable replicas.");
            this.consecutiveLogRolls.set(0);
            this.lowReplicationRollEnabled = false;
          }
        }
      } else if (numCurrentReplicas >= this.minTolerableReplication) {
        if (!this.lowReplicationRollEnabled) {
          // The new writer's log replicas is always the default value.
          // So we should not enable LowReplication-Roller. If numEntries
          // is lower than or equals 1, we consider it as a new writer.
          if (this.numEntries.get() <= 1) {
            return logRollNeeded;
          }
          // Once the live datanode number and the replicas return to normal,
          // enable the LowReplication-Roller.
          this.lowReplicationRollEnabled = true;
          LOG.info("LowReplication-Roller was enabled.");
        }
      }
    } catch (Exception e) {
      LOG.warn("DFSOutputStream.getNumCurrentReplicas failed because of " + e + ", continuing...");
    }
    return logRollNeeded;
  }
  /**
   * Publish a sync (with no trace span) for the already-acquired ring buffer slot
   * <code>sequence</code>.
   */
  private SyncFuture publishSyncOnRingBuffer(long sequence) {
    return publishSyncOnRingBuffer(sequence, null);
  }
  /**
   * Claim the next ring buffer slot. The caller MUST eventually publish this sequence or the
   * ring buffer consumer stalls.
   */
  private long getSequenceOnRingBuffer() {
    return this.disruptor.getRingBuffer().next();
  }
private SyncFuture publishSyncOnRingBuffer(Span span) {
long sequence = this.disruptor.getRingBuffer().next();
return publishSyncOnRingBuffer(sequence, span);
}
  /**
   * Load a sync request into the already-claimed ring buffer slot <code>sequence</code> and
   * publish it. The publish happens in a finally block so a failure while loading the truck
   * cannot leave an unpublished slot and stall the consumer.
   */
  private SyncFuture publishSyncOnRingBuffer(long sequence, Span span) {
    // here we use ring buffer sequence as transaction id
    SyncFuture syncFuture = getSyncFuture(sequence, span);
    try {
      RingBufferTruck truck = this.disruptor.getRingBuffer().get(sequence);
      truck.loadPayload(syncFuture);
    } finally {
      this.disruptor.getRingBuffer().publish(sequence);
    }
    return syncFuture;
  }
  /**
   * Publish a sync of all known transactions on the ring buffer, then block until it completes.
   * @param span detached trace span to carry across the ring buffer
   * @return the span to resume once the sync is done
   */
  private Span publishSyncThenBlockOnCompletion(Span span) throws IOException {
    return blockOnSync(publishSyncOnRingBuffer(span));
  }
/**
* {@inheritDoc}
* <p>
* If the pipeline isn't started yet or is empty, you will get the default replication factor.
* Therefore, if this function returns 0, it means you are not properly running with the HDFS-826
* patch.
*/
@VisibleForTesting
int getLogReplication() {
try {
// in standalone mode, it will return 0
if (this.hdfs_out instanceof HdfsDataOutputStream) {
return ((HdfsDataOutputStream) this.hdfs_out).getCurrentBlockReplication();
}
} catch (IOException e) {
LOG.info("", e);
}
return 0;
}
  /**
   * Sync all outstanding edits. Starts a trace span, detaches it across the ring buffer, and
   * resumes whatever span comes back once the filesystem sync has completed.
   */
  @Override
  public void sync() throws IOException {
    TraceScope scope = Trace.startSpan("FSHLog.sync");
    try {
      scope = Trace.continueSpan(publishSyncThenBlockOnCompletion(scope.detach()));
    } finally {
      assert scope == NullScope.INSTANCE || !scope.isDetached();
      scope.close();
    }
  }
@Override
public void sync(long txid) throws IOException {
if (this.highestSyncedTxid.get() >= txid) {
// Already sync'd.
return;
}
TraceScope scope = Trace.startSpan("FSHLog.sync");
try {
scope = Trace.continueSpan(publishSyncThenBlockOnCompletion(scope.detach()));
} finally {
assert scope == NullScope.INSTANCE || !scope.isDetached();
scope.close();
}
}
  /**
   * Callback for log roller exit. Intentionally a no-op: FSHLog keeps no per-roller state.
   */
  @Override
  public void logRollerExited() {
  }
  /** @return whether the low-replication roller is currently enabled; for tests. */
  @VisibleForTesting
  boolean isLowReplicationRollEnabled() {
    return lowReplicationRollEnabled;
  }
  /**
   * Rough fixed heap overhead of an FSHLog: object header, five references, an atomic integer,
   * one int and three longs. NOTE(review): these counts look hand-maintained and may be stale
   * relative to the fields declared above -- confirm before relying on this for heap accounting.
   */
  public static final long FIXED_OVERHEAD = ClassSize
      .align(ClassSize.OBJECT + (5 * ClassSize.REFERENCE) + ClassSize.ATOMIC_INTEGER
          + Bytes.SIZEOF_INT + (3 * Bytes.SIZEOF_LONG));
  /**
   * Split the WAL files under directory <code>p</code>, archiving them under the cluster's
   * old-log dir when done.
   * @param conf configuration; supplies the filesystem and the HBase root dir
   * @param p directory of WALs to split; must exist and be a directory
   * @throws FileNotFoundException if <code>p</code> does not exist
   * @throws IOException if <code>p</code> is not a directory or the split fails
   */
  private static void split(final Configuration conf, final Path p) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    if (!fs.exists(p)) {
      throw new FileNotFoundException(p.toString());
    }
    if (!fs.getFileStatus(p).isDirectory()) {
      throw new IOException(p + " is not a directory");
    }
    final Path baseDir = FSUtils.getRootDir(conf);
    final Path archiveDir = new Path(baseDir, HConstants.HREGION_OLDLOGDIR_NAME);
    WALSplitter.split(baseDir, p, archiveDir, fs, conf, WALFactory.getInstance(conf));
  }
/**
* This class is used coordinating two threads holding one thread at a 'safe point' while the
* orchestrating thread does some work that requires the first thread paused: e.g. holding the WAL
* writer while its WAL is swapped out from under it by another thread.
* <p>
* Thread A signals Thread B to hold when it gets to a 'safe point'. Thread A wait until Thread B
* gets there. When the 'safe point' has been attained, Thread B signals Thread A. Thread B then
* holds at the 'safe point'. Thread A on notification that Thread B is paused, goes ahead and
* does the work it needs to do while Thread B is holding. When Thread A is done, it flags B and
* then Thread A and Thread B continue along on their merry way. Pause and signalling 'zigzags'
* between the two participating threads. We use two latches -- one the inverse of the other --
* pausing and signaling when states are achieved.
* <p>
* To start up the drama, Thread A creates an instance of this class each time it would do this
* zigzag dance and passes it to Thread B (these classes use Latches so it is one shot only).
* Thread B notices the new instance (via reading a volatile reference or how ever) and it starts
* to work toward the 'safe point'. Thread A calls {@link #waitSafePoint()} when it cannot proceed
* until the Thread B 'safe point' is attained. Thread A will be held inside in
* {@link #waitSafePoint()} until Thread B reaches the 'safe point'. Once there, Thread B frees
* Thread A by calling {@link #safePointAttained()}. Thread A now knows Thread B is at the 'safe
* point' and that it is holding there (When Thread B calls {@link #safePointAttained()} it blocks
* here until Thread A calls {@link #releaseSafePoint()}). Thread A proceeds to do what it needs
* to do while Thread B is paused. When finished, it lets Thread B lose by calling
* {@link #releaseSafePoint()} and away go both Threads again.
*/
static class SafePointZigZagLatch {
  /**
   * Count down this latch when safe point attained.
   */
  private volatile CountDownLatch safePointAttainedLatch = new CountDownLatch(1);
  /**
   * Latch to wait on. Will be released when we can proceed.
   */
  private volatile CountDownLatch safePointReleasedLatch = new CountDownLatch(1);
  /**
   * For Thread A to call when it is ready to wait on the 'safe point' to be attained. Thread A
   * will be held in here until Thread B calls {@link #safePointAttained()}.
   * @param syncFuture We need this as barometer on outstanding syncs. If it comes home with an
   *          exception, then something is up w/ our syncing.
   * @return The passed <code>syncFuture</code>
   * @throws FailedSyncBeforeLogCloseException if the outstanding sync comes back with an
   *           exception; without this check we would spin here forever when all syncs are
   *           erroring out.
   */
  SyncFuture waitSafePoint(final SyncFuture syncFuture) throws InterruptedException,
      FailedSyncBeforeLogCloseException {
    // Poll the latch with a tiny timeout rather than blocking outright so we can notice a
    // failed sync between checks.
    while (true) {
      if (this.safePointAttainedLatch.await(1, TimeUnit.NANOSECONDS)) {
        break;
      }
      if (syncFuture.isThrowable()) {
        throw new FailedSyncBeforeLogCloseException(syncFuture.getThrowable());
      }
    }
    return syncFuture;
  }
  /**
   * Called by Thread B when it attains the 'safe point'. In this method, Thread B signals Thread
   * A it can proceed. Thread B will be held in here until {@link #releaseSafePoint()} is called
   * by Thread A.
   */
  void safePointAttained() throws InterruptedException {
    this.safePointAttainedLatch.countDown();
    this.safePointReleasedLatch.await();
  }
  /**
   * Called by Thread A when it is done with the work it needs to do while Thread B is halted.
   * This will release the Thread B held in a call to {@link #safePointAttained()}
   */
  void releaseSafePoint() {
    this.safePointReleasedLatch.countDown();
  }
  /**
   * @return True if this is a 'cocked', fresh instance, and not one that has already fired.
   */
  boolean isCocked() {
    return this.safePointAttainedLatch.getCount() > 0
        && this.safePointReleasedLatch.getCount() > 0;
  }
}
/**
* Handler that is run by the disruptor ringbuffer consumer. Consumer is a SINGLE
* 'writer/appender' thread. Appends edits and starts up sync runs. Tries its best to batch up
* syncs. There is no discernible benefit batching appends so we just append as they come in
* because it simplifies the below implementation. See metrics for batching effectiveness (In
* measurement, at 100 concurrent handlers writing 1k, we are batching > 10 appends and 10 handler
* sync invocations for every actual dfsclient sync call; at 10 concurrent handlers, YMMV).
* <p>
* Herein, we have an array into which we store the sync futures as they come in. When we have a
* 'batch', we'll then pass what we have collected to a SyncRunner thread to do the filesystem
* sync. When it completes, it will then call {@link SyncFuture#done(long, Throwable)} on each of
* SyncFutures in the batch to release blocked Handler threads.
* <p>
* I've tried various effects to try and make latencies low while keeping throughput high. I've
* tried keeping a single Queue of SyncFutures in this class appending to its tail as the syncs
* coming and having sync runner threads poll off the head to 'finish' completed SyncFutures. I've
* tried linkedlist, and various from concurrent utils whether LinkedBlockingQueue or
* ArrayBlockingQueue, etc. The more points of synchronization, the more 'work' (according to
* 'perf stats') that has to be done; small increases in stall percentages seem to have a big
* impact on throughput/latencies. The below model where we have an array into which we stash the
* syncs and then hand them off to the sync thread seemed like a decent compromise. See HBASE-8755
* for more detail.
*/
class RingBufferEventHandler implements EventHandler<RingBufferTruck>, LifecycleAware {
  /** Sync runner threads we hand batches of syncFutures off to, round-robin. */
  private final SyncRunner[] syncRunners;
  /** Batch of outstanding sync requests; only the first {@code syncFuturesCount} slots are live. */
  private final SyncFuture[] syncFutures;
  // Had 'interesting' issues when this was non-volatile. On occasion, we'd not pass all
  // syncFutures to the next sync'ing thread.
  private volatile int syncFuturesCount = 0;
  /** Non-null while another thread (the log roller) is asking us to hold at a safe point. */
  private volatile SafePointZigZagLatch zigzagLatch;
  /**
   * Set if we get an exception appending or syncing so that all subsequent appends and syncs on
   * this WAL fail until WAL is replaced.
   */
  private Exception exception = null;
  /**
   * Object to block on while waiting on safe point.
   */
  private final Object safePointWaiter = new Object();
  private volatile boolean shutdown = false;
  /**
   * Which syncrunner to use next.
   */
  private int syncRunnerIndex;
  /**
   * @param syncRunnerCount number of SyncRunner threads to create
   * @param maxHandlersCount maximum number of concurrent handler syncs; sizes the batching array
   */
  RingBufferEventHandler(final int syncRunnerCount, final int maxHandlersCount) {
    this.syncFutures = new SyncFuture[maxHandlersCount];
    this.syncRunners = new SyncRunner[syncRunnerCount];
    for (int i = 0; i < syncRunnerCount; i++) {
      this.syncRunners[i] = new SyncRunner("sync." + i, maxHandlersCount);
    }
  }
  /**
   * Fail every outstanding sync future with the given exception and reset the batch count.
   */
  private void cleanupOutstandingSyncsOnException(final long sequence, final Exception e) {
    // There could be handler-count syncFutures outstanding.
    for (int i = 0; i < this.syncFuturesCount; i++) {
      this.syncFutures[i].done(sequence, e);
    }
    this.syncFuturesCount = 0;
  }
  /**
   * @return True if any of the outstanding sync futures is still incomplete.
   */
  private boolean isOutstandingSyncs() {
    for (int i = 0; i < this.syncFuturesCount; i++) {
      if (!this.syncFutures[i].isDone()) {
        return true;
      }
    }
    return false;
  }
  /**
   * Consume one truck off the ring buffer: appends go straight through, syncs are batched and
   * handed to a SyncRunner at end of batch. Runs on the single disruptor consumer thread.
   */
  @Override
  // We can set endOfBatch in the below method if at end of our this.syncFutures array
  public void onEvent(final RingBufferTruck truck, final long sequence, boolean endOfBatch)
      throws Exception {
    // Appends and syncs are coming in order off the ringbuffer. We depend on this fact. We'll
    // add appends to dfsclient as they come in. Batching appends doesn't give any significant
    // benefit on measurement. Handler sync calls we will batch up. If we get an exception
    // appending an edit, we fail all subsequent appends and syncs with the same exception until
    // the WAL is reset. It is important that we not short-circuit and exit early this method.
    // It is important that we always go through the attainSafePoint on the end. Another thread,
    // the log roller may be waiting on a signal from us here and will just hang without it.
    try {
      if (truck.hasSyncFuturePayload()) {
        this.syncFutures[this.syncFuturesCount++] = truck.unloadSyncFuturePayload();
        // Force flush of syncs if we are carrying a full complement of syncFutures.
        if (this.syncFuturesCount == this.syncFutures.length) {
          endOfBatch = true;
        }
      } else if (truck.hasFSWALEntryPayload()) {
        TraceScope scope = Trace.continueSpan(truck.unloadSpanPayload());
        try {
          FSWALEntry entry = truck.unloadFSWALEntryPayload();
          if (this.exception != null) {
            // We got an exception on an earlier attempt at append. Do not let this append
            // go through. Fail it but stamp the sequenceid into this append though failed.
            // We need to do this to close the latch held down deep in WALKey...that is waiting
            // on sequenceid assignment otherwise it will just hang out (The #append method
            // called below does this also internally).
            entry.stampRegionSequenceId();
            // Return to keep processing events coming off the ringbuffer
            return;
          }
          append(entry);
        } catch (Exception e) {
          // Failed append. Record the exception.
          this.exception = e;
          // Return to keep processing events coming off the ringbuffer
          return;
        } finally {
          assert scope == NullScope.INSTANCE || !scope.isDetached();
          scope.close(); // append scope is complete
        }
      } else {
        // What is this if not an append or sync. Fail all up to this!!!
        cleanupOutstandingSyncsOnException(sequence,
          new IllegalStateException("Neither append nor sync"));
        // Return to keep processing.
        return;
      }
      // TODO: Check size and if big go ahead and call a sync if we have enough data.
      // This is a sync. If existing exception, fall through. Else look to see if batch.
      if (this.exception == null) {
        // If not a batch, return to consume more events from the ring buffer before proceeding;
        // we want to get up a batch of syncs and appends before we go do a filesystem sync.
        if (!endOfBatch || this.syncFuturesCount <= 0) {
          return;
        }
        // syncRunnerIndex is bound to the range [0, Integer.MAX_INT - 1] as follows:
        // * The maximum value possible for syncRunners.length is Integer.MAX_INT
        // * syncRunnerIndex starts at 0 and is incremented only here
        // * after the increment, the value is bounded by the '%' operator to
        // [0, syncRunners.length), presuming the value was positive prior to
        // the '%' operator.
        // * after being bound to [0, Integer.MAX_INT - 1], the new value is stored in
        // syncRunnerIndex ensuring that it can't grow without bound and overflow.
        // * note that the value after the increment must be positive, because the most it
        // could have been prior was Integer.MAX_INT - 1 and we only increment by 1.
        this.syncRunnerIndex = (this.syncRunnerIndex + 1) % this.syncRunners.length;
        try {
          // Below expects that the offer 'transfers' responsibility for the outstanding syncs to
          // the syncRunner. We should never get an exception in here.
          this.syncRunners[this.syncRunnerIndex].offer(sequence, this.syncFutures,
            this.syncFuturesCount);
        } catch (Exception e) {
          // Should NEVER get here.
          requestLogRoll();
          this.exception = new DamagedWALException("Failed offering sync", e);
        }
      }
      // We may have picked up an exception above trying to offer sync
      if (this.exception != null) {
        cleanupOutstandingSyncsOnException(sequence, this.exception instanceof DamagedWALException
            ? this.exception : new DamagedWALException("On sync", this.exception));
      }
      attainSafePoint(sequence);
      this.syncFuturesCount = 0;
    } catch (Throwable t) {
      LOG.error("UNEXPECTED!!! syncFutures.length=" + this.syncFutures.length, t);
    }
  }
  /**
   * Arm a new one-shot safe-point latch. Called by the thread that wants this handler paused
   * (e.g. the log roller).
   * @return the fresh latch the caller will zigzag on
   */
  SafePointZigZagLatch attainSafePoint() {
    this.zigzagLatch = new SafePointZigZagLatch();
    return this.zigzagLatch;
  }
  /**
   * Check if we should attain safe point. If so, go there and then wait till signalled before we
   * proceed.
   */
  private void attainSafePoint(final long currentSequence) {
    if (this.zigzagLatch == null || !this.zigzagLatch.isCocked()) {
      return;
    }
    // If here, another thread is waiting on us to get to safe point. Don't leave it hanging.
    beforeWaitOnSafePoint();
    try {
      // Wait on outstanding syncers; wait for them to finish syncing (unless we've been
      // shutdown or unless our latch has been thrown because we have been aborted or unless
      // this WAL is broken and we can't get a sync/append to complete).
      while (!this.shutdown && this.zigzagLatch.isCocked()
          && highestSyncedTxid.get() < currentSequence &&
          // We could be in here and all syncs are failing or failed. Check for this. Otherwise
          // we'll just be stuck here for ever. In other words, ensure there are syncs running.
          isOutstandingSyncs()) {
        synchronized (this.safePointWaiter) {
          this.safePointWaiter.wait(0, 1);
        }
      }
      // Tell waiting thread we've attained safe point. Can clear this.throwable if set here
      // because we know that next event through the ringbuffer will be going to a new WAL
      // after we do the zigzaglatch dance.
      this.exception = null;
      this.zigzagLatch.safePointAttained();
    } catch (InterruptedException e) {
      LOG.warn("Interrupted ", e);
      Thread.currentThread().interrupt();
    }
  }
  /**
   * Append to the WAL. Does all CP and WAL listener calls.
   */
  void append(final FSWALEntry entry) throws Exception {
    try {
      FSHLog.this.append(writer, entry);
    } catch (Exception e) {
      String msg = "Append sequenceId=" + entry.getKey().getSequenceId()
          + ", requesting roll of WAL";
      LOG.warn(msg, e);
      requestLogRoll();
      throw new DamagedWALException(msg, e);
    }
  }
  /** Disruptor lifecycle callback: start the sync runner threads. */
  @Override
  public void onStart() {
    for (SyncRunner syncRunner : this.syncRunners) {
      syncRunner.start();
    }
  }
  /** Disruptor lifecycle callback: interrupt the sync runner threads on shutdown. */
  @Override
  public void onShutdown() {
    for (SyncRunner syncRunner : this.syncRunners) {
      syncRunner.interrupt();
    }
  }
}
/**
 * Print command-line usage of the FSHLog tool to <code>stderr</code>.
 */
private static void usage() {
  final String[] usageLines = {
    "Usage: FSHLog <ARGS>",
    "Arguments:",
    " --dump Dump textual representation of passed one or more files",
    " For example: FSHLog --dump hdfs://example.com:9000/hbase/.logs/MACHINE/LOGFILE",
    " --split Split the passed directory of WAL logs",
    " For example: FSHLog --split hdfs://example.com:9000/hbase/.logs/DIR"
  };
  for (String line : usageLines) {
    System.err.println(line);
  }
}
/**
 * Pass one or more log file names and it will either dump out a text version on
 * <code>stdout</code> or split the specified log files.
 * <p>
 * Exits with a non-zero status on unrecognized arguments or on a failed split.
 */
public static void main(String[] args) throws IOException {
  if (args.length < 2) {
    usage();
    System.exit(-1);
  }
  // either dump using the WALPrettyPrinter or split, depending on args
  // (use equals for string equality; compareTo(...) == 0 was a non-idiomatic equality test)
  if ("--dump".equals(args[0])) {
    WALPrettyPrinter.run(Arrays.copyOfRange(args, 1, args.length));
  } else if ("--perf".equals(args[0])) {
    LOG.fatal("Please use the WALPerformanceEvaluation tool instead. i.e.:");
    LOG.fatal(
      "\thbase org.apache.hadoop.hbase.wal.WALPerformanceEvaluation --iterations " + args[1]);
    System.exit(-1);
  } else if ("--split".equals(args[0])) {
    Configuration conf = HBaseConfiguration.create();
    // Split each passed WAL directory in turn; bail out on the first failure.
    for (int i = 1; i < args.length; i++) {
      try {
        Path logPath = new Path(args[i]);
        FSUtils.setFsDefault(conf, logPath);
        split(conf, logPath);
      } catch (IOException t) {
        t.printStackTrace(System.err);
        System.exit(-1);
      }
    }
  } else {
    usage();
    System.exit(-1);
  }
}
/**
 * This method gets the pipeline for the current WAL.
 * @return the datanode pipeline of the wrapped output stream, or an empty array when there is
 *         no output stream or it is not a DFSOutputStream.
 */
@Override
DatanodeInfo[] getPipeline() {
  // Guard-clause form: bail out unless we have a DFS-backed output stream.
  if (this.hdfs_out == null || !(this.hdfs_out.getWrappedStream() instanceof DFSOutputStream)) {
    return new DatanodeInfo[0];
  }
  return ((DFSOutputStream) this.hdfs_out.getWrappedStream()).getPipeline();
}
}
|
3e0e82b4f36769815250a5ff81ce676105480a7b | 1,374 | java | Java | UserMangementApp/src/main/java/com/lti/controller/EditServlet.java | Ashleshk/Java-Online-Coding | 9955035bfb98c34f98e079a6ed7a7e0718e45655 | [
"MIT"
] | 1 | 2020-12-17T06:51:22.000Z | 2020-12-17T06:51:22.000Z | UserMangementApp/src/main/java/com/lti/controller/EditServlet.java | Ashleshk/Java-Online-Coding | 9955035bfb98c34f98e079a6ed7a7e0718e45655 | [
"MIT"
] | null | null | null | UserMangementApp/src/main/java/com/lti/controller/EditServlet.java | Ashleshk/Java-Online-Coding | 9955035bfb98c34f98e079a6ed7a7e0718e45655 | [
"MIT"
] | null | null | null | 27.48 | 119 | 0.783115 | 6,159 | package com.lti.controller;
import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.lti.dao.UserInfoDao;
import com.lti.dao.UserInfoDaoImpl;
import com.lti.model.UserInfo;
/**
* Servlet implementation class EditServlet
*/
@WebServlet("/edit")
public class EditServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private UserInfoDao userDAO;
public void init() {
userDAO = new UserInfoDaoImpl();
}
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doGet(request, response);
}
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
int id = Integer.parseInt(request.getParameter("userId"));
UserInfo existingUser = userDAO.findUserById(id);
System.out.println(id + " "+existingUser.getUserName()+" "+existingUser.getUserEmail());
RequestDispatcher dispatcher = request.getRequestDispatcher("Update.jsp");
request.setAttribute("user", existingUser);
dispatcher.forward(request, response);
}
}
|
3e0e82c07e0b06e8531263bc4e2383939165ec82 | 20,025 | java | Java | Inuktitut-Java/morph/Decomposition.java | mark-walle/InuktitutComputing | 1f96b7b1b7270f5068cd23074be8ef9b95190d88 | [
"Unlicense",
"BSD-3-Clause"
] | 5 | 2015-08-02T14:09:57.000Z | 2021-11-16T21:49:03.000Z | Inuktitut-Java/morph/Decomposition.java | mark-walle/InuktitutComputing | 1f96b7b1b7270f5068cd23074be8ef9b95190d88 | [
"Unlicense",
"BSD-3-Clause"
] | null | null | null | Inuktitut-Java/morph/Decomposition.java | mark-walle/InuktitutComputing | 1f96b7b1b7270f5068cd23074be8ef9b95190d88 | [
"Unlicense",
"BSD-3-Clause"
] | 4 | 2017-02-15T18:41:18.000Z | 2021-01-06T20:42:23.000Z | 40.95092 | 116 | 0.455431 | 6,160 | //@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
//
// -----------------------------------------------------------------------
// (c) Conseil national de recherches Canada, 2002
// (c) National Research Council of Canada, 2002
// -----------------------------------------------------------------------
// -----------------------------------------------------------------------
// Document/File: Decomposition.java
//
// Type/File type: code Java / Java code
//
// Auteur/Author: Benoit Farley
//
// Organisation/Organization: Conseil national de recherches du Canada/
// National Research Council Canada
//
//   Date de création/Date of creation:
//
//   Description: Classe décrivant un terme décomposé en ses diverses
//                parties: base de mot et suffixes.
//
// -----------------------------------------------------------------------
package morph;
import java.util.Arrays;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import data.Affix;
import data.Base;
import data.Morpheme;
import script.Orthography;
import utilities.Debugging;
// Decomposition:
// String word
// MorceauRacine stem:
// Base racine:
// ...
// String terme
// int position
// String niveau
// Object [] morphParts:
// MorceauAffixe morceau:
// SurfaceFormOfAffix form
//
/**
 * A word decomposed into its parts: a word base (root) and a sequence of affixes.
 */
public class Decomposition extends Object implements Comparable {

    /** The surface word that was decomposed. */
    String word;
    /** The root (word base) part of the decomposition. */
    RootPartOfComposition stem;
    /** The affix parts (AffixPartOfComposition) following the root, in order. */
    Object[] morphParts;

    // When a Decomposition object is created, the character string inside the word
    // corresponding to each part is determined, from the position saved in the
    // AffixPartOfComposition object during decomposition.
    public Decomposition(String word, RootPartOfComposition r, Object[] parts) {
        this.word = word;
        stem = r;
        String origState = stem.arc.startState.id;
        int nextPos = word.length();
        // Walk the affix parts right-to-left, cutting the word into the surface string
        // ("terme") of each part.
        for (int i = parts.length - 1; i >= 0; i--) {
            AffixPartOfComposition m = (AffixPartOfComposition) parts[i];
            int pos = m.getPosition();
            m.setTerme(
                Orthography.orthographyICI(word.substring(pos, nextPos), false) );
            nextPos = pos;
        }
        for (int i=0; i<parts.length; i++) {
            // 'arc' of each part: link the part to the state-machine arc whose destination
            // state matches the current state, then advance to that arc's start state.
            AffixPartOfComposition m = (AffixPartOfComposition) parts[i];
            for (int j=0; j<m.arcs.length; j++) {
                if (m.arcs[j].destState.id.equals(origState)) {
                    m.arc = m.arcs[j];
                    origState = m.arc.startState.id;
                    break;
                }
            }
        }
        morphParts = parts;
        //stem.terme = word.substring(0,nextPos);
    }

    public RootPartOfComposition getRootMorphpart() {
        return stem;
    }

    public Object[] getMorphParts() {
        return morphParts;
    }

    public void setMorphParts(Object [] parts) {
        morphParts = parts;
    }

    /**
     * @return the last affix part, or null when the decomposition is only a root.
     */
    public AffixPartOfComposition getLastMorphpart() {
        if (morphParts.length==0)
            return null;
        else
            return (AffixPartOfComposition)morphParts[morphParts.length-1];
    }

    public int getNbMorphparts() {
        return morphParts.length;
    }

    // // - Les racines les plus longues en premier
    // // - Le nombre mininum de morphParts
    // // - Les racines connues en premier
    // public int compareTo(Object a) {
    // int valeurRetour = 0;
    // Decomposition otherDec = (Decomposition) a;
    // boolean known = ((Base) stem.getRoot()).known;
    // boolean otherDecConnue = ((Base) otherDec.stem.getRoot()).known;
    // if ((known && otherDecConnue) || (!known && !otherDecConnue))
    // valeurRetour = 0;
    // else if (known && !otherDecConnue)
    // valeurRetour = -1;
    // else if (!known && otherDecConnue)
    // valeurRetour = 1;
    // if (valeurRetour == 0) {
    // Integer lengthOfRoot =
    // new Integer(((Base) stem.getRoot()).morpheme.length());
    // Integer lengthOfRootOfOtherDec =
    // new Integer(
    // ((Base) otherDec.stem.getRoot()).morpheme.length());
    // valeurRetour = lengthOfRoot.compareTo(lengthOfRootOfOtherDec);
    // if (valeurRetour == 0) {
    // Integer nbOfMorphparts = new Integer(morphParts.length);
    // Integer nbOfMorphpartsOfOtherDec =
    // new Integer(otherDec.morphParts.length);
    // valeurRetour = nbOfMorphparts.compareTo(nbOfMorphpartsOfOtherDec);
    // }
    // }
    // return valeurRetour;
    // }

    // Ranking of decompositions:
    // - known roots first (currently disabled -- see the commented-out checks below)
    // - longest roots first
    // - minimum number of morphParts first
    public int compareTo(Object obj) {
        int returnValue = 0;
        Decomposition otherDec = (Decomposition) obj;
        // boolean known = ((Base) stem.getRoot()).known;
        // boolean otherDecConnue = ((Base) otherDec.stem.getRoot()).known;
        // if ((known && otherDecConnue) || (!known && !otherDecConnue))
        // returnValue = 0;
        // else if (known && !otherDecConnue)
        // returnValue = -1;
        // else if (!known && otherDecConnue)
        // returnValue = 1;
        if (returnValue == 0) {
            Integer lengthOfRoot =
                new Integer(((Base) stem.getRoot()).morpheme.length());
            Integer lengthOfRootOfOtherDec =
                new Integer(
                    ((Base) otherDec.stem.getRoot()).morpheme.length());
            // Reversed comparison: longer roots sort first.
            returnValue = lengthOfRootOfOtherDec.compareTo(lengthOfRoot);
            if (returnValue == 0) {
                Integer nbOfMorphparts = new Integer(morphParts.length);
                Integer nbOfMorphpartsOfOtherDec = new Integer(otherDec.morphParts.length);
                returnValue = nbOfMorphparts.compareTo(nbOfMorphpartsOfOtherDec);
            }
        }
        return returnValue;
    }

    /**
     * @return true when the two decompositions have the same string signature (same surface
     *         forms and morpheme ids, per {@link #toStr2()}).
     */
    public boolean isEqualDecomposition(Decomposition dec) {
        if (this.toStr2().equals(dec.toStr2()))
            return true;
        else
            return false;
    }

    // Note: could be done with HashSets: probably faster.
    /**
     * Remove duplicate decompositions (same toStr2() signature), keeping the first
     * occurrence of each and preserving order.
     */
    static Decomposition[] removeMultiples(Decomposition[] decs) {
        if (decs == null || decs.length == 0)
            return decs;
        Vector v = new Vector();
        Vector vc = new Vector();
        v.add(decs[0]);
        vc.add(decs[0].toStr2());
        for (int i = 1; i < decs.length; i++) {
            String c = decs[i].toStr2();
            if (!vc.contains(c)) {
                v.add(decs[i]);
                vc.add(c);
            }
        }
        return (Decomposition[]) v.toArray(new Decomposition[] {
        });
    }

    // Eliminate the decompositions that contain a sequence of suffixes for which a
    // combined suffix exists, so as to keep only the decomposition in which the
    // combined suffix itself occurs.
    static Decomposition[] removeCombinedSuffixes(Decomposition decs[]) {
        Object[][] objs = new Object[decs.length][2];
        for (int i = 0; i < decs.length; i++) {
            objs[i][0] = decs[i];
            objs[i][1] = new Boolean(true);
        }
        // For each decomposition that contains a combined suffix:
        for (int i = 0; i < objs.length; i++) {
            if (((Boolean) objs[i][1]).booleanValue()) {
                // Only consider decompositions that have not been rejected.
                Decomposition dec = (Decomposition) objs[i][0];
                // Morphemes of this decomposition
                Vector vParts = new Vector(Arrays.asList(dec.morphParts));
                vParts.add(0,dec.stem);
                // For each combined morpheme, find the morpheme preceding it and the one
                // following it, and check in the other retained decompositions whether
                // those two bounding morphemes enclose the elements of the combined
                // morpheme. If so, reject those decompositions.
                for (int j = 0; j < vParts.size(); j++) {
                    PartOfComposition morphPart = (PartOfComposition) vParts.elementAt(j);
                    Morpheme morph = morphPart.getMorpheme();
                    String cs[] = null;
                    if (morph != null)
                        cs = morph.getCombiningParts();
                    // Only for combined morphemes.
                    if (cs != null) {
                        // Find the decompositions whose combined-suffix elements are
                        // flanked on both sides by the same morphemes, and remove them
                        // from the list.
                        String prec, follow;
                        // Morpheme preceding the combined morpheme.
                        if (j == 0)
                            prec = null;
                        else if (j == 1)
                            prec = dec.stem.root.id;
                        else
                            prec = ((PartOfComposition) vParts.elementAt(j - 1)).getMorpheme().id;
                        // Morpheme following the combined morpheme.
                        if (j == vParts.size() - 1)
                            follow = null;
                        else
                            follow = ((PartOfComposition) vParts.elementAt(j + 1)).getMorpheme().id;
                        // Check in the retained decompositions.
                        int k = 0;
                        while (k < objs.length) {
                            // Retained decompositions only.
                            if (((Boolean) objs[k][1]).booleanValue()) {
                                Decomposition deck = (Decomposition) objs[k][0];
                                Object morphPartsk[] = (Object[]) deck.morphParts;
                                Vector vPartsk = new Vector(Arrays.asList(deck.morphParts));
                                vPartsk.add(0,deck.stem);
                                int l = 0;
                                boolean cont = true;
                                boolean inCombined = false;
                                int iCombined = 0;
                                // Examine each morpheme of this decomposition to check
                                // whether it corresponds to an element of the combined
                                // morpheme.
                                while (l < vPartsk.size() && cont) {
                                    // MorceauAffixe mck = (MorceauAffixe) morphPartsk[l];
                                    // Affix affk = mck.getAffix();
                                    Morpheme morphk = ((PartOfComposition)vPartsk.elementAt(l)).getMorpheme();
                                    if (inCombined) {
                                        // One or more morphemes have already been matched
                                        // to elements of the combined morpheme. Check
                                        // this one.
                                        // if (affk.id.equals(cs[iCombined])) {
                                        if (morphk.id.equals(cs[iCombined])) {
                                            // It is also an element of the combined
                                            // morpheme.
                                            iCombined++;
                                            if (iCombined == cs.length) {
                                                // This is the last element of the combined
                                                // morpheme. Check whether the morpheme
                                                // that follows it is the same as the
                                                // morpheme following the combined
                                                // morpheme. If so, reject this
                                                // decomposition. Either way, stop this
                                                // check.
                                                String followk;
                                                if (l == vPartsk.size() - 1)
                                                    followk = null;
                                                else
                                                    followk = ((PartOfComposition) vPartsk.elementAt(l + 1))
                                                            .getMorpheme().id;
                                                if ( (follow==null && followk==null) ||
                                                        (follow!=null && followk!=null && followk.equals(follow)) )
                                                    // reject this decomposition
                                                    objs[k][1] = new Boolean(
                                                            false);
                                                cont = false;
                                            }
                                        } else {
                                            // Not an element of the combined morpheme.
                                            // Reset.
                                            inCombined = false;
                                            iCombined = 0;
                                        }
                                    } else {
                                        // No morpheme has yet been recognized as the
                                        // first element of the combined morpheme. Is
                                        // this one?
                                        if (morphk.id.equals(cs[iCombined])) {
                                            // First element of the combined morpheme.
                                            inCombined = true;
                                            iCombined++;
                                            String preck;
                                            // Check whether the morpheme preceding it is
                                            // the same as the morpheme preceding the
                                            // combined morpheme. If so, continue
                                            // checking. Otherwise stop checking this
                                            // decomposition.
                                            if (l == 0)
                                                preck = null;
                                            else if (l == 1)
                                                preck = deck.stem.root.id;
                                            else
                                                preck = ((PartOfComposition) vPartsk.elementAt(l - 1))
                                                        .getMorpheme().id;
                                            if ( (preck == null && prec != null) ||
                                                    (preck != null && prec == null) ||
                                                    (preck != null && prec != null && !preck.equals(prec)))
                                                cont = false;
                                        }
                                    }
                                    l++;
                                }
                            }
                            k++;
                        }
                    }
                }
            }
        }
        Vector v = new Vector();
        for (int i = 0; i < objs.length; i++)
            if (((Boolean) objs[i][1]).booleanValue())
                v.add(objs[i][0]);
        Decomposition ndecs[] = (Decomposition[]) v
                .toArray(new Decomposition[] {});
        return ndecs;
    }

    /*
     * --------------------------------------------------------------------
     * Writing out a decomposition
     * --------------------------------------------------------------------
     */
    static String startDelimitor = "{";
    static String endDelimitor = "}";
    static String interDelimitor = ":";

    /*
     * {<surface form>:<morpheme signature>}...
     */
    public String toStr2() {
        StringBuffer sb = new StringBuffer();
        Object[] morphParts = getMorphParts();
        sb.append(stem.toStr());
        for (int j = 0; j < morphParts.length; j++) {
            AffixPartOfComposition ma = (AffixPartOfComposition) morphParts[j];
            // sb.append("|");
            sb.append(ma.toStr());
        }
        return sb.toString();
    }

    /**
     * Return the meanings, in the given language, of the morphemes of the decomposition
     * expressed by <code>decstr</code>, one string per morpheme, optionally prefixed with
     * the raw part, the surface form, or the morpheme id (joined with "---").
     */
    static public String[] getMeaningsInArrayOfStrings (String decstr, String lang,
            boolean includeSurface, boolean includeId) {
        DecompositionExpression de = new DecompositionExpression(decstr);
        String mngs[] = de.getMeanings(lang);
        for (int i=0; i<mngs.length; i++) {
            if (includeSurface && includeId)
                mngs[i] = de.parts[i].str+ "---" + mngs[i];
            else if (includeSurface)
                mngs[i] = de.parts[i].surface+ "---" + mngs[i];
            else if (includeId)
                mngs[i] = de.parts[i].morphid+ "---" + mngs[i];
        }
        return mngs;
    }

    /**
     * Same as {@link #getMeaningsInArrayOfStrings} but concatenated into a single string,
     * each meaning wrapped in "{...}".
     */
    static public String getMeaningsInString (String decstr, String lang,
            boolean includeSurface, boolean includeId) {
        DecompositionExpression de = new DecompositionExpression(decstr);
        StringBuffer sb = new StringBuffer();
        String mngs[] = de.getMeanings(lang);
        for (int i=0; i<mngs.length; i++)
            if (includeSurface && includeId)
                sb.append("{").append(de.parts[i].str).append("---").append(mngs[i]).append("}");
            else if (includeSurface)
                sb.append("{").append(de.parts[i].surface).append("---").append(mngs[i]).append("}");
            else if (includeId)
                sb.append("{").append(de.parts[i].morphid).append("---").append(mngs[i]).append("}");
            else
                sb.append("{").append(mngs[i]).append("}");
        return sb.toString();
    }

    //----------------------------------------------------------------------------------------------
    /*
     * {<surface form>:<morpheme signature>}{...}...
     */
    static public class DecompositionExpression {
        //
        String decstr;
        String partsStr[];
        DecPart parts[];
        //
        public DecompositionExpression (String decstr) {
            this.decstr = decstr;
            partsStr = expr2parts();
            parts = new DecPart[partsStr.length];
            for (int i=0; i<parts.length; i++)
                parts[i] = new DecPart(partsStr[i]);
        }

        /**
         * @return the meaning of each part in the requested language ("en" selects the
         *         English meaning; any other value yields the French one), with slash
         *         separators lightly cleaned up.
         */
        public String[] getMeanings(String lang) {
            String meanings[] = new String[parts.length];
            for (int i=0; i<parts.length; i++) {
                meanings[i] =
                    lang.equals("en")?
                            Morpheme.getMorpheme(parts[i].morphid).englishMeaning
                            :
                            Morpheme.getMorpheme(parts[i].morphid).frenchMeaning;
                meanings[i] = meanings[i].replaceAll(" /", " ");
                // NOTE(review): String.replace treats "^/" as a literal two-character
                // sequence, not a regex anchor; if the intent was to strip a leading "/",
                // replaceFirst("^/", "") was probably meant -- confirm.
                meanings[i] = meanings[i].replace("^/", "");
            }
            return meanings;
        }

        // Split the expression into its successive "{...}" parts.
        private String[] expr2parts() {
            Pattern p = Pattern.compile("\\{[^}]+?\\}");
            Matcher mp = p.matcher(decstr);
            Vector v = new Vector();
            int pos=0;
            while (mp.find(pos)) {
                v.add(mp.group());
                pos = mp.end();
            }
            return (String[])v.toArray(new String[]{});
        }

        /** One "{surface:morphid}" part of a decomposition expression. */
        static public class DecPart {
            //
            String str;
            String surface;
            String morphid;
            //
            public DecPart (String str) {
                this.str = str;
                Pattern p = Pattern.compile("\\"+startDelimitor+"(.+?)"+"\\"+endDelimitor);
                Matcher m = p.matcher(str);
                // NOTE(review): the result of matches() is ignored; m.group(1) below
                // throws IllegalStateException when the input does not match -- confirm
                // inputs are always well-formed "{...}" strings.
                m.matches();
                String[] partParts = Pattern.compile(":").split(m.group(1));
                surface = partParts[0];
                morphid = partParts[1];
            }
            public DecPart (String terme, String id) {
                surface = terme;
                morphid = id;
                str = startDelimitor + surface + interDelimitor + id + endDelimitor;
            }
        }
    }
}
|
3e0e82c5389a597ba9cf44ed433ec7b083fcca32 | 435 | java | Java | saturn-console-api/src/main/java/com/vip/saturn/job/console/service/UtilsService.java | universefeeler/Saturn | ff06233d7cd9e7bc49f28e5ac6619fe30727da5d | [
"Apache-2.0"
] | 1 | 2018-12-04T04:03:35.000Z | 2018-12-04T04:03:35.000Z | saturn-console-api/src/main/java/com/vip/saturn/job/console/service/UtilsService.java | universefeeler/Saturn | ff06233d7cd9e7bc49f28e5ac6619fe30727da5d | [
"Apache-2.0"
] | 1 | 2018-08-08T02:30:18.000Z | 2018-08-08T02:30:18.000Z | saturn-console-api/src/main/java/com/vip/saturn/job/console/service/UtilsService.java | universefeeler/Saturn | ff06233d7cd9e7bc49f28e5ac6619fe30727da5d | [
"Apache-2.0"
] | 1 | 2021-11-22T06:01:57.000Z | 2021-11-22T06:01:57.000Z | 24.166667 | 104 | 0.816092 | 6,161 | package com.vip.saturn.job.console.service;
import com.vip.saturn.job.console.domain.ForecastCronResult;
import com.vip.saturn.job.console.exception.SaturnJobConsoleException;
import java.util.List;
/**
 * Miscellaneous console utilities: cron-expression validation/forecasting and time-zone
 * listing.
 *
 * @author hebelala
 */
public interface UtilsService {

	/**
	 * Validate the given cron expression against the given time zone and build a forecast
	 * result for it.
	 *
	 * @param timeZone the time-zone id the cron expression is evaluated in
	 * @param cron the cron expression to check
	 * @return the forecast result for the validated expression
	 * @throws SaturnJobConsoleException if the time zone or cron expression is invalid
	 */
	ForecastCronResult checkAndForecastCron(String timeZone, String cron) throws SaturnJobConsoleException;

	/**
	 * @return the available time-zone ids
	 * @throws SaturnJobConsoleException if the list cannot be produced
	 */
	List<String> getTimeZones() throws SaturnJobConsoleException;
}
|
3e0e82cd45f72713e1f779d07429947e416023d8 | 11,739 | java | Java | lucky-noxml/src/main/java/com/lucky/jacklamb/sqlcore/mapper/jpa/JpaSample.java | FK7075/lucky-noxml | 9f2f479d88732c92bb16f4194759c5fd39eb6464 | [
"WTFPL"
] | 3 | 2020-04-15T15:17:37.000Z | 2020-08-31T01:45:57.000Z | lucky-noxml/src/main/java/com/lucky/jacklamb/sqlcore/mapper/jpa/JpaSample.java | FK7075/lucky-noxml | 9f2f479d88732c92bb16f4194759c5fd39eb6464 | [
"WTFPL"
] | 18 | 2020-04-02T09:17:06.000Z | 2022-02-01T01:03:48.000Z | lucky-noxml/src/main/java/com/lucky/jacklamb/sqlcore/mapper/jpa/JpaSample.java | FK7075/lucky-noxml | 9f2f479d88732c92bb16f4194759c5fd39eb6464 | [
"WTFPL"
] | 1 | 2021-06-17T08:45:06.000Z | 2021-06-17T08:45:06.000Z | 41.189474 | 202 | 0.573132 | 6,162 | package com.lucky.jacklamb.sqlcore.mapper.jpa;
import com.google.gson.reflect.TypeToken;
import com.lucky.jacklamb.sqlcore.util.PojoManage;
import com.lucky.jacklamb.utils.base.LuckyUtils;
import com.lucky.jacklamb.utils.file.Resources;
import com.lucky.jacklamb.utils.reflect.ClassUtils;
import com.lucky.jacklamb.utils.regula.Regular;
import java.lang.reflect.Field;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author fk7075
* @version 1.0.0
* @date 2020/6/29 12:48 下午
*
* findBy语法解析流程
* eg: findByNameStartingWithAndAgeAndPriceBetween
* 1.去掉前缀
* NameStartingWithAndAgeAndPriceBetween
* 2.将find表达式中包含的实体属性全部替换为 "="
* =StartingWithAnd=And=Between
* 3.使用编码表[find-coding.json]将2得到的表达式编码(以SQL运算符的长度降序为顺序)
* =@21@01=@01=@05
* 4.分离SQL运算符
* [@21@01,@01,@05]
* 5.分离出2中的原始SQL运算符
* =StartingWithAnd=And=Between --> [StartingWithAnd,And,Between]
* 6.使用"="替换原始表达式中的[StartingWithAnd,And,Between]部分
* NameStartingWithAndAgeAndPriceBetween -->Name=Age=Price
* 7.分离出参数终结符
* Name=Age=Price --> [Name,Age,Price]
* 8.重新组合参数终结符和SQL运算符
* [Name,Age,Price] + [@21@01,@01,@05] = [Name,@21@01,Age,@01,Price,@05]
* 9.解码运算
* [Name,@21@01,Age,@01,Price,@05] --> ...WHERE name LIKE ?s AND age AND price BETWEEN ? AND ?
* ----------------------------------------------------------------------------------------------------------------------------------
*
* 一.findByNameStartingWithAndAgeAndPriceBetween -> NameStartingWithAndAgeAndPriceBetween
* 二.NameStartingWithAndAgeAndPriceBetween -> =StartingWithAnd=And=Between
* 1. opeList : StartingWithAnd=And=Between -> =@21@01=@01=@05 -> [@21@01,@01,@05]
* 2.1. opeSourceList : =StartingWithAnd=And=Between -> [StartingWithAnd,And,Between]
* 2.2. varList : NameStartingWithAndAgeAndPriceBetween+[StartingWithAnd,And,Between] -> Name=Age=Price= ->[Name,Age,Price]
* 三.[@21@01,@01,@05] + [Name,Age,Price] -> [Name,@21@01,Age,@01,Price,@05] -> ...WHERE name LIKE ?s AND age AND price BETWEEN ? AND ?
*
* ----------------------------------------------------------------------------------------------------------------------------------
*/
public class JpaSample {

    // Pattern a valid JPA query method name must match: find/get/read ... By ...
    private final String FIND_BY="^((find|get|read)([\\s\\S]*)By)([\\s\\S]*)$";

    // Pattern of one encoded operator token (e.g. "@01").
    // NOTE(review): appears unused within this class — confirm before removing.
    private final String REG = "\\@\\d\\d";

    // Entity class the generated SQL selects from.
    private Class<?> pojoClass;

    /**
     * Operator encoding table: SQL operator keyword (e.g. "And") -> code (e.g. "@01").
     */
    private static Map<String, String> operationMap;

    /**
     * Operator decoding table: code (e.g. "@01") -> SQL fragment.
     */
    private static Map<String, String> parsingMap;

    /**
     * All SQL operator keywords sorted by descending length, so that longer
     * operators (e.g. "StartingWith") are matched before their shorter prefixes.
     */
    private static List<String> lengthSortSqlOpe;

    /**
     * All entity property names (terminals) sorted by descending length,
     * for the same longest-match-first reason.
     */
    private List<String> lengthSortField;

    /**
     * Prefix of the generated SELECT statement; the "@:ResultColumn"
     * placeholder is substituted at the end of {@link #sampleToSql(String)}.
     */
    private StringBuilder selectSql;

    /**
     * Map from entity property name (first letter capitalized) to table column name.
     */
    private Map<String, String> fieldColumnMap;

    static {
        // Load the operator encoding/decoding tables from the bundled JSON config files.
        TypeToken type = new TypeToken<Map<String, String>>(){};
        operationMap= (Map<String, String>) Resources.getObject(type,"/lucky-config/config/jpa-coding.json");
        parsingMap=(Map<String, String>) Resources.getObject(type,"/lucky-config/config/jpa-decoding.json");
        lengthSortSqlOpe=new ArrayList<>(operationMap.keySet());
        Collections.sort(lengthSortSqlOpe, new SortByLengthComparator());
    }

    /**
     * Builds the SELECT prefix and the property-to-column map for the given entity.
     *
     * @param pojoClass entity class mapped to a database table
     * @param dbname    database configuration name used to resolve table/column names
     */
    public JpaSample(Class<?> pojoClass,String dbname) {
        this.pojoClass=pojoClass;
        selectSql = new StringBuilder("SELECT @:ResultColumn FROM ").append(PojoManage.getTable(pojoClass,dbname));
        fieldColumnMap = new HashMap<>();
        Field[] fields = ClassUtils.getAllFields(pojoClass);
        for (Field field : fields) {
            // Key: property name with a capitalized first letter; value: column name.
            fieldColumnMap.put(LuckyUtils.TableToClass(field.getName()), PojoManage.getTableField(dbname,field));
        }
        lengthSortField = new ArrayList<>(fieldColumnMap.keySet());
        Collections.sort(lengthSortField, new SortByLengthComparator());
    }

    /**
     * Extracts the result columns from the part of the findBy expression that
     * precedes "By". E.g. "findNameByAge" selects only the name column, while
     * "findBy..." or "findAllBy..." selects "*".
     *
     * @param jpaSample the full JPA query method name
     * @return comma-separated column list, or "*" when no explicit columns are named
     */
    public String getSelectResultColumn(String jpaSample){
        jpaSample=jpaSample.substring(0,jpaSample.indexOf("By"));
        if(jpaSample.startsWith("find")||jpaSample.startsWith("read")){
            jpaSample= jpaSample.substring(4);
        }else{
            // "get" prefix is three characters long.
            jpaSample= jpaSample.substring(3);
        }
        if("".equals(jpaSample)||"All".equals(jpaSample)){
            return "*";
        }
        StringBuilder result=new StringBuilder();
        for (String field : lengthSortField) {
            if(jpaSample.contains(field)){
                result.append(fieldColumnMap.get(field)).append(",");
                jpaSample=jpaSample.replaceAll(field,"");
            }
        }
        if(!"".equals(jpaSample)){
            // Leftover text means an unrecognized property name in the result-column part.
            throw new RuntimeException("不符合JPA规范的查询方法命名!无法识别的「\"结果列(ResultColumn)\"」: \"" + jpaSample+"\"");
        }
        String resultStr=result.toString();
        return resultStr.endsWith(",")?resultStr.substring(0,resultStr.length()-1):resultStr;
    }

    /**
     * Parses a JPA findBy expression into a SQL statement.
     *
     * @param jpaSample JPA expression such as findByLastnameAndFirstname,
     *                  readByLastnameAndFirstname or getByLastnameAndFirstname
     * @return the generated SQL SELECT statement
     * @throws IllegalJPAExpressionException when the method name does not match FIND_BY
     */
    public String sampleToSql(String jpaSample) throws IllegalJPAExpressionException {
        /*
         Worked example: findByNameStartingWithAndAgeAndPriceBetween
         1. Strip the prefix        -> NameStartingWithAndAgeAndPriceBetween
         2. Replace property names  -> =StartingWithAnd=And=Between
            2.1 opeList       : encode operators -> =@21@01=@01=@05 -> [@21@01, @01, @05]
            2.2 opeSourceList : split on "="     -> [StartingWithAnd, And, Between]
            2.3 varList       : strip operators  -> Name=Age=Price  -> [Name, Age, Price]
         3. Interleave and decode   -> [Name,@21@01,Age,@01,Price,@05]
            -> ...WHERE name LIKE ?s AND age AND price BETWEEN ? AND ?
        */
        if(!Regular.check(jpaSample,FIND_BY)){
            throw new IllegalJPAExpressionException("不符合JPA规范的查询方法命名:" + jpaSample);
        }
        String jpaCopy=jpaSample;
        // Strip everything up to and including "By".
        jpaSample = jpaSample.substring(jpaSample.indexOf("By")+2);
        String copy=jpaSample;
        // Replace every known property name with the marker "=".
        for (String field : lengthSortField) {
            jpaSample = jpaSample.replaceAll(field, "=");
        }
        String copy1=jpaSample;
        // Encode operator keywords (longest first) into their @NN codes.
        for (String ope : lengthSortSqlOpe) {
            copy1=copy1.replaceAll(ope,operationMap.get(ope));
        }
        // Encoded operators, in order of appearance.
        List<String> opeList=Arrays.asList(copy1.split("=")).stream().
                filter(a -> a != null && !"".equals(a)).collect(Collectors.toList());
        // Raw operator keywords, in order of appearance.
        List<String> opeSourceList=Arrays.asList(jpaSample.split("=")).stream().
                filter(a -> a != null && !"".equals(a)).collect(Collectors.toList());
        List<String> copyOpeList = new ArrayList<>(opeSourceList);
        Collections.sort(copyOpeList, new SortByLengthComparator());
        jpaSample=copy;
        // Replace the operator keywords with "=" to isolate the property names.
        for (String ope : copyOpeList) {
            jpaSample=jpaSample.replaceAll(ope,"=");
        }
        List<String> varList=Arrays.asList(jpaSample.split("="))
                .stream().filter(a -> a != null && !"".equals(a)).collect(Collectors.toList());
        // Re-interleave property names and operators, then decode into SQL.
        List<String> varOpeSortList = getVarOpeSortList(varList, opeList, jpaSample);
        try {
            joint(varOpeSortList);
            return selectSql.toString().replaceAll("@:ResultColumn",getSelectResultColumn(jpaCopy));
        } catch (SQLException e) {
            throw new RuntimeException("错误的Mapper方法[不符合Jpa规范]==>"+jpaCopy,e);
        }
    }

    /**
     * Merges the property-name list and the operator list back into a single
     * list that preserves the order they appeared in the original expression.
     *
     * @param varList   property names (terminals), in order of appearance
     * @param opeList   encoded operators (@NN codes), in order of appearance
     * @param jpaSample the expression with operator keywords replaced by "=",
     *                  used to decide whether it starts/ends with a terminal
     * @return the interleaved list of terminals and operator codes
     */
    public List<String> getVarOpeSortList(List<String> varList, List<String> opeList, String jpaSample) {
        List<String> varOpeSortList = new ArrayList<>();
        boolean varStatr = jpaSample.startsWith(varList.get(0));
        boolean varEnd = jpaSample.endsWith(varList.get(varList.size() - 1));
        int varSize = varList.size();
        int opeSize = opeList.size();
        if (varStatr && varEnd) {// starts with a terminal, ends with a terminal
            for (int i = 0; i < opeSize; i++) {
                varOpeSortList.add(varList.get(i));
                varOpeSortList.add(opeList.get(i));
            }
            varOpeSortList.add(varList.get(varSize - 1));
        } else if (varStatr && !varEnd) {// starts with a terminal, ends with an operator
            for (int i = 0; i < opeSize; i++) {
                varOpeSortList.add(varList.get(i));
                varOpeSortList.add(opeList.get(i));
            }
        } else if (!varStatr && varEnd) {// starts with an operator, ends with a terminal
            for (int i = 0; i < varSize; i++) {
                varOpeSortList.add(opeList.get(i));
                varOpeSortList.add(varList.get(i));
            }
        } else {// starts with an operator, ends with an operator
            for (int i = 0; i < varSize; i++) {
                varOpeSortList.add(opeList.get(i));
                varOpeSortList.add(varList.get(i));
            }
            varOpeSortList.add(opeList.get(opeSize - 1));
        }
        return varOpeSortList;
    }

    /**
     * Decodes the interleaved terminal/operator list into SQL fragments and
     * appends them to {@code selectSql}.
     *
     * @param varOpeSortList interleaved property names and @NN operator codes
     * @throws SQLException when a terminal is not a known entity property
     */
    public void joint(List<String> varOpeSortList) throws SQLException {
        if(varOpeSortList.isEmpty())
            return;
        // @13 marks OrderBy (per the original inline comments); only prepend
        // WHERE when the expression starts with an actual condition.
        if(!varOpeSortList.get(0).startsWith("@13")){
            selectSql.append(" WHERE ");
        }
        for (int i = 0; i < varOpeSortList.size(); i++) {
            String currStr=varOpeSortList.get(i);
            if(currStr.startsWith("@")){// operator code(s)
                // Split a compound code like "@21@01" into ["@21", "@01"].
                currStr=currStr.replaceAll("@","_@").substring(1);
                String[] opeArray=currStr.split("_");
                for (int j = 0; j < opeArray.length; j++) {
                    if(opeArray[0].equals("@28")&&i!=0){
                        // @28 fragments embed the preceding column via the "@X" placeholder.
                        selectSql.append(parsingMap.get(opeArray[j]).replaceAll("@X",fieldColumnMap.get(varOpeSortList.get(i-1))));
                        continue;
                    }
                    selectSql.append(parsingMap.get(opeArray[j]));
                }
            }else {// terminal (entity property name)
                if (varOpeSortList.size() == 1 ||
                        (i == varOpeSortList.size() - 1 && (varOpeSortList.get(i - 1).endsWith("@01") || varOpeSortList.get(i - 1).endsWith("@02")))
                        ||
                        (i != varOpeSortList.size() - 1 && (varOpeSortList.get(i + 1).startsWith("@01") || varOpeSortList.get(i + 1).startsWith("@02") || varOpeSortList.get(i + 1).startsWith("@13")))) {
                    if (fieldColumnMap.containsKey(currStr)) {
                        selectSql.append(fieldColumnMap.get(currStr) + " = ? ");
                    } else {
                        throw new SQLException("无法识别的实体属性:\"" + currStr + "\" ,实体类为:" + pojoClass);
                    }
                    // Terminal followed by Or/And/OrderBy            ==> name = ?
                    // Last terminal with a preceding Or/And operator ==> name = ?
                } else if (i != varOpeSortList.size() - 1 && varOpeSortList.get(i + 1).startsWith("@28")) {
                    // The following @28 fragment embeds this column itself; emit nothing here.
                    continue;
                } else {
                    if (fieldColumnMap.containsKey(currStr)) {
                        selectSql.append(fieldColumnMap.get(currStr));
                    } else {
                        throw new SQLException("无法识别的实体属性:\"" + currStr + "\" ,实体类为:" + pojoClass);
                    }
                }
            }
        }
    }
}
/**
 * Orders strings by descending length (longest first); strings of equal length
 * compare as equal. Used so that longer property names / SQL operator keywords
 * are matched and replaced before their shorter prefixes.
 */
class SortByLengthComparator implements Comparator<String> {

    @Override
    public int compare(String var1, String var2) {
        // Descending order: compare var2's length against var1's.
        return Integer.compare(var2.length(), var1.length());
    }
}
|
3e0e82fbbfa30945e07e0455caafae49ffbf5a7b | 1,406 | java | Java | ThalesArc/src/test/java/cs01.app/SliderTests.java | CS01-UofG/CS01-Team-Project | 89a8276c2e5dd531b0e85f4e61d14de4a3ba6301 | [
"MIT"
] | null | null | null | ThalesArc/src/test/java/cs01.app/SliderTests.java | CS01-UofG/CS01-Team-Project | 89a8276c2e5dd531b0e85f4e61d14de4a3ba6301 | [
"MIT"
] | null | null | null | ThalesArc/src/test/java/cs01.app/SliderTests.java | CS01-UofG/CS01-Team-Project | 89a8276c2e5dd531b0e85f4e61d14de4a3ba6301 | [
"MIT"
] | null | null | null | 29.291667 | 83 | 0.743243 | 6,163 | package Tests;
import com.sun.javafx.application.PlatformImpl;
import cs01.ComponentFactory;
import javafx.scene.control.Slider;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for the slider produced by {@link ComponentFactory#createSlider}.
 */
public class SliderTests {

    private ComponentFactory componentFactory = new ComponentFactory();

    @BeforeClass
    public static void setup() {
        // The JavaFX toolkit must be initialized before any fx component is created.
        PlatformImpl.startup(() -> {});
    }

    @Test
    public void shouldNotBeNull() {
        assertNotNull(componentFactory.createSlider(0, 360, 40, 20, 5, 1));
    }

    @Test
    public void shouldBeCreatedCorrectly() {
        var actual = componentFactory.createSlider(0, 360, 40, 20, 5, 1);

        // Build a reference slider with the expected configuration by hand.
        var expected = new Slider();
        expected.setMin(0);
        expected.setMax(360);
        expected.setValue(40);
        expected.setMajorTickUnit(20);
        expected.setMinorTickCount(5);
        expected.setBlockIncrement(1);

        assertEquals(expected.getMin(), actual.getMin(), 0.0);
        assertEquals(expected.getMax(), actual.getMax(), 0.0);
        assertEquals(expected.getValue(), actual.getValue(), 0.0);
        assertEquals(expected.getMajorTickUnit(), actual.getMajorTickUnit(), 0.0);
        assertEquals(expected.getMinorTickCount(), actual.getMinorTickCount());
        assertEquals(expected.getBlockIncrement(), actual.getBlockIncrement(), 0.0);
    }
}
|
3e0e83fa1e08279c1239f8ab91984c75b7908847 | 2,245 | java | Java | IoTClass2021_JavaProject/src/it/beltek/ia/iotlab/edge/gateway/service/simulation/RejectFieldbusThread.java | NicoloToscani/CoAP-factory-monitoring | 3c7cf9aff13b3616b06068c94ffeee6158502184 | [
"MIT"
] | 1 | 2021-09-22T21:51:08.000Z | 2021-09-22T21:51:08.000Z | IoTClass2021_JavaProject/src/it/beltek/ia/iotlab/edge/gateway/service/simulation/RejectFieldbusThread.java | NicoloToscani/CoAP-factory-monitoring | 3c7cf9aff13b3616b06068c94ffeee6158502184 | [
"MIT"
] | null | null | null | IoTClass2021_JavaProject/src/it/beltek/ia/iotlab/edge/gateway/service/simulation/RejectFieldbusThread.java | NicoloToscani/CoAP-factory-monitoring | 3c7cf9aff13b3616b06068c94ffeee6158502184 | [
"MIT"
] | null | null | null | 23.14433 | 126 | 0.735857 | 6,164 | package it.beltek.ia.iotlab.edge.gateway.service.simulation;
import java.util.Date;
import java.util.Random;
import java.util.Timer;
import java.util.TimerTask;
import it.beltek.ia.iotlab.edge.gateway.service.RejectModbusService;
public class RejectFieldbusThread implements Runnable {
private Timer requestTimer;
private RejectModbusService rejectModbusService;
private Random unitWeightRandom;
public RejectFieldbusThread(RejectModbusService rejectModbusService) {
this.rejectModbusService = rejectModbusService;
this.requestTimer = new Timer();
this.unitWeightRandom = new Random();
this.rejectModbusService.getWeightSystem().setpoint = 50;
this.rejectModbusService.getWeightSystemSimulate().setpoint = 50;
this.rejectModbusService.getWeightSystem().lineVelocitySetpoint = 10;
this.rejectModbusService.getWeightSystemSimulate().lineVelocitySetpoint = 10;
}
@Override
public void run() {
System.out.println("Reject simulation start at " + new Date());
requestTimer.schedule(new RequestTimerTask(this), 0, 10000); // 5 s
}
private class RequestTimerTask extends TimerTask{
RejectFieldbusThread rejectFieldbusThread;
public RequestTimerTask(RejectFieldbusThread rejectFieldbusThread) {
this.rejectFieldbusThread = rejectFieldbusThread;
}
@Override
public void run() {
System.out.println("New unit on conveyor");
float unitWeight = this.rejectFieldbusThread.unitWeightRandom.nextFloat() * 100.0f;
System.out.println("Weight: " + unitWeight);
if(checkUnitWeight(unitWeight, this.rejectFieldbusThread.rejectModbusService.getWeightSystemSimulate().setpoint) == true) {
// Total units
this.rejectFieldbusThread.rejectModbusService.getWeightSystemSimulate().totalCount ++;
// Last unit weight
this.rejectFieldbusThread.rejectModbusService.getWeightSystemSimulate().weight = unitWeight;
}
}
}
private boolean checkUnitWeight(float unitWeight, float thr) {
System.out.println("Peso: " + unitWeight + " Soglia: " + thr);
if(unitWeight <= this.rejectModbusService.getWeightSystem().setpoint) {
return true;
}
else {
return false;
}
}
}
|
3e0e840a67099222cbc8f334b1792ff8378e472d | 489 | java | Java | 02_App_Android/kchartlib/src/main/java/com/github/tifezh/kchartlib/utils/WonderfulLogUtils.java | sdzczh/crypto-exchange | 51242310a6af461d9e7e25c187fc6c2bacecf03e | [
"Apache-2.0"
] | 32 | 2019-08-04T14:09:53.000Z | 2022-02-26T09:44:59.000Z | 02_App_Android/kchartlib/src/main/java/com/github/tifezh/kchartlib/utils/WonderfulLogUtils.java | sdzczh/crypto-exchange | 51242310a6af461d9e7e25c187fc6c2bacecf03e | [
"Apache-2.0"
] | null | null | null | 02_App_Android/kchartlib/src/main/java/com/github/tifezh/kchartlib/utils/WonderfulLogUtils.java | sdzczh/crypto-exchange | 51242310a6af461d9e7e25c187fc6c2bacecf03e | [
"Apache-2.0"
] | 91 | 2019-08-05T02:13:35.000Z | 2022-02-25T09:07:58.000Z | 16.3 | 52 | 0.648262 | 6,165 | package com.github.tifezh.kchartlib.utils;
import android.app.Activity;
import android.util.Log;
/**
* Created by Administrator on 2017/8/30.
*/
public class WonderfulLogUtils {
private static String TAG = "com.github.tifezh";
public static void logi(String content) {
Log.i(TAG, content);
}
public static void loge(String content) {
Log.e(TAG, content);
}
public static void logd(String content) {
Log.d(TAG, content);
}
}
|
3e0e8497e194cc6ef144e3420476764f01b2326d | 1,143 | java | Java | src/com/xmut/estore/filter/EncodingFilter.java | cntianjue/Estore | 549062c7f65836a75cb155e83ef60779f28c1b0f | [
"MIT"
] | 1 | 2019-06-11T00:58:02.000Z | 2019-06-11T00:58:02.000Z | src/com/xmut/estore/filter/EncodingFilter.java | cntianjue/Estore | 549062c7f65836a75cb155e83ef60779f28c1b0f | [
"MIT"
] | null | null | null | src/com/xmut/estore/filter/EncodingFilter.java | cntianjue/Estore | 549062c7f65836a75cb155e83ef60779f28c1b0f | [
"MIT"
] | null | null | null | 24.319149 | 129 | 0.754156 | 6,166 | package com.xmut.estore.filter;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import com.xmut.estore.db.JDBCUtils;
/**
* Servlet Filter implementation class EncodingFilter
*/
/**
 * Servlet filter that applies the character encoding configured via the
 * servlet-context {@code encoding} init parameter to every request before it
 * reaches the rest of the filter chain.
 */
public class EncodingFilter implements Filter {

    // Saved by init() so doFilter() can reach the servlet context.
    private FilterConfig filterConfig = null;

    public EncodingFilter() {
    }

    /**
     * @see Filter#init(FilterConfig)
     */
    public void init(FilterConfig fConfig) throws ServletException {
        this.filterConfig = fConfig;
    }

    /**
     * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain)
     */
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // Look up the configured encoding and force it onto the request
        // before handing control down the chain.
        String encoding = filterConfig.getServletContext().getInitParameter("encoding");
        request.setCharacterEncoding(encoding);
        chain.doFilter(request, response);
    }

    /**
     * @see Filter#destroy()
     */
    public void destroy() {
    }
}
|
3e0e84b9c13c7e16e6330ced6498a4b6ef520866 | 5,115 | java | Java | app/src/main/java/nl/babbq/conference2015/fragments/ListingFragment.java | jigar-sable/conference-app | 8bb7a0739ec29b238d92dcecf25140f137b64549 | [
"Apache-2.0"
] | 22 | 2015-11-15T20:37:11.000Z | 2019-11-05T12:50:06.000Z | app/src/main/java/nl/babbq/conference2015/fragments/ListingFragment.java | jigar-sable/conference-app | 8bb7a0739ec29b238d92dcecf25140f137b64549 | [
"Apache-2.0"
] | 1 | 2016-08-15T08:24:11.000Z | 2016-08-15T08:24:11.000Z | app/src/main/java/nl/babbq/conference2015/fragments/ListingFragment.java | jigar-sable/conference-app | 8bb7a0739ec29b238d92dcecf25140f137b64549 | [
"Apache-2.0"
] | 20 | 2015-11-12T06:54:42.000Z | 2020-05-24T08:19:26.000Z | 38.171642 | 113 | 0.671359 | 6,167 | package nl.babbq.conference2015.fragments;
import android.content.Intent;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.app.Fragment;
import android.support.v4.util.Pair;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.android.internal.util.Predicate;
import java.util.ArrayList;
import java.util.List;
import nl.babbq.conference2015.ConferenceActivity;
import nl.babbq.conference2015.MainActivity;
import nl.babbq.conference2015.R;
import nl.babbq.conference2015.adapters.HasAdapter;
import nl.babbq.conference2015.adapters.MainAdapter;
import nl.babbq.conference2015.objects.Conference;
import nl.babbq.conference2015.objects.ConferenceDay;
import nl.babbq.conference2015.utils.DividerItemDecoration;
import nl.babbq.conference2015.utils.ItemClickSupport;
import nl.babbq.conference2015.utils.Utils;
/**
* Created by nono on 10/6/15.
*/
public class ListingFragment extends Fragment implements HasAdapter {

    // Argument keys for the fragment bundle.
    private final static String DATA = "data";
    private final static String DAY = "day";

    private RecyclerView mRecyclerView;
    // Conferences shown by this fragment (already filtered to mDay).
    private List<Conference> mData = new ArrayList<>();
    // The conference day this fragment displays.
    private ConferenceDay mDay;
    private MainAdapter mAdapter;

    /**
     * Creates a fragment showing only the conferences of the given day.
     *
     * @param conferences full conference list; filtered by {@code day} before being stored
     * @param day         the day this fragment should display
     * @return a configured ListingFragment
     */
    public static ListingFragment newInstance(ArrayList<Conference> conferences, final ConferenceDay day) {
        Bundle args = new Bundle();
        args.putParcelableArrayList(DATA, filterList(conferences, day));
        args.putSerializable(DAY, day);
        ListingFragment fragment = new ListingFragment();
        fragment.setArguments(args);
        return fragment;
    }

    // Required empty public constructor (Android instantiates fragments reflectively).
    public ListingFragment() {
    }

    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        // Restore the day and the pre-filtered conference list from the arguments.
        if (getArguments() != null) {
            mDay = (ConferenceDay) getArguments().getSerializable(DAY);
            mData.addAll(getArguments().<Conference>getParcelableArrayList(DATA));
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup parent, Bundle savedInstaceState) {
        mRecyclerView = (RecyclerView)inflater
                .inflate(R.layout.fragment_listing, parent, false);
        mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        mRecyclerView.setHasFixedSize(true);
        mRecyclerView.addItemDecoration(new DividerItemDecoration(getContext(),
                DividerItemDecoration.VERTICAL_LIST));
        ItemClickSupport.addTo(mRecyclerView).setOnItemClickListener(new ItemClickSupport.OnItemClickListener() {
            @Override
            public void onItemClicked(RecyclerView recyclerView, int position, View v) {
                if (mData.get(position).getSpeaker().length() == 0) {
                    // if the speaker field is empty, it's probably a break or lunch
                    return;
                }
                // On Lollipop and above we animate the conference title
                // to the second activity
                Pair<View, String> headline = Pair.create(v.findViewById(R.id.headline),
                        getString(R.string.headline));
                Bundle bundle = ActivityOptionsCompat.makeSceneTransitionAnimation(getActivity(),
                        headline).toBundle();
                Intent intent = new Intent(getActivity(), ConferenceActivity.class);
                intent.putExtra("conference", (Parcelable)mData.get(position));
                ActivityCompat.startActivity(getActivity(), intent, bundle);
            }
        });
        if (mDay.isToday()) {
            // When showing today's schedule, scroll to the next upcoming event.
            int position = Conference.findNextEventPosition(mData);
            mRecyclerView.smoothScrollToPosition(position);
        }
        return mRecyclerView;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mAdapter = new MainAdapter(getActivity(), mData);
        mRecyclerView.setAdapter(mAdapter);
    }

    /**
     * Refreshes this fragment's list from the hosting MainActivity's conference
     * data (re-filtered for this fragment's day) and notifies the adapter.
     */
    @Override
    public void notifyDataSetChanged() {
        if (isAdded() && mAdapter != null) {
            if (getActivity() instanceof MainActivity) {
                ArrayList<Conference> newList = filterList(
                        ((MainActivity)getActivity()).getConferences(), mDay);
                if (newList != null) {
                    mData.clear();
                    mData.addAll(newList);
                }
            }
            mAdapter.notifyDataSetChanged();
        }
    }

    /**
     * Keeps only the conferences whose start date begins with the given day.
     *
     * @param list all conferences
     * @param day  the day to filter on
     * @return the conferences taking place on {@code day}
     */
    private static ArrayList<Conference> filterList(ArrayList<Conference> list, final ConferenceDay day) {
        Predicate<Conference> aDay = new Predicate<Conference>() {
            public boolean apply(Conference conference) {
                return conference.getStartDate().startsWith(day.getDay());
            }
        };
        return Utils.filter(list, aDay);
    }
}
3e0e856ef49d9a11d958bd30e0ee3a532746bb9e | 1,087 | java | Java | instrumentation-engine/src/main/java/org/glowroot/instrumentation/engine/bytecode/api/NopGetter.java | glowroot/glowroot-instrumentation | 0a70212737947f40b0cc90d90016a6b5db8f8b95 | [
"Apache-2.0"
] | 1 | 2022-01-30T08:14:32.000Z | 2022-01-30T08:14:32.000Z | instrumentation-engine/src/main/java/org/glowroot/instrumentation/engine/bytecode/api/NopGetter.java | glowroot/glowroot-instrumentation | 0a70212737947f40b0cc90d90016a6b5db8f8b95 | [
"Apache-2.0"
] | 1 | 2019-09-24T08:47:28.000Z | 2019-09-24T23:49:51.000Z | instrumentation-engine/src/main/java/org/glowroot/instrumentation/engine/bytecode/api/NopGetter.java | glowroot/glowroot-instrumentation | 0a70212737947f40b0cc90d90016a6b5db8f8b95 | [
"Apache-2.0"
] | 1 | 2019-06-20T18:36:01.000Z | 2019-06-20T18:36:01.000Z | 31.970588 | 75 | 0.74517 | 6,168 | /*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.engine.bytecode.api;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.glowroot.instrumentation.api.Getter;
// used by generated advice
public class NopGetter implements Getter<Object> {
public static final Getter<Object> GETTER = new NopGetter();
public static final Object CARRIER = new Object();
@Override
public @Nullable String get(Object carrier, String key) {
return null;
}
}
|
3e0e85aafe529df553ccfe24fb56999148686dd4 | 2,705 | java | Java | tools.hadoop/src/ext/mapred/org/apache/hadoop/mapred/JobTrackerStatistics.java | aqnotecom/java.tools | 4a60d37f51e141f9cffa0ea89582402991fa89e5 | [
"Apache-2.0"
] | null | null | null | tools.hadoop/src/ext/mapred/org/apache/hadoop/mapred/JobTrackerStatistics.java | aqnotecom/java.tools | 4a60d37f51e141f9cffa0ea89582402991fa89e5 | [
"Apache-2.0"
] | null | null | null | tools.hadoop/src/ext/mapred/org/apache/hadoop/mapred/JobTrackerStatistics.java | aqnotecom/java.tools | 4a60d37f51e141f9cffa0ea89582402991fa89e5 | [
"Apache-2.0"
] | null | null | null | 29.358696 | 93 | 0.718623 | 6,169 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2013-2016 Peng Li<dycjh@example.com>.
* This library is free software; you can redistribute it and/or modify it under the terms of
* the GNU Lesser General Public License as published by the Free Software Foundation;
*/ org.apache.hadoop.mapred;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
/**
* Collects the job tracker statistics.
*
*/
class JobTrackerStatistics {
final StatisticsCollector collector;
final Map<String, TaskTrackerStat> ttStats =
new HashMap<String, TaskTrackerStat>();
JobTrackerStatistics() {
collector = new StatisticsCollector();
collector.start();
}
synchronized void taskTrackerAdded(String name) {
TaskTrackerStat stat = ttStats.get(name);
if(stat == null) {
stat = new TaskTrackerStat(name);
ttStats.put(name, stat);
}
}
synchronized void taskTrackerRemoved(String name) {
TaskTrackerStat stat = ttStats.remove(name);
if(stat != null) {
stat.remove();
}
}
synchronized TaskTrackerStat getTaskTrackerStat(String name) {
return ttStats.get(name);
}
class TaskTrackerStat {
final String totalTasksKey;
final Stat totalTasksStat;
final String succeededTasksKey;
final Stat succeededTasksStat;
TaskTrackerStat(String trackerName) {
totalTasksKey = trackerName+"-"+"totalTasks";
totalTasksStat = collector.createStat(totalTasksKey);
succeededTasksKey = trackerName+"-"+"succeededTasks";
succeededTasksStat = collector.createStat(succeededTasksKey);
}
synchronized void incrTotalTasks() {
totalTasksStat.inc();
}
synchronized void incrSucceededTasks() {
succeededTasksStat.inc();
}
synchronized void remove() {
collector.removeStat(totalTasksKey);
collector.removeStat(succeededTasksKey);
}
}
}
|
3e0e85cd55d31f4c3ff0900eafeb372d82470d19 | 1,051 | java | Java | header/src/main/java/org/zstack/header/zone/APIGetZoneReply.java | SnailJie/ZstackWithComments | 5d4deb003523ddece3a9d200d898849b7c9a6518 | [
"Apache-2.0"
] | 3 | 2018-02-13T16:37:27.000Z | 2020-10-12T03:10:23.000Z | header/src/main/java/org/zstack/header/zone/APIGetZoneReply.java | SnailJie/ZstackWithComments | 5d4deb003523ddece3a9d200d898849b7c9a6518 | [
"Apache-2.0"
] | null | null | null | header/src/main/java/org/zstack/header/zone/APIGetZoneReply.java | SnailJie/ZstackWithComments | 5d4deb003523ddece3a9d200d898849b7c9a6518 | [
"Apache-2.0"
] | 4 | 2018-01-09T07:42:46.000Z | 2020-12-16T10:29:07.000Z | 28.405405 | 70 | 0.687916 | 6,170 | package org.zstack.header.zone;
import org.zstack.header.message.APIReply;
import java.sql.Timestamp;
import java.util.List;
import static java.util.Arrays.asList;
/**
 * Reply message carrying the zone inventories returned for a get-zone request.
 */
public class APIGetZoneReply extends APIReply {

    private List<ZoneInventory> inventories;

    public List<ZoneInventory> getInventories() {
        return inventories;
    }

    public void setInventories(List<ZoneInventory> inventories) {
        this.inventories = inventories;
    }

    /**
     * Builds the documentation example: a successful reply containing one
     * enabled zstack zone named "TestZone".
     */
    public static APIGetZoneReply __example__() {
        ZoneInventory sampleZone = new ZoneInventory();
        sampleZone.setUuid(uuid());
        sampleZone.setName("TestZone");
        sampleZone.setDescription("Test");
        sampleZone.setType("zstack");
        sampleZone.setState(ZoneState.Enabled.toString());
        sampleZone.setCreateDate(new Timestamp(System.currentTimeMillis()));
        sampleZone.setLastOpDate(new Timestamp(System.currentTimeMillis()));

        APIGetZoneReply reply = new APIGetZoneReply();
        reply.setSuccess(true);
        reply.setInventories(asList(sampleZone));
        return reply;
    }
}
|
3e0e85d70595ec53f8d7370deee2767d8f60cd88 | 1,276 | java | Java | attic/nodes/src/main/java/com/github/linggify/attic/nodes/ClearScreenNode.java | Linggify/AtticEngine | 9cf5f11094fc70b5befd027ba79480315ffa248e | [
"Apache-2.0"
] | null | null | null | attic/nodes/src/main/java/com/github/linggify/attic/nodes/ClearScreenNode.java | Linggify/AtticEngine | 9cf5f11094fc70b5befd027ba79480315ffa248e | [
"Apache-2.0"
] | 10 | 2017-07-13T08:11:16.000Z | 2017-07-13T08:32:30.000Z | attic/nodes/src/main/java/com/github/linggify/attic/nodes/ClearScreenNode.java | Linggify/AtticEngine | 9cf5f11094fc70b5befd027ba79480315ffa248e | [
"Apache-2.0"
] | null | null | null | 22.785714 | 81 | 0.706113 | 6,171 | package com.github.linggify.attic.nodes;
import com.github.linggify.attic.exceptions.AtticRuntimeException;
import com.github.linggify.attic.render.IContext;
import com.github.linggify.attic.render.path.INode;
import com.github.linggify.attic.util.Color;
/**
* An {@link INode} used to clear the default framebuffer
* @author ueuui
*
*/
public class ClearScreenNode implements INode{
public static final String CLEAR_COLOR = "clear_color";
private IContext mContext;
private boolean mCleared;
private IInput mColor;
@Override
public void setRenderHepler(IContext helper) {
mContext = helper;
}
@Override
public void setInput(String name, IInput input) {
if(name.equals(CLEAR_COLOR)) mColor = input;
else throw new AtticRuntimeException(name + " is not a valid input");
}
@Override
public void prepare() {
mCleared = false;
}
@Override
public int getOutputId(String name) {
return 0;
}
@SuppressWarnings("unchecked")
@Override
public <T> T getOutput(int index, Class<T> type) throws AtticRuntimeException {
if(!mCleared) {
mContext.setClearColor(-1, mColor.getValue(Color.class));
mContext.clearRenderTargets();
}
Integer result = 0;
return (T) result;
}
}
|
3e0e8659fa3ca3c8c0f35211ce7ca9e67da06af3 | 5,692 | java | Java | TeamCode/src/main/java/org/firstinspires/ftc/teamcode/BlueTowerHigh.java | pranavnightsforrobotics/FtcRobotController-master | ea6ff2acc95b28aa378b2b540f423ace17f7791e | [
"MIT"
] | null | null | null | TeamCode/src/main/java/org/firstinspires/ftc/teamcode/BlueTowerHigh.java | pranavnightsforrobotics/FtcRobotController-master | ea6ff2acc95b28aa378b2b540f423ace17f7791e | [
"MIT"
] | null | null | null | TeamCode/src/main/java/org/firstinspires/ftc/teamcode/BlueTowerHigh.java | pranavnightsforrobotics/FtcRobotController-master | ea6ff2acc95b28aa378b2b540f423ace17f7791e | [
"MIT"
] | null | null | null | 36.254777 | 90 | 0.646873 | 6,172 | package org.firstinspires.ftc.teamcode;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.DigitalChannel;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.EncoderAndPIDDriveTrain;
//Back up Auton that goes to the wall side of the bridge, and parks there
@Autonomous (name = "BLueTowerHigh")
//@Disabled
public class BlueTowerHigh extends LinearOpMode {
//initializaing the future variables
private ElapsedTime runtime = new ElapsedTime();
DcMotor LFMotor, LBMotor, RFMotor, RBMotor, carouselMotor, armMotor;
private double carouselSpeed = 0.70, armSpeed=0.6;
private Servo turnServo, clawServo;
EncoderAndPIDDriveTrain drive;
EncoderArm turn;
BNO055IMU imu;
//no. of ticks per one revolution of the yellow jacket motors
int Ticks_Per_Rev = 1316;
@Override
public void runOpMode() throws InterruptedException {
// Initialize the hardware variables.
LFMotor = hardwareMap.get(DcMotor.class, "LF Motor");
LBMotor = hardwareMap.get(DcMotor.class, "LB Motor");
RFMotor = hardwareMap.get(DcMotor.class, "RF Motor");
RBMotor = hardwareMap.get(DcMotor.class, "RB Motor");
imu = hardwareMap.get(BNO055IMU.class, "imu");
carouselMotor = hardwareMap.get(DcMotor.class, "Carousel Motor");
armMotor = hardwareMap.get(DcMotor.class, "Arm Motor");
clawServo = hardwareMap.get(Servo.class, "Claw Servo");
turnServo = hardwareMap.get(Servo.class, "Turn Servo");
LFMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
LBMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
RFMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
RBMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
armMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
//Wheels on the chassis functions
drive = new EncoderAndPIDDriveTrain(LFMotor, LBMotor, RFMotor, RBMotor, imu);
turn = new EncoderArm(armMotor);
//Reverse the right motors to move forward based on their orientation on the robot
carouselMotor.setDirection(DcMotor.Direction.REVERSE);
clawServo.setDirection(Servo.Direction.FORWARD);
turnServo.setDirection(Servo.Direction.FORWARD);
carouselMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
// Wait for the game to start (driver presses PLAY)
telemetry.addData("Mode", "Init");
telemetry.update();
runtime.reset();
waitForStart();
//Running the code
LFMotor.getCurrentPosition();
if (opModeIsActive()) {
//Strafe Left to go over the pipes
clawServo.setPosition(0.6);
turnServo.setPosition(Servo.MIN_POSITION);
drive.DriveBackwardDistance(1,3);
turn.TurnArmDistance(2,-178);
turnServo.setPosition(Servo.MAX_POSITION);
sleep(100);
drive.DriveBackwardDistance(1,5);
clawServo.setPosition(Servo.MIN_POSITION);
sleep(500);
clawServo.setPosition(0.6);
drive.DriveForwardDistance(1,3);
drive.TurnLeftDistance(1,15);
drive.StrafeRightDistance(1,10);
turnServo.setPosition(Servo.MIN_POSITION);
turn.TurnArmDistance(2, 168);
/* LOW GOAL
clawServo.setPosition(0.6);
turnServo.setPosition(Servo.MIN_POSITION);
drive.DriveBackwardDistance(1,3);
turn.TurnArmDistance(2,-178);
turnServo.setPosition(Servo.MAX_POSITION);
sleep(100);
drive.DriveBackwardDistance(1,5);
clawServo.setPosition(Servo.MIN_POSITION);
sleep(500);
clawServo.setPosition(0.6);
drive.DriveForwardDistance(1,3);
drive.TurnLeftDistance(1,15);
drive.StrafeRightDistance(1,10);
turnServo.setPosition(Servo.MIN_POSITION);
turn.TurnArmDistance(2, 168);
*/
/* MID GOAL
drive.DriveBackwardDistance(1,4);
clawServo.setPosition(0.6);
turnServo.setPosition(Servo.MIN_POSITION);
turn.TurnArmDistance(2,-156);
turnServo.setPosition(0.65);
drive.DriveBackwardDistance(1,5);
sleep(300);
clawServo.setPosition(Servo.MIN_POSITION);
sleep(500);
clawServo.setPosition(0.6);
drive.DriveForwardDistance(1,3);
drive.TurnLeftDistance(1,15);
drive.StrafeRightDistance(1,15);
turnServo.setPosition(Servo.MIN_POSITION);
turn.TurnArmDistance(2,146);
*/
/* HIGH GOAL
drive.DriveBackwardDistance(1,15);
turnServo.setPosition(Servo.MIN_POSITION);
clawServo.setPosition(0.6);
turn.TurnArmDistance(2,-132);
turnServo.setPosition(0.5);
sleep(300);
clawServo.setPosition(Servo.MIN_POSITION);
sleep(500);
turnServo.setPosition(Servo.MIN_POSITION);
clawServo.setPosition(0.6);
drive.DriveForwardDistance(1,10);
drive.TurnLeftDistance(1,17);
drive.StrafeRightDistance(1,10);
turn.TurnArmDistance(2,122);
*/
}
}
} |
3e0e86aff58de1478b9fe80d1a2e23d6fc0f5469 | 5,308 | java | Java | modules/base/xdebugger-impl/src/main/java/com/intellij/xdebugger/impl/breakpoints/ui/XBreakpointActionsPanel.java | MC-JY/consulo | ebd31008fcfd03e144b46a9408d2842d0b06ffc8 | [
"Apache-2.0"
] | 634 | 2015-01-01T19:14:25.000Z | 2022-03-22T11:42:50.000Z | modules/base/xdebugger-impl/src/main/java/com/intellij/xdebugger/impl/breakpoints/ui/XBreakpointActionsPanel.java | MC-JY/consulo | ebd31008fcfd03e144b46a9408d2842d0b06ffc8 | [
"Apache-2.0"
] | 410 | 2015-01-19T09:57:51.000Z | 2022-03-22T16:24:59.000Z | modules/base/xdebugger-impl/src/main/java/com/intellij/xdebugger/impl/breakpoints/ui/XBreakpointActionsPanel.java | MC-JY/consulo | ebd31008fcfd03e144b46a9408d2842d0b06ffc8 | [
"Apache-2.0"
] | 50 | 2015-03-10T04:14:49.000Z | 2022-03-22T07:08:45.000Z | 37.914286 | 172 | 0.765825 | 6,173 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.breakpoints.ui;
import com.intellij.openapi.project.Project;
import com.intellij.xdebugger.XExpression;
import com.intellij.xdebugger.breakpoints.XBreakpointManager;
import com.intellij.xdebugger.breakpoints.XLineBreakpoint;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.impl.XDebuggerUtilImpl;
import com.intellij.xdebugger.impl.breakpoints.XBreakpointBase;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import com.intellij.xdebugger.impl.ui.XDebuggerExpressionComboBox;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
/**
 * Sub-panel of the breakpoint properties dialog that edits the "actions"
 * performed when a breakpoint is hit: logging a fixed message, evaluating and
 * logging an expression, and (for line breakpoints only) removing the
 * breakpoint after the first hit ("temporary").
 *
 * NOTE(review): the Swing component fields (myContentPane, myMainPanel, the
 * checkboxes, ...) are never assigned in this class -- they appear to be bound
 * by a GUI-designer .form file; confirm before renaming or removing any.
 */
public class XBreakpointActionsPanel extends XBreakpointPropertiesSubPanel {
  // History key for the log-expression editor's recent-expressions list.
  public static final String LOG_EXPRESSION_HISTORY_ID = "breakpointLogExpression";
  private JCheckBox myLogMessageCheckBox;
  private JCheckBox myLogExpressionCheckBox;
  private JPanel myLogExpressionPanel;
  private JPanel myContentPane;
  private JPanel myMainPanel;
  private JCheckBox myTemporaryCheckBox;
  private JPanel myExpressionPanel;
  // Created lazily in init(); null when no editors provider is available.
  private XDebuggerExpressionComboBox myLogExpressionComboBox;

  /**
   * Wires the panel to a breakpoint.  When an editors provider is available,
   * builds the log-expression combo box and its enable/disable listener;
   * otherwise the whole expression section is removed from the panel.
   * (breakpointManager is accepted for the common sub-panel signature but is
   * not used here -- the superclass init only takes project/manager/breakpoint.)
   */
  public void init(Project project, XBreakpointManager breakpointManager, @Nonnull XBreakpointBase breakpoint, @Nullable XDebuggerEditorsProvider debuggerEditorsProvider) {
    init(project, breakpointManager, breakpoint);
    if (debuggerEditorsProvider != null) {
      ActionListener listener = new ActionListener() {
        public void actionPerformed(final ActionEvent e) {
          onCheckboxChanged();
        }
      };

      myLogExpressionComboBox = new XDebuggerExpressionComboBox(project, debuggerEditorsProvider, LOG_EXPRESSION_HISTORY_ID, myBreakpoint.getSourcePosition(), true);
      JComponent logExpressionComponent = myLogExpressionComboBox.getComponent();
      myLogExpressionPanel.add(logExpressionComponent, BorderLayout.CENTER);
      // Disabled until the checkbox is ticked (see onCheckboxChanged).
      myLogExpressionComboBox.setEnabled(false);
      // "Temporary" only makes sense for line breakpoints.
      myTemporaryCheckBox.setVisible(breakpoint instanceof XLineBreakpoint);
      myLogExpressionCheckBox.addActionListener(listener);
      DebuggerUIUtil.focusEditorOnCheck(myLogExpressionCheckBox, myLogExpressionComboBox.getEditorComponent());
    }
    else {
      myExpressionPanel.getParent().remove(myExpressionPanel);
    }
  }

  /**
   * Collapses this panel in the "light" dialog variant when none of its
   * options are in use.  Returns true when the panel was hidden.
   */
  @Override
  public boolean lightVariant(boolean showAllOptions) {
    if (!showAllOptions && !myBreakpoint.isLogMessage() && myBreakpoint.getLogExpression() == null &&
        (!(myBreakpoint instanceof XLineBreakpoint) || !((XLineBreakpoint)myBreakpoint).isTemporary()) ) {
      myMainPanel.setVisible(false);
      return true;
    } else {
      myMainPanel.setBorder(null);
      return false;
    }
  }

  // Keeps the expression editor's enabled state in sync with its checkbox.
  private void onCheckboxChanged() {
    if (myLogExpressionComboBox != null) {
      myLogExpressionComboBox.setEnabled(myLogExpressionCheckBox.isSelected());
    }
  }

  /** Populates the UI controls from the breakpoint's current settings. */
  @Override
  void loadProperties() {
    myLogMessageCheckBox.setSelected(myBreakpoint.isLogMessage());
    if (myBreakpoint instanceof XLineBreakpoint) {
      myTemporaryCheckBox.setSelected(((XLineBreakpoint)myBreakpoint).isTemporary());
    }
    if (myLogExpressionComboBox != null) {
      XExpression logExpression = myBreakpoint.getLogExpressionObjectInt();
      myLogExpressionComboBox.setExpression(logExpression);
      myLogExpressionCheckBox.setSelected(myBreakpoint.isLogExpressionEnabled() && logExpression != null);
    }
    onCheckboxChanged();
  }

  /** Writes the UI state back into the breakpoint. */
  @Override
  void saveProperties() {
    myBreakpoint.setLogMessage(myLogMessageCheckBox.isSelected());
    if (myBreakpoint instanceof XLineBreakpoint) {
      ((XLineBreakpoint)myBreakpoint).setTemporary(myTemporaryCheckBox.isSelected());
    }
    if (myLogExpressionComboBox != null) {
      XExpression expression = myLogExpressionComboBox.getExpression();
      // Blank editor text means "no log expression".
      XExpression logExpression = !XDebuggerUtilImpl.isEmptyExpression(expression) ? expression : null;
      // When the expression is cleared the enabled flag is reset to true so a
      // future expression starts out enabled -- TODO confirm this is intended.
      myBreakpoint.setLogExpressionEnabled(logExpression == null || myLogExpressionCheckBox.isSelected());
      myBreakpoint.setLogExpressionObject(logExpression);
      myLogExpressionComboBox.saveTextInHistory();
    }
  }

  // Focus target for the dialog: the expression editor, when usable.
  JComponent getDefaultFocusComponent() {
    if (myLogExpressionComboBox != null && myLogExpressionComboBox.getComboBox().isEnabled()) {
      return myLogExpressionComboBox.getEditorComponent();
    }
    return null;
  }

  // Nothing to release; the combo box is managed by the dialog lifecycle.
  public void dispose() {
  }

  /** Hides the whole panel (used by the properties dialog). */
  public void hide() {
    myContentPane.setVisible(false);
  }
}
|
3e0e884dbd68433e73afd00add9eb0908aebac0d | 32,501 | java | Java | src/main/java/org/testng/internal/TestInvoker.java | hknerts/testng | ef0534161a4f3edec5a4cc99e8bc7c6b6b9a42b8 | [
"Apache-2.0"
] | 1 | 2019-09-13T15:44:54.000Z | 2019-09-13T15:44:54.000Z | src/main/java/org/testng/internal/TestInvoker.java | dlandis/testng | 3b6cd4b94069e67126076e60212f9c3a212e8bdd | [
"Apache-2.0"
] | null | null | null | src/main/java/org/testng/internal/TestInvoker.java | dlandis/testng | 3b6cd4b94069e67126076e60212f9c3a212e8bdd | [
"Apache-2.0"
] | null | null | null | 39.205066 | 119 | 0.704594 | 6,174 | package org.testng.internal;
import static org.testng.internal.Invoker.CAN_RUN_FROM_CLASS;
import static org.testng.internal.invokers.InvokedMethodListenerMethod.AFTER_INVOCATION;
import static org.testng.internal.invokers.InvokedMethodListenerMethod.BEFORE_INVOCATION;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.testng.DataProviderHolder;
import org.testng.IClassListener;
import org.testng.IDataProviderListener;
import org.testng.IHookable;
import org.testng.IInvokedMethod;
import org.testng.IInvokedMethodListener;
import org.testng.IRetryAnalyzer;
import org.testng.ISuite;
import org.testng.ITestClass;
import org.testng.ITestContext;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.SuiteRunState;
import org.testng.SuiteRunner;
import org.testng.TestException;
import org.testng.TestNGException;
import org.testng.collections.Lists;
import org.testng.collections.Maps;
import org.testng.collections.Sets;
import org.testng.internal.GroupConfigMethodArguments.Builder;
import org.testng.internal.InvokeMethodRunnable.TestNGRuntimeException;
import org.testng.internal.ParameterHandler.ParameterBag;
import org.testng.internal.thread.ThreadExecutionException;
import org.testng.internal.thread.ThreadUtil;
import org.testng.thread.IWorker;
import org.testng.xml.XmlSuite;
/**
 * Invokes the suite's test methods: resolves dependencies and parameters,
 * drives before/after configuration and group-configuration methods around
 * each invocation, applies retry analyzers, and reports results through the
 * ITestResultNotifier.
 */
class TestInvoker extends BaseInvoker implements ITestInvoker {

  // Runs configuration and group-configuration methods around each invocation.
  private final ConfigInvoker invoker;
  // Data-provider listeners and interceptors supplied by the owning runner.
  private final DataProviderHolder holder;
  // Class listeners handed to pooled test-method workers.
  private final List<IClassListener> m_classListeners;
  // Skip remaining invocations after a failure -- not referenced in this
  // chunk; TODO confirm where it is consumed.
  private final boolean m_skipFailedInvocationCounts;
public TestInvoker(ITestResultNotifier m_notifier,
ITestContext m_testContext, SuiteRunState m_suiteState,
IConfiguration m_configuration, Collection<IInvokedMethodListener> m_invokedMethodListeners,
DataProviderHolder holder, List<IClassListener> m_classListeners,
boolean m_skipFailedInvocationCounts,
ConfigInvoker invoker) {
super(m_notifier, m_invokedMethodListeners, m_testContext, m_suiteState, m_configuration
);
this.holder = holder;
this.m_classListeners = m_classListeners;
this.m_skipFailedInvocationCounts = m_skipFailedInvocationCounts;
this.invoker = invoker;
}
  /** @return the notifier that collects passed/failed/skipped results. */
  @Override
  public ITestResultNotifier getNotifier() {
    return m_notifier;
  }
  /**
   * Invokes one @Test method for all of its invocations: checks that it is
   * enabled and that its dependencies succeeded, then either skips it,
   * delegates to a thread pool (invocationCount > 1 with threadPoolSize > 1),
   * or runs it invocationCount times via a MethodInvocationAgent.
   *
   * @return the results of all invocations (possibly empty when disabled)
   */
  public List<ITestResult> invokeTestMethods(ITestNGMethod testMethod,
      ConfigurationGroupMethods groupMethods,
      Object instance,
      ITestContext context) {
    // Potential bug here if the test method was declared on a parent class
    if (testMethod.getTestClass() == null) {
      throw new IllegalArgumentException(
          "COULDN'T FIND TESTCLASS FOR " + testMethod.getRealClass());
    }
    XmlSuite suite = context.getSuite().getXmlSuite();

    if (!MethodHelper.isEnabled(
        testMethod.getConstructorOrMethod().getMethod(), annotationFinder())) {
      // return if the method is not enabled. No need to do any more calculations
      return Collections.emptyList();
    }

    Map<String, String> parameters = testMethod.findMethodParameters(context.getCurrentXmlTest());

    // By the time this testMethod to be invoked,
    // all dependencies should be already run or we need to skip this method,
    // so invocation count should not affect dependencies check
    String okToProceed = checkDependencies(testMethod, context.getAllTestMethods());

    if (okToProceed != null) {
      //
      // Not okToProceed. Test is being skipped: record the skip, fire the
      // listeners, and still run the @AfterGroups configurations so group
      // bookkeeping stays consistent.
      //
      ITestResult result =
          registerSkippedTestResult(
              testMethod, System.currentTimeMillis(), new Throwable(okToProceed));
      m_notifier.addSkippedTest(testMethod, result);
      InvokedMethod invokedMethod = new InvokedMethod(result.getInstance(), testMethod,
          System.currentTimeMillis(), result);
      invokeListenersForSkippedTestResult(result, invokedMethod);
      testMethod.incrementCurrentInvocationCount();
      GroupConfigMethodArguments args = new Builder()
          .forTestMethod(testMethod)
          .withGroupConfigMethods(groupMethods)
          .forSuite(suite)
          .forInstance(instance)
          .withParameters(parameters)
          .build();
      this.invoker.invokeAfterGroupsConfigurations(args);
      return Collections.singletonList(result);
    }

    // For invocationCount > 1 and threadPoolSize > 1 run this method in its own pool thread.
    if (testMethod.getInvocationCount() > 1 && testMethod.getThreadPoolSize() > 1) {
      return invokePooledTestMethods(testMethod, suite, parameters, groupMethods, context);
    }
    // NOTE(review): despite the name this is the invocation *timeout*, not a
    // count (see getInvocationTimeOut) -- matches the FIXME below.
    long timeOutInvocationCount = testMethod.getInvocationTimeOut();
    // FIXME: Is this correct?
    boolean onlyOne = testMethod.getThreadPoolSize() > 1 || timeOutInvocationCount > 0;

    ITestClass testClass = testMethod.getTestClass();
    ITestNGMethod[] beforeMethods =
        TestNgMethodUtils.filterBeforeTestMethods(testClass, CAN_RUN_FROM_CLASS);
    ITestNGMethod[] afterMethods =
        TestNgMethodUtils.filterAfterTestMethods(testClass, CAN_RUN_FROM_CLASS);
    int invocationCount = onlyOne ? 1 : testMethod.getInvocationCount();

    TestMethodArguments arguments = new TestMethodArguments.Builder()
        .usingInstance(instance)
        .forTestMethod(testMethod)
        .withParameters(parameters)
        .forTestClass(testClass)
        .usingBeforeMethods(beforeMethods)
        .usingAfterMethods(afterMethods)
        .usingGroupMethods(groupMethods)
        .build();

    MethodInvocationAgent agent = new MethodInvocationAgent(arguments, this, context);
    // The agent may adjust the remaining count (e.g. for retries).
    while (invocationCount-- > 0) {
      invocationCount = agent.invoke(invocationCount);
    }
    return agent.getResult();
  }
/**
* invokeTestMethods() eventually converge here to invoke a single @Test method.
*
* <p>This method is responsible for actually invoking the method. It decides if the invocation
* must be done:
*
* <ul>
* <li>through an <code>IHookable</code>
* <li>directly (through reflection)
* <li>in a separate thread (in case it needs to timeout)
* </ul>
*
* <p>This method is also responsible for
* invoking @BeforeGroup, @BeforeMethod, @AfterMethod, @AfterGroup if it is the case for the
* passed in @Test method.
*/
public ITestResult invokeTestMethod(TestMethodArguments arguments,
XmlSuite suite, FailureContext failureContext) {
// Mark this method with the current thread id
arguments.getTestMethod().setId(ThreadUtil.currentThreadInfo());
return invokeMethod(arguments, suite, failureContext);
}
  /**
   * Re-invokes a failed method until its retry analyzer stops flagging failing
   * instances.  Each attempt recreates the data-provider parameters and appends
   * its result to {@code result}.
   *
   * @param failureCount number of failures already recorded for this method
   * @return the final failure bookkeeping after retries are exhausted
   */
  public FailureContext retryFailed(
      TestMethodArguments arguments, List<ITestResult> result,
      int failureCount,
      ITestContext testContext) {
    FailureContext failure = new FailureContext();
    failure.count = failureCount;
    do {
      // A fresh list each round; invokeMethod adds instances that must retry.
      failure.instances = Lists.newArrayList();
      Map<String, String> allParameters = Maps.newHashMap();
      // TODO: This recreates all the parameters every time when we only need
      // one specific set. Should optimize it by only recreating the set needed.
      ParameterHandler handler = new ParameterHandler(annotationFinder(), this.holder);
      ParameterBag bag = handler.createParameters(arguments.getTestMethod(),
          arguments.getParameters(), allParameters, testContext);
      Object[] parameterValues =
          Parameters.getParametersFromIndex(Objects.requireNonNull(bag.parameterHolder).parameters,
              arguments.getParametersIndex());
      // Natively-injected parameters are reused as-is.
      if (bag.parameterHolder.origin == ParameterHolder.ParameterOrigin.NATIVE) {
        parameterValues = arguments.getParameterValues();
      }

      TestMethodArguments tma = new TestMethodArguments.Builder()
          .usingArguments(arguments)
          .withParameterValues(parameterValues)
          .withParameters(allParameters)
          .build();
      result.add(invokeMethod(tma, testContext.getSuite().getXmlSuite(), failure));
    } while (!failure.instances.isEmpty());
    return failure;
  }
  /** Fires all registered ITestListener callbacks for the given result. */
  public void runTestResultListener(ITestResult tr) {
    TestListenerHelper.runTestListeners(tr, m_notifier.getTestListeners());
  }
private Collection<IDataProviderListener> dataProviderListeners() {
ISuite suite = this.m_testContext.getSuite();
Collection<IDataProviderListener> dpListeners = Sets.newHashSet(this.holder.getListeners());
if (suite instanceof SuiteRunner) {
Collection<IDataProviderListener> listeners = ((SuiteRunner) suite)
.getDataProviderListeners();
dpListeners.addAll(listeners);
}
return dpListeners;
}
private DataProviderHolder buildDataProviderHolder() {
DataProviderHolder holder = new DataProviderHolder();
holder.addListeners(dataProviderListeners());
holder.addInterceptors(this.holder.getInterceptors());
return holder;
}
  /**
   * Checks whether the test method has dependencies that prevent TestNG from
   * executing it (missing groups, or upstream group/method failures).
   *
   * @param testMethod test method being checked
   * @param allTestMethods all test methods of the current run, used to resolve
   *     method-level dependencies
   * @return an error message describing the unmet dependency, or null if all
   *     dependencies have been run successfully
   */
  private String checkDependencies(ITestNGMethod testMethod, ITestNGMethod[] allTestMethods) {

    // If this method is marked alwaysRun, no need to check for its dependencies
    if (testMethod.isAlwaysRun()) {
      return null;
    }

    // Any missing group?
    if (testMethod.getMissingGroup() != null && !testMethod.ignoreMissingDependencies()) {
      return "Method "
          + testMethod
          + " depends on nonexistent group \""
          + testMethod.getMissingGroup()
          + "\"";
    }

    // If this method depends on groups, collect all the methods that
    // belong to these groups and make sure they have been run successfully
    String[] groups = testMethod.getGroupsDependedUpon();
    if (null != groups && groups.length > 0) {
      // Get all the methods that belong to the group depended upon
      for (String element : groups) {
        ITestNGMethod[] methods =
            MethodGroupsHelper.findMethodsThatBelongToGroup(
                testMethod, m_testContext.getAllTestMethods(), element);
        if (methods.length == 0 && !testMethod.ignoreMissingDependencies()) {
          // Group is missing
          return "Method " + testMethod + " depends on nonexistent group \"" + element + "\"";
        }
        if (failuresPresentInUpstreamDependency(testMethod, methods)) {
          return "Method "
              + testMethod
              + " depends on not successfully finished methods in group \""
              + element
              + "\"";
        }
      }
    } // depends on groups

    // If this method depends on other methods, make sure all these other
    // methods have been run successfully
    if (TestNgMethodUtils.cannotRunMethodIndependently(testMethod)) {
      ITestNGMethod[] methods = MethodHelper.findDependedUponMethods(testMethod, allTestMethods);
      if (failuresPresentInUpstreamDependency(testMethod, methods)) {
        return "Method " + testMethod + " depends on not successfully finished methods";
      }
    }

    return null;
  }
  /**
   * Executes the given workers on a thread pool of {@code threadPoolSize} and
   * collects their results.  To reduce thread contention and correctly handle
   * thread confinement, the @BeforeGroups and @AfterGroups configurations for
   * the current @Test method are invoked here, once per instance, outside the
   * pool.
   */
  private List<ITestResult> runWorkers(
      ITestNGMethod testMethod,
      List<IWorker<ITestNGMethod>> workers,
      int threadPoolSize,
      ConfigurationGroupMethods groupMethods,
      XmlSuite suite,
      Map<String, String> parameters) {
    // Invoke @BeforeGroups on the original method (reduce thread contention,
    // and also solve thread confinement)
    ITestClass testClass = testMethod.getTestClass();
    Object[] instances = testClass.getInstances(true);
    for (Object instance : instances) {
      GroupConfigMethodArguments arguments = new GroupConfigMethodArguments.Builder()
          .forTestMethod(testMethod)
          .withGroupConfigMethods(groupMethods)
          .forSuite(suite)
          .withParameters(parameters)
          .forInstance(instance)
          .build();
      invoker.invokeBeforeGroupsConfigurations(arguments);
    }

    // Use the largest per-worker timeout; -1 means "no timeout configured".
    long maxTimeOut = -1;

    for (IWorker<ITestNGMethod> tmw : workers) {
      long mt = tmw.getTimeOut();
      if (mt > maxTimeOut) {
        maxTimeOut = mt;
      }
    }

    ThreadUtil.execute("methods", workers, threadPoolSize, maxTimeOut, true);

    //
    // Collect all the TestResults
    //
    List<ITestResult> result = Lists.newArrayList();
    for (IWorker<ITestNGMethod> tmw : workers) {
      if (tmw instanceof TestMethodWorker) {
        result.addAll(((TestMethodWorker) tmw).getTestResults());
      }
    }

    // Mirror the @BeforeGroups pass with @AfterGroups, per instance.
    for (Object instance : instances) {
      GroupConfigMethodArguments arguments = new GroupConfigMethodArguments.Builder()
          .forTestMethod(testMethod)
          .withGroupConfigMethods(groupMethods)
          .forSuite(suite)
          .withParameters(parameters)
          .forInstance(instance)
          .build();
      invoker.invokeAfterGroupsConfigurations(arguments);
    }

    return result;
  }
private boolean failuresPresentInUpstreamDependency(ITestNGMethod testMethod, ITestNGMethod[] methods) {
// Make sure the method has been run successfully
for (ITestNGMethod method : methods) {
Set<ITestResult> results = keepSameInstances(testMethod, m_notifier.getPassedTests(method));
Set<ITestResult> failedAndSkippedMethods = Sets.newHashSet();
Set<ITestResult> skippedAttempts = m_notifier.getSkippedTests(method);
failedAndSkippedMethods.addAll(m_notifier.getFailedTests(method));
failedAndSkippedMethods.addAll(skippedAttempts);
Set<ITestResult> failedresults = keepSameInstances(testMethod, failedAndSkippedMethods);
boolean wasMethodRetried = !results.isEmpty() && !skippedAttempts.isEmpty();
if (!wasMethodRetried && !failedresults.isEmpty()) {
// If failed results were returned on the same instance, then these tests didn't pass
return true;
}
for (ITestResult result : results) {
if (!result.isSuccess()) {
return true;
}
}
}
return false;
}
/** @return the test results that apply to one of the instances of the testMethod. */
private Set<ITestResult> keepSameInstances(ITestNGMethod method, Set<ITestResult> results) {
Set<ITestResult> result = Sets.newHashSet();
for (ITestResult r : results) {
Object o = method.getInstance();
// Keep this instance if 1) It's on a different class or 2) It's on the same class
// and on the same instance
Object instance = r.getInstance() != null ? r.getInstance() : r.getMethod().getInstance();
if (r.getTestClass() != method.getTestClass() || instance == o) {
result.add(r);
}
}
return result;
}
  /**
   * Invokes a method that has a specified threadPoolSize: one single-invocation
   * clone of the method is created per invocation and all clones are run on a
   * pool of that size.
   */
  private List<ITestResult> invokePooledTestMethods(
      ITestNGMethod testMethod,
      XmlSuite suite,
      Map<String, String> parameters,
      ConfigurationGroupMethods groupMethods,
      ITestContext testContext) {
    //
    // Create the workers
    //
    List<IWorker<ITestNGMethod>> workers = Lists.newArrayList();

    // Create one worker per invocationCount
    for (int i = 0; i < testMethod.getInvocationCount(); i++) {
      // we use clones for reporting purposes; each clone runs exactly once
      // on a single thread
      ITestNGMethod clonedMethod = testMethod.clone();
      clonedMethod.setInvocationCount(1);
      clonedMethod.setThreadPoolSize(1);

      MethodInstance mi = new MethodInstance(clonedMethod);
      workers.add(new SingleTestMethodWorker(this, invoker, mi, parameters, testContext, m_classListeners));
    }

    return runWorkers(
        testMethod, workers, testMethod.getThreadPoolSize(), groupMethods, suite, parameters);
  }
private void collectResults(ITestNGMethod testMethod, ITestResult result) {
// Collect the results
int status = result.getStatus();
if (ITestResult.SUCCESS == status) {
m_notifier.addPassedTest(testMethod, result);
} else if (ITestResult.SKIP == status) {
m_notifier.addSkippedTest(testMethod, result);
} else if (ITestResult.FAILURE == status) {
m_notifier.addFailedTest(testMethod, result);
} else if (ITestResult.SUCCESS_PERCENTAGE_FAILURE == status) {
m_notifier.addFailedButWithinSuccessPercentageTest(testMethod, result);
} else {
assert false : "UNKNOWN STATUS:" + status;
}
}
public void invokeListenersForSkippedTestResult(ITestResult r, IInvokedMethod invokedMethod) {
if (m_configuration.alwaysRunListeners()) {
runInvokedMethodListeners(BEFORE_INVOCATION, invokedMethod, r);
runInvokedMethodListeners(AFTER_INVOCATION, invokedMethod, r);
}
runTestResultListener(r);
}
private static void setTestStatus(ITestResult result, int status) {
// set the test to success as long as the testResult hasn't been changed by the user via
// Reporter.getCurrentTestResult
if (result.getStatus() == ITestResult.STARTED) {
result.setStatus(status);
}
}
  /**
   * Mutable pair produced by expected-exception handling (considerExceptions):
   * the computed status plus whether the exception was already handled.
   */
  private static class StatusHolder {
    // True when the thrown exception was consumed as an expected exception.
    boolean handled = false;
    // The ITestResult status derived from exception analysis.
    int status;
  }
private void handleInvocationResults(
ITestNGMethod testMethod,
ITestResult testResult,
FailureContext failure,
StatusHolder holder,
boolean wasResultUnaltered) {
//
// Go through all the results and create a TestResult for each of them
//
List<ITestResult> resultsToRetry = Lists.newArrayList();
Throwable ite = testResult.getThrowable();
int status =
computeTestStatusComparingTestResultAndStatusHolder(testResult, holder, wasResultUnaltered);
boolean handled = holder.handled;
IRetryAnalyzer retryAnalyzer = testMethod.getRetryAnalyzer(testResult);
boolean willRetry =
retryAnalyzer != null
&& status == ITestResult.FAILURE
&& failure.instances != null
&& retryAnalyzer.retry(testResult);
if (willRetry) {
resultsToRetry.add(testResult);
Object instance = testResult.getInstance();
if (!failure.instances.contains(instance)) {
failure.instances.add(instance);
}
testResult.setStatus(ITestResult.SKIP);
testResult.setWasRetried(true);
} else {
testResult.setStatus(status);
if (status == ITestResult.FAILURE && !handled) {
int count = failure.count++;
if (testMethod.isDataDriven()) {
count = 0;
}
handleException(ite, testMethod, testResult, count);
}
}
}
  // pass both paramValues and paramIndex to be thread safe in case parallel=true + dataprovider.
  //
  // Lifecycle of one invocation:
  //   1. @BeforeGroups + setup (@BeforeMethod) configurations
  //   2. short-circuit to a SKIP result if configuration already failed
  //   3. run the test (IHookable / reflection / timeout thread)
  //   4. finally: expected-exception handling, AFTER_INVOCATION listeners,
  //      retry bookkeeping, teardown + @AfterGroups configurations
  private ITestResult invokeMethod(
      TestMethodArguments arguments, XmlSuite suite, FailureContext failureContext) {
    TestResult testResult = TestResult.newEmptyTestResult();

    GroupConfigMethodArguments cfgArgs = new GroupConfigMethodArguments.Builder()
        .forTestMethod(arguments.getTestMethod())
        .withGroupConfigMethods(arguments.getGroupMethods())
        .forSuite(suite)
        .withParameters(arguments.getParameters())
        .forInstance(arguments.getInstance())
        .build();
    invoker.invokeBeforeGroupsConfigurations(cfgArgs);

    ITestNGMethod[] setupConfigMethods =
        TestNgMethodUtils.filterSetupConfigurationMethods(arguments.getTestMethod(),
            arguments.getBeforeMethods());
    runConfigMethods(arguments, suite, testResult, setupConfigMethods);

    long startTime = System.currentTimeMillis();
    InvokedMethod invokedMethod = new InvokedMethod(arguments.getInstance(),
        arguments.getTestMethod(), startTime, testResult);

    // Configuration failed for this method/class/instance: register a skip,
    // still running the after-groups/teardown path for consistency.
    if (invoker.hasConfigurationFailureFor(
        arguments.getTestMethod(), arguments.getTestMethod().getGroups() ,
        arguments.getTestClass(),
        arguments.getInstance())) {
      Throwable exception = ExceptionUtils.getExceptionDetails(m_testContext,
          arguments.getInstance());
      ITestResult result = registerSkippedTestResult(arguments.getTestMethod(), System.currentTimeMillis(), exception);
      TestResult.copyAttributes(testResult, result);
      m_notifier.addSkippedTest(arguments.getTestMethod(), result);
      arguments.getTestMethod().incrementCurrentInvocationCount();
      testResult.setMethod(arguments.getTestMethod());
      invokedMethod = new InvokedMethod(arguments.getInstance(),
          arguments.getTestMethod(), startTime, result);
      invokeListenersForSkippedTestResult(result, invokedMethod);
      runAfterGroupsConfigurations(arguments, suite, testResult);

      return result;
    }

    //
    // Create the ExtraOutput for this method
    //
    try {
      testResult = TestResult
          .newTestResultFrom(testResult, arguments.getTestMethod(), m_testContext, System.currentTimeMillis());
      //Recreate the invoked method object again, because we now have a new test result object
      invokedMethod = new InvokedMethod(arguments.getInstance(),
          arguments.getTestMethod(), invokedMethod.getDate(), testResult);

      testResult.setParameters(arguments.getParameterValues());
      testResult.setParameterIndex(arguments.getParametersIndex());
      testResult.setHost(m_testContext.getHost());
      testResult.setStatus(ITestResult.STARTED);

      Reporter.setCurrentTestResult(testResult);

      // Fix from ansgarkonermann
      // invokedMethod is used in the finally, which can be invoked if
      // any of the test listeners throws an exception, therefore,
      // invokedMethod must have a value before we get here
      if (!m_suiteState.isFailed()) {
        runTestResultListener(testResult);
      }

      log(3, "Invoking " + arguments.getTestMethod().getQualifiedName());
      runInvokedMethodListeners(BEFORE_INVOCATION, invokedMethod, testResult);

      m_notifier.addInvokedMethod(invokedMethod);

      Method thisMethod = arguments.getTestMethod().getConstructorOrMethod().getMethod();

      // Dry runs report success without executing anything.
      if (RuntimeBehavior.isDryRun()) {
        setTestStatus(testResult, ITestResult.SUCCESS);
        return testResult;
      }

      // If this method is a IHookable, invoke its run() method
      IHookable hookableInstance =
          IHookable.class.isAssignableFrom(arguments.getTestMethod().getRealClass())
              ? (IHookable) arguments.getInstance()
              : m_configuration.getHookable();

      if (MethodHelper.calculateTimeOut(arguments.getTestMethod()) <= 0) {
        if (hookableInstance != null) {
          MethodInvocationHelper.invokeHookable(
              arguments.getInstance(), arguments.getParameterValues(), hookableInstance, thisMethod, testResult);
        } else {
          // Not a IHookable, invoke directly
          MethodInvocationHelper.invokeMethod(thisMethod, arguments.getInstance(),
              arguments.getParameterValues());
        }
        setTestStatus(testResult, ITestResult.SUCCESS);
      } else {
        // Method with a timeout
        MethodInvocationHelper.invokeWithTimeout(
            arguments.getTestMethod(), arguments.getInstance(),
            arguments.getParameterValues(), testResult, hookableInstance);
      }
    } catch (InvocationTargetException ite) {
      // Unwrap the reflective wrapper; the cause is the test's real exception.
      testResult.setThrowable(ite.getCause());
      setTestStatus(testResult, ITestResult.FAILURE);
    } catch (ThreadExecutionException tee) { // wrapper for TestNGRuntimeException
      Throwable cause = tee.getCause();
      if (TestNGRuntimeException.class.equals(cause.getClass())) {
        testResult.setThrowable(cause.getCause());
      } else {
        testResult.setThrowable(cause);
      }
      setTestStatus(testResult, ITestResult.FAILURE);
    } catch (Throwable thr) { // covers the non-wrapper exceptions
      testResult.setThrowable(thr);
      setTestStatus(testResult, ITestResult.FAILURE);
    } finally {
      // Set end time ASAP
      testResult.setEndMillis(System.currentTimeMillis());

      // Re-evaluate the status against @Test(expectedExceptions=...).
      ExpectedExceptionsHolder expectedExceptionClasses =
          new ExpectedExceptionsHolder(
              annotationFinder(), arguments.getTestMethod(), new RegexpExpectedExceptionsHolder(annotationFinder(),
              arguments.getTestMethod()));
      StatusHolder holder =
          considerExceptions(arguments.getTestMethod(), testResult, expectedExceptionClasses, failureContext);
      int statusBeforeListenerInvocation = testResult.getStatus();
      runInvokedMethodListeners(AFTER_INVOCATION, invokedMethod, testResult);
      boolean wasResultUnaltered = statusBeforeListenerInvocation == testResult.getStatus();
      handleInvocationResults(arguments.getTestMethod(), testResult, failureContext, holder, wasResultUnaltered);

      // If this method has a data provider and just failed, memorize the number
      // at which it failed.
      // Note: we're not exactly testing that this method has a data provider, just
      // that it has parameters, so might have to revisit this if bugs get reported
      // for the case where this method has parameters that don't come from a data
      // provider
      if (testResult.getThrowable() != null && arguments.getParameterValues().length > 0) {
        arguments.getTestMethod()
            .addFailedInvocationNumber(arguments.getParametersIndex());
      }

      //
      // Increment the invocation count for this method
      //
      arguments.getTestMethod().incrementCurrentInvocationCount();
      runTestResultListener(testResult);
      collectResults(arguments.getTestMethod(), testResult);

      runAfterGroupsConfigurations(
          arguments, suite, testResult);

      // Reset the test result last. If we do this too early, Reporter.log()
      // invocations from listeners will be discarded
      Reporter.setCurrentTestResult(null);
    }

    return testResult;
  }
/**
 * Runs the teardown configuration methods for the just-executed test method,
 * then invokes the after-groups configuration methods for its groups.
 */
private void runAfterGroupsConfigurations(TestMethodArguments arguments,
    XmlSuite suite, TestResult testResult) {
  ITestNGMethod[] teardowns =
      TestNgMethodUtils.filterTeardownConfigurationMethods(
          arguments.getTestMethod(), arguments.getAfterMethods());
  runConfigMethods(arguments, suite, testResult, teardowns);

  GroupConfigMethodArguments groupArguments =
      new GroupConfigMethodArguments.Builder()
          .forTestMethod(arguments.getTestMethod())
          .withGroupConfigMethods(arguments.getGroupMethods())
          .forSuite(suite)
          .withParameters(arguments.getParameters())
          .forInstance(arguments.getInstance())
          .build();
  invoker.invokeAfterGroupsConfigurations(groupArguments);
}
/**
 * Invokes the given teardown configuration methods, forwarding the full
 * invocation context (test class/method, suite, parameters, instance, result).
 */
private void runConfigMethods(TestMethodArguments arguments,
    XmlSuite suite, TestResult testResult, ITestNGMethod[] teardownConfigMethods) {
  ConfigMethodArguments configArguments =
      new ConfigMethodArguments.Builder()
          .forTestClass(arguments.getTestClass())
          .forTestMethod(arguments.getTestMethod())
          .usingConfigMethodsAs(teardownConfigMethods)
          .forSuite(suite)
          .usingParameters(arguments.getParameters())
          .usingParameterValues(arguments.getParameterValues())
          .usingInstance(arguments.getInstance())
          .withResult(testResult)
          .build();
  invoker.invokeConfigurations(configArguments);
}
/**
 * Builds and publishes a SKIP result for a test method that will not be run.
 * Listeners first observe the result as STARTED and are notified, then the
 * status is downgraded to SKIP, so they see the normal result lifecycle.
 *
 * @param testMethod the method being skipped
 * @param start epoch millis used as the result's start time
 * @param throwable cause to attach to the result (forwarded to the factory)
 * @return the skipped test result
 */
public ITestResult registerSkippedTestResult(
    ITestNGMethod testMethod, long start, Throwable throwable) {
  ITestResult result =
      TestResult.newEndTimeAwareTestResult(testMethod, m_testContext, throwable, start);
  // Listeners are told the method started before the status becomes SKIP.
  result.setStatus(ITestResult.STARTED);
  runTestResultListener(result);
  result.setStatus(TestResult.SKIP);
  Reporter.setCurrentTestResult(result);
  return result;
}
/**
 * Post-processes a test result against the method's expected-exception
 * configuration and returns the resulting status/handled flags.
 *
 * @param tm the test method that was invoked
 * @param testresult the result produced by the invocation (may be mutated)
 * @param exceptionsHolder expected-exception configuration, or null if none
 * @param failure shared failure bookkeeping; its count is incremented when
 *        the regular failure handling runs
 * @return holder carrying the computed status and whether the failure was
 *         already handled here
 */
private StatusHolder considerExceptions(
    ITestNGMethod tm,
    ITestResult testresult,
    ExpectedExceptionsHolder exceptionsHolder,
    FailureContext failure) {
  StatusHolder holder = new StatusHolder();
  holder.status = testresult.getStatus();
  holder.handled = false;

  Throwable ite = testresult.getThrowable();
  if (holder.status == ITestResult.FAILURE && ite != null) {
    // Invocation caused an exception, see if the method was annotated with @ExpectedException
    if (exceptionsHolder != null) {
      if (exceptionsHolder.isExpectedException(ite)) {
        // The thrown exception was expected: the test actually passed.
        testresult.setStatus(ITestResult.SUCCESS);
        holder.status = ITestResult.SUCCESS;
      } else {
        if (isSkipExceptionAndSkip(ite)) {
          holder.status = ITestResult.SKIP;
        } else {
          // Wrong exception type: replace the throwable with a descriptive one.
          testresult.setThrowable(exceptionsHolder.wrongException(ite));
          holder.status = ITestResult.FAILURE;
        }
      }
    } else {
      // No expectations configured: run the regular failure handling and
      // record that the failure has already been handled here.
      handleException(ite, tm, testresult, failure.count++);
      holder.handled = true;
      holder.status = testresult.getStatus();
    }
  } else if (holder.status != ITestResult.SKIP && exceptionsHolder != null) {
    // The method completed normally (and was not skipped) although an
    // exception was expected: turn that into a failure.
    TestException exception = exceptionsHolder.noException(tm);
    if (exception != null) {
      testresult.setThrowable(exception);
      holder.status = ITestResult.FAILURE;
    }
  }
  return holder;
}
/**
 * Chooses the effective test status: a listener may have overridden the
 * result's status after invocation, in which case the live result wins;
 * otherwise the precomputed holder status is used.
 */
private static int computeTestStatusComparingTestResultAndStatusHolder(
    ITestResult testResult, StatusHolder holder, boolean wasResultUnaltered) {
  return wasResultUnaltered ? holder.status : testResult.getStatus();
}
/**
 * Helper that runs one test method across all of its parameter sets
 * (from a data provider), either sequentially or in parallel, and
 * accumulates the produced results.
 */
private class MethodInvocationAgent {
  // Test context the invocations run in.
  private final ITestContext context;
  // Results accumulated across all invocations.
  private final List<ITestResult> result = Lists.newArrayList();
  // Shared failure bookkeeping reused across invocations.
  private final FailureContext failure = new FailureContext();
  private final ITestInvoker invoker;
  private final TestMethodArguments arguments;

  public MethodInvocationAgent(TestMethodArguments arguments, ITestInvoker invoker, ITestContext context) {
    this.arguments = arguments;
    this.invoker = invoker;
    this.context = context;
  }

  public List<ITestResult> getResult() {
    return result;
  }

  /**
   * Creates the parameter sets for the test method and runs it once per
   * set. Parameter-creation errors are reported as a failure (for
   * TestNGException, i.e. configuration problems) or a skip (anything
   * else) without running the method.
   *
   * @param invCount the invocation count before this call
   * @return the invocation count after all runs
   */
  public int invoke(int invCount) {
    AtomicInteger invocationCount = new AtomicInteger(invCount);
    long start = System.currentTimeMillis();
    Map<String, String> allParameterNames = Maps.newHashMap();
    ParameterHandler handler = new ParameterHandler(annotationFinder(), buildDataProviderHolder());
    ParameterBag bag = handler.createParameters(
        arguments.getTestMethod(),
        arguments.getParameters(),
        allParameterNames,
        context,
        arguments.getInstance());

    if (bag.hasErrors()) {
      ITestResult tr = bag.errorResult;
      Throwable throwable = Objects.requireNonNull(tr).getThrowable();
      if (throwable instanceof TestNGException) {
        tr.setStatus(ITestResult.FAILURE);
        m_notifier.addFailedTest(arguments.getTestMethod(), tr);
      } else {
        tr.setStatus(ITestResult.SKIP);
        m_notifier.addSkippedTest(arguments.getTestMethod(), tr);
      }
      runTestResultListener(tr);
      result.add(tr);
      return invocationCount.get();
    }

    Iterator<Object[]> allParameterValues = Objects.requireNonNull(bag.parameterHolder).parameters;

    try {
      IMethodRunner runner = this.invoker.getRunner();
      if (bag.runInParallel()) {
        List<ITestResult> parallel = runner.runInParallel(arguments,
            this.invoker, context, invocationCount, failure,
            allParameterValues, m_skipFailedInvocationCounts);
        result.addAll(parallel);
      } else {
        List<ITestResult> sequential = runner.runInSequence(arguments,
            this.invoker, context, invocationCount, failure,
            allParameterValues, m_skipFailedInvocationCounts);
        result.addAll(sequential);
      }
    } catch (Throwable cause) {
      // Any uncaught error while iterating the parameter sets is converted
      // into a failed test result rather than being propagated.
      ITestResult r =
          TestResult.newEndTimeAwareTestResult(arguments.getTestMethod(), m_testContext, cause, start);
      r.setStatus(TestResult.FAILURE);
      result.add(r);
      runTestResultListener(r);
      m_notifier.addFailedTest(arguments.getTestMethod(), r);
    }
    return invocationCount.get();
  }
}
}
|
3e0e8a2b5f4e0b721b02844edc7b1024dc7dc412 | 1,001 | java | Java | config-model/src/test/java/com/yahoo/vespa/model/container/search/searchchain/SearchChainsTestBase.java | t1707/vespa | 9f4859e9996ac9913ce80ed9b209f683507fe157 | [
"Apache-2.0"
] | null | null | null | config-model/src/test/java/com/yahoo/vespa/model/container/search/searchchain/SearchChainsTestBase.java | t1707/vespa | 9f4859e9996ac9913ce80ed9b209f683507fe157 | [
"Apache-2.0"
] | 1 | 2021-01-21T01:37:37.000Z | 2021-01-21T01:37:37.000Z | config-model/src/test/java/com/yahoo/vespa/model/container/search/searchchain/SearchChainsTestBase.java | t1707/vespa | 9f4859e9996ac9913ce80ed9b209f683507fe157 | [
"Apache-2.0"
] | null | null | null | 38.5 | 118 | 0.759241 | 6,175 | // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.model.container.search.searchchain;
import com.yahoo.binaryprefix.BinaryPrefix;
import com.yahoo.binaryprefix.BinaryScaledAmount;
import com.yahoo.config.model.builder.xml.test.DomBuilderTest;
import com.yahoo.vespa.model.builder.xml.dom.chains.search.DomSearchChainsBuilder;
import org.junit.Before;
import org.w3c.dom.Element;
/** Creates SearchChains model from xml input.
* @author tonytv
*/
public abstract class SearchChainsTestBase extends DomBuilderTest {

    /**
     * Builds a SearchChains model from the XML returned by
     * {@link #servicesXml()} and initializes it with two mock search
     * clusters before each test.
     */
    @Before
    public void setupSearchChains() {
        SearchChains searchChains = new DomSearchChainsBuilder().build(root, servicesXml());
        searchChains.initialize(MockSearchClusters.twoMockClusterSpecsByName(root),
                new BinaryScaledAmount(100, BinaryPrefix.mega));
        // Freeze after initialization so the model topology can no longer change.
        root.freezeModelTopology();
    }

    /** Returns the services XML element describing the search chains under test. */
    abstract Element servicesXml();
}
|
3e0e8baf9a68e46c8ffdccf055c849fc147389db | 4,768 | java | Java | src/main/java/seedu/divelog/logic/parser/AddCommandParser.java | cjunxiang/main | 9df98f1b03f7e3752579d4b0744f279ced8be199 | [
"MIT"
] | null | null | null | src/main/java/seedu/divelog/logic/parser/AddCommandParser.java | cjunxiang/main | 9df98f1b03f7e3752579d4b0744f279ced8be199 | [
"MIT"
] | null | null | null | src/main/java/seedu/divelog/logic/parser/AddCommandParser.java | cjunxiang/main | 9df98f1b03f7e3752579d4b0744f279ced8be199 | [
"MIT"
] | null | null | null | 46.745098 | 119 | 0.686871 | 6,176 | package seedu.divelog.logic.parser;
import java.util.stream.Stream;
import org.json.JSONException;
import seedu.divelog.commons.core.Messages;
import seedu.divelog.commons.util.CompareUtil;
import seedu.divelog.logic.commands.AddCommand;
import seedu.divelog.logic.parser.exceptions.ParseException;
import seedu.divelog.logic.pressuregroup.PressureGroupLogic;
import seedu.divelog.logic.pressuregroup.exceptions.LimitExceededException;
import seedu.divelog.model.dive.DepthProfile;
import seedu.divelog.model.dive.DiveSession;
import seedu.divelog.model.dive.Location;
import seedu.divelog.model.dive.OurDate;
import seedu.divelog.model.dive.PressureGroup;
import seedu.divelog.model.dive.Time;
import seedu.divelog.model.dive.TimeZone;
/**
* Parses input arguments and creates a new AddCommand object.
*/
public class AddCommandParser implements Parser<AddCommand> {
/**
* Parses the given {@code String} of arguments in the context of the AddCommand
* and returns an AddCommand object for execution.
* @throws ParseException if the user input does not conform the expected format.
*/
public AddCommand parse(String args) throws ParseException {
ArgumentMultimap argMultimap =
ArgumentTokenizer.tokenize(args,
CliSyntax.PREFIX_DATE_START,
CliSyntax.PREFIX_TIME_START,
CliSyntax.PREFIX_DATE_END,
CliSyntax.PREFIX_TIME_END,
CliSyntax.PREFIX_SAFETY_STOP,
CliSyntax.PREFIX_DEPTH,
CliSyntax.PREFIX_PRESSURE_GROUP_START,
CliSyntax.PREFIX_LOCATION,
CliSyntax.PREFIX_TIMEZONE);
if (!arePrefixesPresent(argMultimap,
CliSyntax.PREFIX_DATE_START,
CliSyntax.PREFIX_TIME_START,
CliSyntax.PREFIX_DATE_END,
CliSyntax.PREFIX_TIME_END,
CliSyntax.PREFIX_SAFETY_STOP,
CliSyntax.PREFIX_DEPTH,
CliSyntax.PREFIX_PRESSURE_GROUP_START,
CliSyntax.PREFIX_LOCATION)
|| !argMultimap.getPreamble().isEmpty()) {
throw new ParseException(String.format(Messages.MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
}
ParserUtil.checkTimeformat(argMultimap);
ParserUtil.checkDateformat(argMultimap);
//ParserUtil.checkTimeZoneformat(argMultimap);
OurDate dateStart = new OurDate(argMultimap.getValue(CliSyntax.PREFIX_DATE_START).get());
Time startTime = new Time(argMultimap.getValue(CliSyntax.PREFIX_TIME_START).get());
OurDate dateEnd = new OurDate(argMultimap.getValue(CliSyntax.PREFIX_DATE_END).get());
Time endTime = new Time(argMultimap.getValue(CliSyntax.PREFIX_TIME_END).get());
Time safetyStop = new Time(argMultimap.getValue(CliSyntax.PREFIX_SAFETY_STOP).get());
PressureGroup pressureGroupAtBegining =
new PressureGroup(argMultimap.getValue(CliSyntax.PREFIX_PRESSURE_GROUP_START).get());
Location location =
new Location(argMultimap.getValue(CliSyntax.PREFIX_LOCATION).get());
DepthProfile depthProfile = ParserUtil.parseDepth(argMultimap.getValue(CliSyntax.PREFIX_DEPTH).get());
TimeZone timezone = new TimeZone(argMultimap.getValue(CliSyntax.PREFIX_TIMEZONE).get());
try {
long duration = CompareUtil.checkTimeDifference(startTime.getTimeString(), endTime.getTimeString(),
dateStart.getOurDateString(), dateEnd.getOurDateString());
PressureGroup pressureGroupAtEnd = PressureGroupLogic.computePressureGroup(depthProfile,
(float) duration, pressureGroupAtBegining);
DiveSession dive =
new DiveSession(dateStart, startTime, safetyStop, dateEnd, endTime, pressureGroupAtBegining,
pressureGroupAtEnd, location, depthProfile, timezone);
return new AddCommand(dive);
} catch (JSONException e) {
throw new ParseException(Messages.MESSAGE_INTERNAL_ERROR);
} catch (LimitExceededException l) {
throw new ParseException(AddCommand.MESSAGE_ERROR);
} catch (Exception e) {
throw new ParseException(Messages.MESSAGE_INTERNAL_ERROR);
}
}
/**
* Returns true if none of the prefixes contains empty {@code Optional} values in the given
* {@code ArgumentMultimap}.
*/
public static boolean arePrefixesPresent(ArgumentMultimap argumentMultimap, Prefix... prefixes) {
return Stream.of(prefixes).allMatch(prefix -> argumentMultimap.getValue(prefix).isPresent());
}
}
|
3e0e8be6233a1c3b05f479e81fd529d5201c65f2 | 344 | java | Java | src/test/java/com/librato/disco/FakeChildData.java | librato/disco-java | 529d9ff126194431fe035f1285d53aa5c1b9d565 | [
"BSD-3-Clause"
] | 20 | 2015-04-09T08:36:51.000Z | 2020-06-23T23:18:39.000Z | src/test/java/com/librato/disco/FakeChildData.java | librato/disco-java | 529d9ff126194431fe035f1285d53aa5c1b9d565 | [
"BSD-3-Clause"
] | 9 | 2015-01-16T16:12:25.000Z | 2016-04-18T20:30:47.000Z | src/test/java/com/librato/disco/FakeChildData.java | librato/disco-java | 529d9ff126194431fe035f1285d53aa5c1b9d565 | [
"BSD-3-Clause"
] | 9 | 2015-02-04T11:24:45.000Z | 2021-01-31T13:35:24.000Z | 22.933333 | 60 | 0.697674 | 6,177 | package com.librato.disco;
import org.apache.curator.framework.recipes.cache.ChildData;
/**
 * Test helper producing {@link ChildData} instances whose path encodes the
 * payload ("/path/data-&lt;data&gt;") and whose bytes are the payload itself.
 */
public class FakeChildData extends ChildData {

    /** Convenience factory mirroring the constructor. */
    public static ChildData newData(String data) {
        return new FakeChildData(data);
    }

    public FakeChildData(String payload) {
        // No Stat is supplied (null); the path embeds the payload for readability.
        super("/path/data-" + payload, null, payload.getBytes());
    }
}
|
3e0e8cd0636fd5c8eb8345e6d716c28499f233f3 | 3,184 | java | Java | core/src/main/java/io/github/spleefx/util/menu/MenuBuilder.java | Tominous/SpleefX | a44bdad63d0b4c6520b948fd9dcfa857ea11c6e4 | [
"CC0-1.0"
] | 1 | 2020-02-08T21:33:32.000Z | 2020-02-08T21:33:32.000Z | core/src/main/java/io/github/spleefx/util/menu/MenuBuilder.java | Tominous/SpleefX | a44bdad63d0b4c6520b948fd9dcfa857ea11c6e4 | [
"CC0-1.0"
] | null | null | null | core/src/main/java/io/github/spleefx/util/menu/MenuBuilder.java | Tominous/SpleefX | a44bdad63d0b4c6520b948fd9dcfa857ea11c6e4 | [
"CC0-1.0"
] | null | null | null | 27.213675 | 98 | 0.645729 | 6,178 | /*
* * Copyright 2020 github.com/ReflxctionDev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.spleefx.util.menu;
import org.bukkit.ChatColor;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.inventory.ItemStack;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
/**
* A builder-styled class for {@link Menu}
*
* @see Menu
*/
/**
 * Fluent builder for {@link Menu} instances.
 *
 * <p>Collects the inventory title and row count, per-slot items with their
 * click handlers, and two behavior flags, then assembles the menu in
 * {@link #build()}.
 *
 * @see Menu
 */
public class MenuBuilder {

    /** Color-translated inventory title, used for identity checks. */
    private final String title;

    /** Number of inventory rows. */
    private final int rowCount;

    /** Slot index to displayed item. */
    private final Map<Integer, ItemStack> slotItems = new HashMap<>();

    /** Slot index to click handler. */
    private final Map<Integer, Consumer<InventoryClickEvent>> slotActions = new HashMap<>();

    /** When true, click events inside the inventory are cancelled. */
    private boolean cancelClicks = false;

    /** When true, the inventory closes once an item is clicked. */
    private boolean closeAfterClick = false;

    /**
     * Starts building a menu with the given title and row count.
     * '&amp;' color codes in the title are translated.
     *
     * @param name title of the inventory
     * @param rows number of rows the inventory has
     */
    public MenuBuilder(String name, int rows) {
        this.title = ChatColor.translateAlternateColorCodes('&', name);
        this.rowCount = rows;
    }

    /**
     * Places an item in a slot and binds a click handler to that slot.
     *
     * @param item item to display
     * @param slot slot index for the item
     * @param clickAction handler invoked when the slot is clicked
     * @return this builder, for chaining
     */
    public MenuBuilder item(ItemStack item, int slot, Consumer<InventoryClickEvent> clickAction) {
        slotItems.put(slot, item);
        slotActions.put(slot, clickAction);
        return this;
    }

    /**
     * Makes the built menu cancel every click event inside its inventory.
     *
     * @return this builder, for chaining
     */
    public MenuBuilder cancelClickEvents() {
        this.cancelClicks = true;
        return this;
    }

    /**
     * Makes the built menu close as soon as an item is clicked.
     *
     * @return this builder, for chaining
     */
    public MenuBuilder closeOnClick() {
        this.closeAfterClick = true;
        return this;
    }

    /**
     * Assembles the {@link Menu} from the collected state.
     *
     * @return the constructed menu
     */
    public Menu build() {
        return new Menu(rowCount, title, slotItems, slotActions, cancelClicks, closeAfterClick);
    }
}
|
3e0e8da86dd9677eb1c789d576c37796864793f7 | 453 | java | Java | example/android/app/src/main/java/ru/innim/flutter_login_vk_example/MainActivity.java | AtixD/flutter_login_vk | 9b62eacd15f8c8911316fc4cee86a1cef86d6970 | [
"BSD-3-Clause"
] | 14 | 2020-06-23T13:15:18.000Z | 2022-03-30T11:03:00.000Z | example/android/app/src/main/java/ru/innim/flutter_login_vk_example/MainActivity.java | AtixD/flutter_login_vk | 9b62eacd15f8c8911316fc4cee86a1cef86d6970 | [
"BSD-3-Clause"
] | 14 | 2020-06-15T14:06:57.000Z | 2022-03-17T12:05:37.000Z | example/android/app/src/main/java/ru/innim/flutter_login_vk_example/MainActivity.java | AtixD/flutter_login_vk | 9b62eacd15f8c8911316fc4cee86a1cef86d6970 | [
"BSD-3-Clause"
] | 14 | 2020-06-23T13:15:20.000Z | 2022-02-14T15:26:54.000Z | 32.357143 | 78 | 0.8234 | 6,179 | package ru.innim.flutter_login_vk_example;
import androidx.annotation.NonNull;
import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.plugins.GeneratedPluginRegistrant;
public class MainActivity extends FlutterActivity {
    // Registers all generated Flutter plugins with the engine when the
    // activity configures it (required for the example app's plugins to load).
    @Override
    public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
        GeneratedPluginRegistrant.registerWith(flutterEngine);
    }
}
|
3e0e8efab355d9a27f7db02b66e222a87e041c69 | 1,751 | java | Java | Backend/src/Entidades/Marca.java | xxxDarsxxx/Proyecto_Moviles | e8b6adc78f0d552010dbb5089b955fa4d05a2a7e | [
"MIT"
] | null | null | null | Backend/src/Entidades/Marca.java | xxxDarsxxx/Proyecto_Moviles | e8b6adc78f0d552010dbb5089b955fa4d05a2a7e | [
"MIT"
] | null | null | null | Backend/src/Entidades/Marca.java | xxxDarsxxx/Proyecto_Moviles | e8b6adc78f0d552010dbb5089b955fa4d05a2a7e | [
"MIT"
] | null | null | null | 21.353659 | 84 | 0.567676 | 6,180 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Entidades;
import java.io.Serializable;
import java.util.Objects;
/**
*
* @author david
*/
public class Marca implements Serializable{
String codigo;
String descripcion;
public Marca() {
}
public Marca(String codigo, String descripcion) {
this.codigo = codigo;
this.descripcion = descripcion;
}
public String getCodigo() {
return codigo;
}
public void setCodigo(String codigo) {
this.codigo = codigo;
}
public String getDescripcion() {
return descripcion;
}
public void setDescripcion(String descripcion) {
this.descripcion = descripcion;
}
@Override
public String toString() {
return "Marca{" + "codigo=" + codigo + ", descripcion=" + descripcion + '}';
}
@Override
public int hashCode() {
int hash = 3;
hash = 79 * hash + Objects.hashCode(this.codigo);
hash = 79 * hash + Objects.hashCode(this.descripcion);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Marca other = (Marca) obj;
if (!Objects.equals(this.codigo, other.codigo)) {
return false;
}
if (!Objects.equals(this.descripcion, other.descripcion)) {
return false;
}
return true;
}
}
|
3e0e8efd91d7e24ad3c6eb2ccb18e53e4dcf0039 | 2,046 | java | Java | src/interclasse/Salas.java | pedrogneri/Projeto_Interclasse | 65dc573c9a89d5791c18e361e6f50842d5dabe7b | [
"MIT"
] | 4 | 2018-11-14T14:17:38.000Z | 2019-03-21T14:52:21.000Z | src/interclasse/Salas.java | pedrogneri/Projeto_Interclasse | 65dc573c9a89d5791c18e361e6f50842d5dabe7b | [
"MIT"
] | null | null | null | src/interclasse/Salas.java | pedrogneri/Projeto_Interclasse | 65dc573c9a89d5791c18e361e6f50842d5dabe7b | [
"MIT"
] | 2 | 2018-10-17T22:01:42.000Z | 2019-03-21T14:52:28.000Z | 28.816901 | 114 | 0.636364 | 6,181 | package interclasse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Splits the selected classes (salas) of a school year into one or two
 * groups: with more than four participants, grupo1 receives three randomly
 * drawn classes and grupo2 the rest; otherwise grupo1 gets all of them and
 * grupo2 stays empty.
 */
public class Salas {

    // All classes taking part in the draw.
    private List<String> salasParticipantes;
    // First group (three random classes when more than four participate).
    private List<String> grupo1;
    // Second group (the remaining classes).
    private List<String> grupo2;

    public Salas() {
        this(new ArrayList<>(), new ArrayList<>(), new ArrayList<>());
    }

    public Salas(List<String> salasParticipantes, List<String> grupo1, List<String> grupo2) {
        this.salasParticipantes = salasParticipantes;
        this.grupo1 = grupo1;
        this.grupo2 = grupo2;
    }

    public List<String> getGrupo1() {
        return this.grupo1;
    }

    public void setGrupo1(List<String> grupo1) {
        this.grupo1 = grupo1;
    }

    public List<String> getGrupo2() {
        return this.grupo2;
    }

    public void setGrupo2(List<String> grupo2) {
        this.grupo2 = grupo2;
    }

    public List<String> getSalasParticipantes() {
        return this.salasParticipantes;
    }

    public void setSalasParticipantes(List<String> salasParticipantes) {
        this.salasParticipantes = salasParticipantes;
    }

    /** Records the participating classes. */
    public void gerarParticipantes(List<String> participantes) {
        setSalasParticipantes(participantes);
    }

    /**
     * Partitions the participants into grupo1 and grupo2. Works on a copy of
     * the participant list, so {@link #getSalasParticipantes()} is unchanged.
     */
    public void organizarParticipantes() {
        // Pre-size grupo1 with three placeholder slots; montarGrupo1 either
        // fills them (random draw) or replaces the list entirely.
        setGrupo1(Arrays.asList("", "", ""));
        List<String> salas = new ArrayList<>(getSalasParticipantes());
        montarGrupo1(salas);
        montarGrupo2(salas);
    }

    private void montarGrupo1(List<String> salas) {
        // Removed leftover debug output (System.out.println of the list size,
        // marked "somente para testes" in the original).
        if (salas.size() > 4) {
            Collections.shuffle(salas);
            for (int x = 0; x < getGrupo1().size(); x++) {
                getGrupo1().set(x, salas.get(x));
            }
        } else {
            setGrupo1(new ArrayList<>(salas));
        }
    }

    private void montarGrupo2(List<String> salas) {
        // Whatever was not placed in grupo1 forms grupo2.
        salas.removeAll(getGrupo1());
        setGrupo2(salas);
    }
}
|
3e0e8f703112cd9f2a894db574d17804fc924a7f | 283 | java | Java | src/main/java/redstonetweaks/interfaces/mixin/RTIServerTickScheduler.java | SpaceWalkerRS/redstone-tweaks | 397fdde63c61a07baaae020f0eb99c1d2aa701b8 | [
"MIT"
] | 3 | 2020-08-26T06:17:49.000Z | 2021-07-25T05:06:54.000Z | src/main/java/redstonetweaks/interfaces/mixin/RTIServerTickScheduler.java | SpaceWalkerRS/redstone-tweaks | 397fdde63c61a07baaae020f0eb99c1d2aa701b8 | [
"MIT"
] | 2 | 2020-12-13T12:48:35.000Z | 2021-03-02T02:12:27.000Z | src/main/java/redstonetweaks/interfaces/mixin/RTIServerTickScheduler.java | SpaceWalkerRS/redstone-tweaks | 397fdde63c61a07baaae020f0eb99c1d2aa701b8 | [
"MIT"
] | 1 | 2020-08-26T06:29:55.000Z | 2020-08-26T06:29:55.000Z | 20.214286 | 79 | 0.798587 | 6,182 | package redstonetweaks.interfaces.mixin;
import net.minecraft.util.math.BlockPos;
/**
 * Mixin duck interface adding Redstone Tweaks tick-scheduling queries and
 * control hooks to the server tick scheduler.
 */
public interface RTIServerTickScheduler {

    // Whether a tick for this object is scheduled at pos with the given
    // delay. (Exact matching semantics inferred from the name — confirm
    // against the mixin implementation.)
    public boolean hasScheduledTickAtTime(BlockPos pos, Object object, int delay);

    // Begins processing of scheduled ticks for this cycle.
    public void startTicking();

    // Attempts to continue processing; returns whether ticking proceeded —
    // TODO confirm return semantics in the implementing mixin.
    public boolean tryContinueTicking();
}
|
3e0e8f72c7ed0a7a9081f60b6a69837cf7f143d6 | 270 | java | Java | src/edacc/model/InstanceSourceClassHasInstance.java | MalteSchledjewski/edacc_gui | f91fb0c06339b488cb1609d737e497905a419385 | [
"MIT"
] | 1 | 2019-07-18T15:19:29.000Z | 2019-07-18T15:19:29.000Z | src/edacc/model/InstanceSourceClassHasInstance.java | EDACC/edacc_gui | f91fb0c06339b488cb1609d737e497905a419385 | [
"MIT"
] | 1 | 2019-10-16T08:43:13.000Z | 2019-10-16T08:43:13.000Z | src/edacc/model/InstanceSourceClassHasInstance.java | MalteSchledjewski/edacc_gui | f91fb0c06339b488cb1609d737e497905a419385 | [
"MIT"
] | 2 | 2015-05-08T09:00:01.000Z | 2019-07-18T15:19:37.000Z | 15 | 62 | 0.703704 | 6,183 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edacc.model;
/**
*
* @author rretz
*/
/**
 * Signals that an instance source class still has instances associated with
 * it (name-derived semantics; raised by callers that must refuse an
 * operation while such instances exist).
 *
 * @author rretz
 */
public class InstanceSourceClassHasInstance extends Exception {

    /** Creates the exception without a detail message (original behavior). */
    public InstanceSourceClassHasInstance() {
    }

    /**
     * Creates the exception with a detail message describing the rejected
     * operation. Backward-compatible addition.
     *
     * @param message the detail message
     */
    public InstanceSourceClassHasInstance(String message) {
        super(message);
    }
}
|
3e0e8fe3dca95083b3b9265c5c5a2e4214925368 | 642 | java | Java | jeefast-oms/src/main/java/com/lgy/oms/service/ITraceLogService.java | lgykuuga/jeefast2.0 | b86cb2aa4950972ed0efcc901f837e7bad0567fd | [
"MIT"
] | 22 | 2019-11-08T10:10:53.000Z | 2022-03-23T09:05:14.000Z | jeefast-oms/src/main/java/com/lgy/oms/service/ITraceLogService.java | lgykuuga/jeefast2.0 | b86cb2aa4950972ed0efcc901f837e7bad0567fd | [
"MIT"
] | 23 | 2019-11-22T10:48:32.000Z | 2021-09-20T20:53:42.000Z | jeefast-oms/src/main/java/com/lgy/oms/service/ITraceLogService.java | lgykuuga/jeefast2.0 | b86cb2aa4950972ed0efcc901f837e7bad0567fd | [
"MIT"
] | 8 | 2020-04-25T01:03:06.000Z | 2021-11-14T13:05:51.000Z | 16.894737 | 62 | 0.624611 | 6,184 | package com.lgy.oms.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.lgy.oms.domain.TraceLog;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* 订单轨迹日志 服务层
*
* @author lgy
* @date 2019-12-26
*/
public interface ITraceLogService extends IService<TraceLog> {

    /**
     * Adds a single order operation (trace) record.
     *
     * @param entity the trace log entry to persist
     */
    void add(TraceLog entity);

    /**
     * Adds order operation records in bulk. The original note recommends at
     * most 100 entries per batch.
     *
     * @param list the trace log entries to persist
     */
    void batchAdd(List<TraceLog> list);

    /**
     * Queries order operation records matching the given example order.
     *
     * @param traceLog query criteria (order)
     * @return matching trace log entries
     */
    List<TraceLog> get(TraceLog traceLog);
}
3e0e90370d9b34714c961d95349bdc26a9e3d526 | 2,110 | java | Java | src/main/java/io/barracks/commons/configuration/BarracksServiceClientExceptionHandler.java | barracksiot/common-libs | 7b312fac42a921c14b405ed517d0b13cdd052e50 | [
"MIT"
] | null | null | null | src/main/java/io/barracks/commons/configuration/BarracksServiceClientExceptionHandler.java | barracksiot/common-libs | 7b312fac42a921c14b405ed517d0b13cdd052e50 | [
"MIT"
] | null | null | null | src/main/java/io/barracks/commons/configuration/BarracksServiceClientExceptionHandler.java | barracksiot/common-libs | 7b312fac42a921c14b405ed517d0b13cdd052e50 | [
"MIT"
] | null | null | null | 44.893617 | 147 | 0.783886 | 6,185 | /*
* MIT License
*
* Copyright (c) 2017 Barracks Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.barracks.commons.configuration;
import cz.jirutka.spring.exhandler.handlers.ErrorMessageRestExceptionHandler;
import cz.jirutka.spring.exhandler.messages.ErrorMessage;
import io.barracks.commons.exceptions.BarracksServiceClientException;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import javax.servlet.http.HttpServletRequest;
/**
 * Base REST exception handler for {@link BarracksServiceClientException}
 * subtypes: builds the error body with the default handling, then propagates
 * the HTTP status carried by the wrapped client error instead of the
 * registered default (500).
 */
public abstract class BarracksServiceClientExceptionHandler<T extends BarracksServiceClientException> extends ErrorMessageRestExceptionHandler<T> {

    public BarracksServiceClientExceptionHandler() {
        // Default status passed to the parent; the real status is substituted
        // per-exception in handleException below.
        super(HttpStatus.INTERNAL_SERVER_ERROR);
    }

    @Override
    public ResponseEntity<ErrorMessage> handleException(T ex, HttpServletRequest req) {
        // NOTE(review): assumes ex.getCause() is non-null and exposes
        // getStatusCode() (covariant override on the exception type) —
        // confirm BarracksServiceClientException guarantees this.
        final ResponseEntity<ErrorMessage> responseEntity = super.handleException(ex, req);
        return new ResponseEntity<>(responseEntity.getBody(), responseEntity.getHeaders(), ex.getCause().getStatusCode());
    }
}
|
3e0e9073de7700d01ce046853f52ead95e7fb982 | 1,509 | java | Java | src/main/java/svenhjol/charm/smithing/compat/QuarkTallow.java | kotelkonrad/Charm | b4ec259610535d90acab87b2791c74facd6f84f1 | [
"MIT"
] | null | null | null | src/main/java/svenhjol/charm/smithing/compat/QuarkTallow.java | kotelkonrad/Charm | b4ec259610535d90acab87b2791c74facd6f84f1 | [
"MIT"
] | null | null | null | src/main/java/svenhjol/charm/smithing/compat/QuarkTallow.java | kotelkonrad/Charm | b4ec259610535d90acab87b2791c74facd6f84f1 | [
"MIT"
] | null | null | null | 29.588235 | 95 | 0.633532 | 6,186 | package svenhjol.charm.smithing.compat;
import net.minecraft.item.Item;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.item.ItemTool;
import net.minecraftforge.event.AnvilUpdateEvent;
import svenhjol.charm.smithing.feature.TallowIncreasesDurability;
import vazkii.quark.decoration.item.ItemTallow;
import java.util.Random;
/**
 * Quark compatibility hook: lets Quark's tallow item repair tools, armor and
 * other registered repairable items on the anvil.
 */
public class QuarkTallow {

    /**
     * Anvil-update handler. When a damaged repairable item (tool, armor, or
     * a class registered in {@code TallowIncreasesDurability.repairable}) is
     * combined with tallow, outputs a copy repaired by 2% of max durability
     * for a configured XP cost and one tallow.
     */
    public void onAnvilUpdate(AnvilUpdateEvent event) {
        ItemStack in = event.getLeft();
        ItemStack combine = event.getRight();
        if (in.isEmpty() || combine.isEmpty()) return;

        Item i = in.getItem();
        Item c = combine.getItem();

        boolean repairable = i instanceof ItemTool
                || i instanceof ItemArmor
                || TallowIncreasesDurability.repairable.contains(i.getClass());

        if (repairable && c instanceof ItemTallow) {
            // get the current damage
            int currentDamage = in.getItemDamage();
            int maxDamage = in.getMaxDamage();

            // An undamaged item cannot be repaired: cancel the recipe.
            if (currentDamage == 0) {
                event.setCanceled(true);
                return;
            }

            ItemStack out = in.copy();
            // 75% chance (fresh Random each call, so nondeterministic) to
            // bump the prior-work/repair-cost penalty by one.
            out.setRepairCost(in.getRepairCost() + (new Random().nextFloat() < 0.75f ? 1 : 0));
            // Restore 2% of max durability (damage counts down toward 0).
            out.setItemDamage(currentDamage - (int)(maxDamage * 0.02f));

            event.setOutput(out);
            event.setCost(TallowIncreasesDurability.xpCost);
            event.setMaterialCost(1);
        }
    }
}
|
3e0e90deae090ce9c00cc448ff7e1bab34f7435a | 997 | java | Java | src/main/java/cn/lmsite/imghub/utils/user/MD5Util.java | eoysky/imghub | acb8cdc12ed909585483262960db2db3463145ea | [
"MIT"
] | null | null | null | src/main/java/cn/lmsite/imghub/utils/user/MD5Util.java | eoysky/imghub | acb8cdc12ed909585483262960db2db3463145ea | [
"MIT"
] | null | null | null | src/main/java/cn/lmsite/imghub/utils/user/MD5Util.java | eoysky/imghub | acb8cdc12ed909585483262960db2db3463145ea | [
"MIT"
] | null | null | null | 30.212121 | 54 | 0.565697 | 6,187 | package cn.lmsite.imghub.utils.user;
import java.security.MessageDigest;
public class MD5Util {
private static final String SALT = "tamboo";
public static String encode(String password) {
password = password + SALT;
MessageDigest md5;
try {
md5 = MessageDigest.getInstance("MD5");
} catch (Exception e) {
throw new RuntimeException(e);
}
char[] charArray = password.toCharArray();
byte[] byteArray = new byte[charArray.length];
for (int i = 0; i < charArray.length; i++)
byteArray[i] = (byte) charArray[i];
byte[] md5Bytes = md5.digest(byteArray);
StringBuilder hexValue = new StringBuilder();
for (byte md5Byte : md5Bytes) {
int val = ((int) md5Byte) & 0xff;
if (val < 16) {
hexValue.append("0");
}
hexValue.append(Integer.toHexString(val));
}
return hexValue.toString();
}
}
|
3e0e91ca24391ae3f2f92cef8df3f873faeb720f | 2,923 | java | Java | demos/models/ATS-Forms/src/main/java/es/osoco/bbva/ats/forms/domain/service/ApplicationUpdatedService.java | osoco/comprendiendo-software-creando-herramientas | 64b07b29a876ce180b3ba03dfd1d1770d5fc6f6e | [
"CC0-1.0"
] | 1 | 2022-02-06T10:48:13.000Z | 2022-02-06T10:48:13.000Z | demos/models/ATS-Forms/src/main/java/es/osoco/bbva/ats/forms/domain/service/ApplicationUpdatedService.java | osoco/comprendiendo-software-creando-herramientas | 64b07b29a876ce180b3ba03dfd1d1770d5fc6f6e | [
"CC0-1.0"
] | null | null | null | demos/models/ATS-Forms/src/main/java/es/osoco/bbva/ats/forms/domain/service/ApplicationUpdatedService.java | osoco/comprendiendo-software-creando-herramientas | 64b07b29a876ce180b3ba03dfd1d1770d5fc6f6e | [
"CC0-1.0"
] | 1 | 2022-02-06T10:48:15.000Z | 2022-02-06T10:48:15.000Z | 44.969231 | 125 | 0.691413 | 6,188 | package es.osoco.bbva.ats.forms.domain.service;
import es.osoco.bbva.ats.forms.domain.aggregate.Answer;
import es.osoco.bbva.ats.forms.domain.aggregate.Application;
import es.osoco.bbva.ats.forms.domain.aggregate.ApplicationStatus;
import es.osoco.bbva.ats.forms.domain.aggregate.form.Form;
import es.osoco.bbva.ats.forms.domain.aggregate.RecoveryToken;
import es.osoco.bbva.ats.forms.domain.events.ApplicationUpdateStored;
import es.osoco.bbva.ats.forms.domain.events.ApplicationUpdated;
import es.osoco.bbva.ats.forms.domain.exception.ApplicationNotFoundException;
import es.osoco.bbva.ats.forms.domain.repository.FormRepository;
import java.time.ZonedDateTime;
import java.util.Map;
import java.util.function.Consumer;
public class ApplicationUpdatedService extends AbstractApplicationSubmitService implements Consumer<ApplicationUpdated> {

    /**
     * Applies an update event to an already-stored application: the stored and
     * incoming answers are merged, the result is persisted with a fresh
     * submission date, and an {@link ApplicationUpdateStored} event is emitted.
     *
     * @param domainEvent the update event carrying the incoming application state
     * @throws ApplicationNotFoundException if no application with the event's id exists
     */
    @Override
    public void accept(ApplicationUpdated domainEvent) {
        final Application incoming = domainEvent.getApplication();
        final Form form =
                FormRepository.getInstance().byID(incoming.getFormId() + ":" + incoming.getLanguage());
        final Application stored = applicationRepository.byID(incoming.getId());

        // Updates are only valid against an existing application.
        if (stored == null) {
            throw new ApplicationNotFoundException();
        }

        // The first submission date and status are carried over from the stored copy.
        final ZonedDateTime firstSubmissionDate = stored.getFirstSubmissionDate();
        final RecoveryToken token = recoveryTokenRepository.byID(incoming.getId());
        final Map<String, Answer> mergedAnswers = getUpdatedAnswerMap(incoming, stored, form);
        final ApplicationStatus status = stored.getStatus();

        // Inherited guard — presumably rejects updates whose application key
        // differs from the stored one; see AbstractApplicationSubmitService.
        checkIfApplicationKeyIsEquals(incoming, stored);

        final String recoveryKey = token == null ? null : token.getRecoveryKey();

        final Application merged = Application.builder()
                .version(1)
                .answersById(mergedAnswers)
                .recoveryKey(recoveryKey)
                .language(incoming.getLanguage())
                .applicationKey(incoming.getApplicationKey())
                .applicantKey(incoming.getApplicantKey())
                .status(status)
                .contestId(incoming.getContestId())
                .formId(incoming.getFormId())
                .origin(incoming.getOrigin())
                .submissionDate(ZonedDateTime.now())
                .firstSubmissionDate(firstSubmissionDate)
                .build();

        validateFields(merged);
        applicationRepository.save(merged);
        new ApplicationUpdateStored(merged).emit();
    }
}
|
3e0e925bae9747c948a435140520e78db95458ee | 6,043 | java | Java | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/parser/parsing/statements/expressions/arguments/ArgumentList.java | liveqmock/platform-tools-idea | 1c4b76108add6110898a7e3f8f70b970e352d3d4 | [
"Apache-2.0"
] | 2 | 2015-05-08T15:07:10.000Z | 2022-03-09T05:47:53.000Z | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/parser/parsing/statements/expressions/arguments/ArgumentList.java | lshain-android-source/tools-idea | b37108d841684bcc2af45a2539b75dd62c4e283c | [
"Apache-2.0"
] | null | null | null | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/parser/parsing/statements/expressions/arguments/ArgumentList.java | lshain-android-source/tools-idea | b37108d841684bcc2af45a2539b75dd62c4e283c | [
"Apache-2.0"
] | 2 | 2017-04-24T15:48:40.000Z | 2022-03-09T05:48:05.000Z | 35.339181 | 119 | 0.649512 | 6,189 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.parser.parsing.statements.expressions.arguments;
import com.intellij.lang.PsiBuilder;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import org.jetbrains.plugins.groovy.GroovyBundle;
import org.jetbrains.plugins.groovy.lang.lexer.TokenSets;
import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
import org.jetbrains.plugins.groovy.lang.parser.GroovyParser;
import org.jetbrains.plugins.groovy.lang.parser.parsing.statements.expressions.AssignmentExpression;
import org.jetbrains.plugins.groovy.lang.parser.parsing.statements.expressions.primary.PrimaryExpression;
import org.jetbrains.plugins.groovy.lang.parser.parsing.util.ParserUtils;
/**
 * Recursive-descent parsing of Groovy call-argument lists: positional arguments,
 * named arguments ({@code label: value}) and spread arguments ({@code *expr}).
 * Works against an IntelliJ {@link PsiBuilder}, so each helper either consumes
 * tokens and marks PSI elements or rolls the lexer back to where it started.
 *
 * @author ilyas
 */
public class ArgumentList implements GroovyElementTypes {

  // Statement-level keywords used for error recovery: once the list is known to
  // be malformed, hitting one of these means we stop consuming tokens so the
  // following statement can still be parsed.
  private static final TokenSet CONTROL_KEYWORDS = TokenSet.create(kASSERT, kBREAK, kCASE, kCLASS,
                                                                   kCONTINUE, kDEF, kDEFAULT, kDO, kELSE, kENUM, kFINAL,
                                                                   kFOR, kFINALLY, kIF, kIMPLEMENTS, kIMPORT,
                                                                   kINTERFACE, kNATIVE, kPACKAGE, kPRIVATE, kPROTECTED, kPUBLIC,
                                                                   kRETURN, kSTATIC, kSTRICTFP, kSWITCH, kSYNCHRONIZED,
                                                                   kTHROW, kTHROWS, kTRANSIENT, kTRY, kVOLATILE, kWHILE);

  /**
   * Parses a comma-separated argument list up to (but not consuming)
   * {@code closingBrace}. Emits parser errors for missing expressions or
   * separators; skips a single unexpected token to resynchronize, and bails out
   * early on {@code }} or on a control keyword after an error.
   */
  public static void parseArgumentList(PsiBuilder builder, IElementType closingBrace, GroovyParser parser) {
    boolean hasFirstArg = argumentParse(builder, parser);
    if (!hasFirstArg) {
      // Nothing parsed: either the list is legitimately empty (next token is the
      // closing brace) or there is a stray token to report and skip.
      if (!closingBrace.equals(builder.getTokenType())) {
        builder.error(GroovyBundle.message("expression.expected"));
      }

      if (mRCURLY.equals(builder.getTokenType())) return;

      if (!mCOMMA.equals(builder.getTokenType()) &&
          !closingBrace.equals(builder.getTokenType())) {
        builder.advanceLexer();
      }
    }

    ParserUtils.getToken(builder, mNLS);
    boolean hasErrors = false;
    // Main loop: one iteration per ",<argument>" until EOF or the closing brace.
    while (!builder.eof() && !closingBrace.equals(builder.getTokenType())) {
      if (!ParserUtils.getToken(builder, mCOMMA) && hasFirstArg) {
        builder.error("',' or '" + closingBrace + "' expected");
        hasErrors = true;
      }
      ParserUtils.getToken(builder, mNLS);
      // After an error, a control keyword almost certainly starts the next
      // statement — stop here instead of swallowing it.
      if (hasErrors && CONTROL_KEYWORDS.contains(builder.getTokenType())) {
        return;
      }
      if (!argumentParse(builder, parser)) {
        if (!closingBrace.equals(builder.getTokenType())) {
          builder.error(GroovyBundle.message("expression.expected"));
          hasErrors = true;
        }

        if (mRCURLY.equals(builder.getTokenType())) return;

        // Resynchronize by skipping one unexpected token.
        if (!mCOMMA.equals(builder.getTokenType()) &&
            !closingBrace.equals(builder.getTokenType())) {
          builder.advanceLexer();
        }
      }
      ParserUtils.getToken(builder, mNLS);
    }

    ParserUtils.getToken(builder, mNLS);
  }

  /**
   * Parses argument, possible with label
   *
   * Recognizes, in order: a named argument ({@code label: expr}), a spread
   * argument ({@code *expr}), or a plain expression.
   *
   * @param builder the PSI builder positioned at a potential argument
   * @return true if an argument (possibly with error markers) was consumed
   */
  private static boolean argumentParse(PsiBuilder builder, GroovyParser parser) {
    PsiBuilder.Marker argMarker = builder.mark();

    if (argumentLabelStartCheck(builder, parser)) {
      // Label already consumed; expect ": expression".
      ParserUtils.getToken(builder, mCOLON, GroovyBundle.message("colon.expected"));
      if (!AssignmentExpression.parse(builder, parser)) {
        builder.error(GroovyBundle.message("expression.expected"));
      }
      argMarker.done(NAMED_ARGUMENT);
      return true;
    }
    if (ParserUtils.getToken(builder, mSTAR)) {
      if (AssignmentExpression.parse(builder, parser)) {
        argMarker.done(SPREAD_ARGUMENT);
      }
      else {
        // A bare '*' with no expression is treated as a malformed named argument.
        builder.error(GroovyBundle.message("colon.expected"));
        argMarker.done(NAMED_ARGUMENT);
      }
      return true;
    }

    argMarker.drop();
    return AssignmentExpression.parse(builder, parser);
  }

  /**
   * Checks for argument label. In case when it is so, a caret will not be restored at
   * initial position
   *
   * Labels may be '*', identifiers, keywords, string literals, or primary
   * expressions (numbers, GStrings, lists, maps, parenthesized expressions)
   * that are immediately followed by ':'. On success the consumed tokens are
   * marked as ARGUMENT_LABEL and the lexer is left after the label; on failure
   * the lexer is rolled back to where it started.
   *
   * @param builder the PSI builder positioned at a potential label
   * @return true if an argument label was recognized and consumed
   */
  public static boolean argumentLabelStartCheck(PsiBuilder builder, GroovyParser parser) {
    PsiBuilder.Marker marker = builder.mark();
    if (ParserUtils.lookAhead(builder, mSTAR, mCOLON)) {
      builder.advanceLexer();
      marker.done(ARGUMENT_LABEL);
      return true;
    }

    final IElementType type = builder.getTokenType();

    if (ParserUtils.lookAhead(builder, mIDENT, mCOLON) ||
        TokenSets.KEYWORDS.contains(type) ||
        mSTRING_LITERAL.equals(type) ||
        mGSTRING_LITERAL.equals(type)) {
      builder.advanceLexer();

      if (mCOLON.equals(builder.getTokenType())) {
        marker.done(ARGUMENT_LABEL);
        return true;
      }
      else {
        marker.rollbackTo();
        return false;
      }
    }
    if (mGSTRING_BEGIN.equals(type) ||
        mREGEX_BEGIN.equals(type) ||
        mDOLLAR_SLASH_REGEX_BEGIN.equals(type) ||
        TokenSets.NUMBERS.contains(type) ||
        mLBRACK.equals(type) ||
        mLPAREN.equals(type) ||
        mLCURLY.equals(type)) {
      // Speculatively parse a whole primary expression; keep it only when a
      // ':' follows, otherwise roll everything back.
      PrimaryExpression.parsePrimaryExpression(builder, parser);
      if (mCOLON.equals(builder.getTokenType())) {
        marker.done(ARGUMENT_LABEL);
        return true;
      }
      else {
        marker.rollbackTo();
        return false;
      }
    }
    marker.drop();
    return false;
  }
}
|
3e0e9320454b5a15bea875747959c1d59717da9c | 1,660 | java | Java | src/main/java/com/sapient/ibench/client/gui/button/iBenchButton.java | JuergenSchT/iBench | 2bc8de38374f6a73c56da495e28e1829112d5c46 | [
"MIT"
] | 2 | 2015-01-14T15:09:22.000Z | 2015-01-27T19:32:15.000Z | src/main/java/com/sapient/ibench/client/gui/button/iBenchButton.java | JuergenSchT/iBench | 2bc8de38374f6a73c56da495e28e1829112d5c46 | [
"MIT"
] | 1 | 2015-12-09T20:22:09.000Z | 2016-01-11T15:05:57.000Z | src/main/java/com/sapient/ibench/client/gui/button/iBenchButton.java | JuergenSchT/iBench | 2bc8de38374f6a73c56da495e28e1829112d5c46 | [
"MIT"
] | 7 | 2015-03-12T23:37:09.000Z | 2021-02-04T18:42:57.000Z | 30.181818 | 158 | 0.619277 | 6,190 | package com.sapient.ibench.client.gui.button;
import com.sapient.ibench.reference.Textures;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.util.ResourceLocation;
/**
 * A 16x16 icon button for the iBench GUI. The button type selects which icon
 * is drawn from the shared widgets texture sheet.
 */
public class iBenchButton extends GuiButton {

    public static final int SPIN = 0;
    public static final int BALANCE = 1;
    public static final int EMPTY = 2;

    public static final ResourceLocation widgetTextures = Textures.Gui.WIDGETS;

    // Texture coordinates of this button's icon inside the widgets sheet.
    private int iconOffsetX = 0;
    private int iconOffsetY = 0;

    public iBenchButton(int id, int xPos, int yPos, int width, int height, int type) {
        super(id, xPos, yPos, width, height, "");
        // Map the button type to its icon location in the texture sheet;
        // unknown types fall through and keep the (0, 0) defaults.
        switch (type) {
            case SPIN:
                iconOffsetX = 64;
                iconOffsetY = 16;
                break;
            case BALANCE:
                iconOffsetX = 64;
                iconOffsetY = 32;
                break;
            case EMPTY:
                iconOffsetX = 64;
                iconOffsetY = 0;
                break;
            default:
                break;
        }
    }

    /**
     * Draws this button to the screen.
     *
     * @param minecraft the Minecraft client instance
     * @param mPosX mouse x position
     * @param mPosY mouse y position
     */
    @Override
    public void drawButton(Minecraft minecraft, int mPosX, int mPosY) {
        if (!this.visible) {
            return;
        }
        minecraft.getTextureManager().bindTexture(widgetTextures);
        boolean hovering = mPosX >= this.xPosition
                && mPosY >= this.yPosition
                && mPosX < this.xPosition + this.width
                && mPosY < this.yPosition + this.height;
        // getHoverState returns a 1-based state index; each state's icon sits
        // 16 pixels further right in the sheet.
        int hoverState = getHoverState(hovering);
        int textureU = iconOffsetX + (hoverState - 1) * 16;
        this.drawTexturedModalRect(this.xPosition, this.yPosition, textureU, iconOffsetY, 16, 16);
    }
}
|
3e0e940e91ebf688976468a04e187ae77b2fa560 | 3,370 | java | Java | src/main/java/org/dataalgorithms/machinelearning/naivebayes/diabetes/TestAccuracyOfModel.java | vickyi/data-algorithms-book | b5f9524e426cf7ac4b4148bd505ada2042292d09 | [
"Apache-2.0"
] | 1,038 | 2015-01-01T05:13:31.000Z | 2022-03-26T13:41:34.000Z | src/main/java/org/dataalgorithms/machinelearning/naivebayes/diabetes/TestAccuracyOfModel.java | vickyi/data-algorithms-book | b5f9524e426cf7ac4b4148bd505ada2042292d09 | [
"Apache-2.0"
] | 32 | 2016-02-15T19:23:38.000Z | 2021-06-25T21:38:47.000Z | src/main/java/org/dataalgorithms/machinelearning/naivebayes/diabetes/TestAccuracyOfModel.java | vickyi/data-algorithms-book | b5f9524e426cf7ac4b4148bd505ada2042292d09 | [
"Apache-2.0"
] | 715 | 2015-01-03T16:39:57.000Z | 2022-03-09T17:00:01.000Z | 34.824742 | 106 | 0.638544 | 6,191 | package org.dataalgorithms.machinelearning.naivebayes.diabetes;
import scala.Tuple2;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.mllib.classification.NaiveBayesModel;
import org.apache.spark.mllib.regression.LabeledPoint;
/**
* The goal is to check the accuracy of the built model by a set of test data,
* which we already know their classification.
*
*
* "Training Data" and "Test Data" have the same format:
* =====================================================
* Each record: has 9 attributes (8 features and an associated classification):
* 1. Number of times pregnant
* 2. Plasma glucose concentration a 2 hours in an oral glucose tolerance test
* 3. Diastolic blood pressure (mm Hg)
* 4. Triceps skin fold thickness (mm)
* 5. 2-Hour serum insulin (mu U/ml)
* 6. Body mass index (weight in kg/(height in m)^2)
* 7. Diabetes pedigree function
* 8. Age (years)
* 9. Class variable (0 or 1); the class value 1 is interpreted as "tested positive for diabetes"
*
*
*
* @author Mahmoud Parsian (lyhxr@example.com)
*
*/
public class TestAccuracyOfModel {

    private static final Logger THE_LOGGER = Logger.getLogger(TestAccuracyOfModel.class);

    /**
     * Loads a previously saved naive Bayes model, classifies the labeled test
     * set and logs the fraction of records the model classifies correctly.
     *
     * @param args {@code args[0]} = test data path, {@code args[1]} = saved model path
     * @throws Exception if reading the test data or loading the model fails
     */
    public static void main(String[] args) throws Exception {

        Util.printArguments(args);
        if (args.length != 2) {
            throw new RuntimeException("usage: TestAccuracyOfModel <test-data-path> <saved-model-path> ");
        }

        String testDataPath = args[0];
        String savedModelPath = args[1];
        THE_LOGGER.info("--- testDataPath=" + testDataPath);
        THE_LOGGER.info("--- savedModelPath=" + savedModelPath);

        // create a Factory context object
        JavaSparkContext context = Util.createJavaSparkContext("TestAccuracyOfModel");

        // try/finally so the Spark context is released even when the job fails
        // (the original closed it only on the success path).
        try {
            //
            // create test data set
            // input records format: <feature-1><,>...<,><feature-8><,><classification>
            //
            JavaRDD<String> testRDD = context.textFile(testDataPath);
            JavaRDD<LabeledPoint> test = Util.createLabeledPointRDD(testRDD);

            //
            // load the built model from the saved path
            //
            final NaiveBayesModel model = NaiveBayesModel.load(context.sc(), savedModelPath);

            //
            // pair each test record's predicted label with its gold label
            //
            JavaPairRDD<Double, Double> predictionAndLabel =
                test.mapToPair(new PairFunction<LabeledPoint, Double, Double>() {
                    @Override
                    public Tuple2<Double, Double> call(LabeledPoint p) {
                        return new Tuple2<Double, Double>(model.predict(p.features()), p.label());
                    }
                });

            //
            // accuracy = (# of matching prediction/label pairs) / (# of test records)
            //
            double accuracy = predictionAndLabel.filter(new Function<Tuple2<Double, Double>, Boolean>() {
                @Override
                public Boolean call(Tuple2<Double, Double> pl) {
                    return pl._1().equals(pl._2());
                }
            }).count() / (double) test.count();
            THE_LOGGER.info("accuracy=" + accuracy);
        } finally {
            // done — always release the Spark context
            context.close();
        }
    }
}
|
3e0e9592d10db8458cb30d202ae3f96705408a27 | 418 | java | Java | src/main/java/no/nav/veilarbvedtaksstotte/VeilarbvedtaksstotteApp.java | navikt/veilarbvedtaksstotte | 3b80d5b72e4798d5e6c1a24a5bcc86cb18c7bbb6 | [
"MIT"
] | 2 | 2021-01-30T11:08:15.000Z | 2021-02-09T08:37:40.000Z | src/main/java/no/nav/veilarbvedtaksstotte/VeilarbvedtaksstotteApp.java | navikt/veilarbvedtaksstotte | 3b80d5b72e4798d5e6c1a24a5bcc86cb18c7bbb6 | [
"MIT"
] | 101 | 2019-05-28T16:39:51.000Z | 2022-03-31T13:33:21.000Z | src/main/java/no/nav/veilarbvedtaksstotte/VeilarbvedtaksstotteApp.java | navikt/veilarbvedtaksstotte | 3b80d5b72e4798d5e6c1a24a5bcc86cb18c7bbb6 | [
"MIT"
] | 1 | 2019-12-18T07:40:08.000Z | 2019-12-18T07:40:08.000Z | 26.125 | 68 | 0.787081 | 6,192 | package no.nav.veilarbvedtaksstotte;
import no.nav.common.utils.SslUtils;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class VeilarbvedtaksstotteApp {
public static void main(String... args) {
SslUtils.setupTruststore();
SpringApplication.run(VeilarbvedtaksstotteApp.class, args);
}
}
|
3e0e961337c8bc223d999aed058195474ce9f2e1 | 1,385 | java | Java | java11/movies/models/src/main/java/de/mbe/tutorials/aws/serverless/movies/models/Movie.java | wilq87/how-to-serverless-with-aws-part1 | d1f12fa8e7f505ddc76a7c4b5aa5b56337e67793 | [
"Apache-2.0"
] | null | null | null | java11/movies/models/src/main/java/de/mbe/tutorials/aws/serverless/movies/models/Movie.java | wilq87/how-to-serverless-with-aws-part1 | d1f12fa8e7f505ddc76a7c4b5aa5b56337e67793 | [
"Apache-2.0"
] | null | null | null | java11/movies/models/src/main/java/de/mbe/tutorials/aws/serverless/movies/models/Movie.java | wilq87/how-to-serverless-with-aws-part1 | d1f12fa8e7f505ddc76a7c4b5aa5b56337e67793 | [
"Apache-2.0"
] | null | null | null | 21.984127 | 77 | 0.665704 | 6,193 | package de.mbe.tutorials.aws.serverless.movies.models;
import java.time.LocalDate;
/**
 * Mutable JavaBean describing a movie and its ratings. Properties start as
 * {@code null} and are populated through the setters.
 */
public final class Movie {

    private String id;
    private String name;
    private String countryOfOrigin;
    private LocalDate releaseDate;
    private Integer rottenTomatoesRating;
    private Integer imdbRating;

    /** Creates an empty movie; all properties are initially {@code null}. */
    public Movie() { }

    /** @return the movie's unique identifier */
    public String getId() {
        return id;
    }

    /** @param id the movie's unique identifier */
    public void setId(final String id) {
        this.id = id;
    }

    /** @return the movie's title */
    public String getName() {
        return name;
    }

    /** @param name the movie's title */
    public void setName(final String name) {
        this.name = name;
    }

    /** @return the country the movie was produced in */
    public String getCountryOfOrigin() {
        return countryOfOrigin;
    }

    /** @param countryOfOrigin the country the movie was produced in */
    public void setCountryOfOrigin(final String countryOfOrigin) {
        this.countryOfOrigin = countryOfOrigin;
    }

    /** @return the theatrical release date */
    public LocalDate getReleaseDate() {
        return releaseDate;
    }

    /** @param releaseDate the theatrical release date */
    public void setReleaseDate(final LocalDate releaseDate) {
        this.releaseDate = releaseDate;
    }

    /** @return the Rotten Tomatoes score, or {@code null} if unknown */
    public Integer getRottenTomatoesRating() {
        return rottenTomatoesRating;
    }

    /** @param rottenTomatoesRating the Rotten Tomatoes score */
    public void setRottenTomatoesRating(final Integer rottenTomatoesRating) {
        this.rottenTomatoesRating = rottenTomatoesRating;
    }

    /** @return the IMDb score, or {@code null} if unknown */
    public Integer getImdbRating() {
        return imdbRating;
    }

    /** @param imdbRating the IMDb score */
    public void setImdbRating(final Integer imdbRating) {
        this.imdbRating = imdbRating;
    }
}
3e0e96a6403e9d0e32a7651ecb99d347dd158f3f | 16,052 | java | Java | src/com/computablefacts/morta/snorkel/Summary.java | computablefacts/morta | 4db2144a894a7f7d26b76467014141f68a8b8989 | [
"Apache-2.0"
] | null | null | null | src/com/computablefacts/morta/snorkel/Summary.java | computablefacts/morta | 4db2144a894a7f7d26b76467014141f68a8b8989 | [
"Apache-2.0"
] | 1 | 2022-02-01T01:09:32.000Z | 2022-02-01T01:09:32.000Z | src/com/computablefacts/morta/snorkel/Summary.java | computablefacts/morta | 4db2144a894a7f7d26b76467014141f68a8b8989 | [
"Apache-2.0"
] | null | null | null | 35.048035 | 122 | 0.646025 | 6,194 | package com.computablefacts.morta.snorkel;
import static com.computablefacts.morta.snorkel.ILabelingFunction.ABSTAIN;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.computablefacts.nona.Generated;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.errorprone.annotations.CheckReturnValue;
import com.google.errorprone.annotations.Var;
import smile.stat.hypothesis.CorTest;
/**
 * Per-labeling-function statistics — polarity, coverage, overlaps, conflicts and,
 * when gold labels are available, correct/incorrect/abstain counts — together
 * with static helpers that compute these statistics over a labeled dataset.
 *
 * <p>Instances are immutable; the collection arguments are defensively copied
 * in the constructor.</p>
 *
 * <p>Fix over the original: {@link #explore} validated {@code lfLabels} with the
 * copy-pasted message "lfNames should not be null"; the message now names the
 * right parameter.</p>
 */
@CheckReturnValue
final public class Summary {

  // Name of the labeling function this summary describes.
  private final String label_;
  // Distinct non-ABSTAIN labels the LF emitted.
  private final Set<String> polarity_;
  // Names of the LFs this one agreed with on at least one data point.
  private final Set<String> overlapsWith_;
  // Names of the LFs this one disagreed with on at least one data point.
  private final Set<String> conflictsWith_;
  private final double coverage_;
  private final double overlaps_;
  private final double conflicts_;
  // Counts below are -1 when no gold labels were supplied.
  private final int correct_;
  private final int incorrect_;
  private final int abstain_;

  /**
   * @param label name of the labeling function (required)
   * @param polarity distinct labels the LF emitted, excluding abstains (required)
   * @param coverage fraction of the dataset the LF labeled
   * @param overlaps fraction of labeled points where another LF agreed
   * @param conflicts fraction of labeled points where another LF disagreed
   * @param correct number of correct labels, or -1 when unknown
   * @param incorrect number of incorrect labels, or -1 when unknown
   * @param abstain number of abstains, or -1 when unknown
   * @param overlapsWith names of overlapping LFs; {@code null} is treated as empty
   * @param conflictsWith names of conflicting LFs; {@code null} is treated as empty
   */
  public Summary(String label, Set<String> polarity, double coverage, double overlaps,
      double conflicts, int correct, int incorrect, int abstain, Set<String> overlapsWith,
      Set<String> conflictsWith) {

    label_ = Preconditions.checkNotNull(label, "label should not be null");
    polarity_ = Preconditions.checkNotNull(polarity, "polarity should not be null");
    coverage_ = coverage;
    overlaps_ = overlaps;
    conflicts_ = conflicts;
    correct_ = correct;
    incorrect_ = incorrect;
    abstain_ = abstain;
    overlapsWith_ = overlapsWith == null ? new HashSet<>() : new HashSet<>(overlapsWith);
    conflictsWith_ = conflictsWith == null ? new HashSet<>() : new HashSet<>(conflictsWith);
  }

  /**
   * Compute correlation between each pair of labeling functions.
   *
   * @param lfNames mapping of the labeling function names to integers. Each integer represents the
   *        position of the labeling function in the lfs list.
   * @param lfLabels mapping of the labeling function outputs, i.e. labels, to integers. Each
   *        integer represents a machine-friendly version of a human-readable label.
   * @param instances output of the labeling functions for each datapoint.
   * @param correlation correlation type.
   * @return a correlation matrix (full, symmetric, including the diagonal).
   */
  public static <T> Table<String, String, CorTest> labelingFunctionsCorrelations(Dictionary lfNames,
      Dictionary lfLabels, List<Map.Entry<T, FeatureVector<Integer>>> instances,
      eCorrelation correlation) {

    Preconditions.checkNotNull(lfNames, "lfNames should not be null");
    Preconditions.checkNotNull(lfLabels, "lfLabels should not be null");
    Preconditions.checkNotNull(instances, "instances should not be null");
    Preconditions.checkNotNull(correlation, "correlation should not be null");

    int nbLabelingFunctions = lfNames.size();
    List<double[]> matrix = new ArrayList<>(nbLabelingFunctions);

    // Transpose: build one vector per LF containing its output on every instance.
    for (int i = 0; i < nbLabelingFunctions; i++) {

      double[] vector = new double[instances.size()];

      for (int j = 0; j < instances.size(); j++) {
        vector[j] = instances.get(j).getValue().get(i);
      }

      matrix.add(vector);
    }

    // Compute correlation coefficient between each LF
    Table<String, String, CorTest> correlations = HashBasedTable.create();

    for (int i = 0; i < matrix.size(); i++) {
      for (int j = 0; j < matrix.size(); j++) {

        double[] lf1 = matrix.get(i);
        double[] lf2 = matrix.get(j);

        if (eCorrelation.KENDALL.equals(correlation)) {
          correlations.put(lfNames.label(i), lfNames.label(j), CorTest.kendall(lf1, lf2));
        } else if (eCorrelation.SPEARMAN.equals(correlation)) {
          correlations.put(lfNames.label(i), lfNames.label(j), CorTest.spearman(lf1, lf2));
        } else { // PEARSON
          correlations.put(lfNames.label(i), lfNames.label(j), CorTest.pearson(lf1, lf2));
        }
      }
    }
    return correlations;
  }

  /**
   * Explore the labeling functions outputs.
   *
   * @param lfNames mapping of the labeling function names to integers. Each integer represents the
   *        position of the labeling function in the lfs list.
   * @param lfLabels mapping of the labeling function outputs, i.e. labels, to integers. Each
   *        integer represents a machine-friendly version of a human-readable label.
   * @param instances output of the labeling functions for each datapoint.
   * @param goldLabels gold labels.
   * @return a segmentation of the data according to the output produced by each labeling function.
   */
  public static <T> Table<String, eStatus, List<Map.Entry<T, FeatureVector<Integer>>>> explore(
      Dictionary lfNames, Dictionary lfLabels, List<Map.Entry<T, FeatureVector<Integer>>> instances,
      List<Integer> goldLabels) {

    Preconditions.checkNotNull(lfNames, "lfNames should not be null");
    // Fixed message: previously said "lfNames should not be null" for lfLabels.
    Preconditions.checkNotNull(lfLabels, "lfLabels should not be null");
    Preconditions.checkNotNull(instances, "instances should not be null");
    Preconditions.checkNotNull(goldLabels, "goldLabels should not be null");
    Preconditions.checkArgument(instances.size() == goldLabels.size(),
        "Mismatch between the number of instances and the number of gold labels : %s vs %s",
        instances.size(), goldLabels.size());

    int nbLabelingFunctions = lfNames.size();
    Table<String, eStatus, List<Map.Entry<T, FeatureVector<Integer>>>> table =
        HashBasedTable.create();

    for (int i = 0; i < nbLabelingFunctions; i++) {

      String lfName = lfNames.label(i);

      for (int j = 0; j < instances.size(); j++) {

        Map.Entry<T, FeatureVector<Integer>> instance = instances.get(j);
        int lfLabel = instance.getValue().get(i);

        Preconditions.checkState(nbLabelingFunctions == instance.getValue().size(),
            "Invalid feature vector length : %s found vs %s expected", instance.getValue().size(),
            nbLabelingFunctions);

        // Bucket each instance into one of the four statuses for this LF:
        // (in)correct, possibly with the ABSTAIN refinement.
        if (lfLabel == goldLabels.get(j)) {
          if (lfLabel == ABSTAIN) {
            if (!table.contains(lfName, eStatus.CORRECT_ABSTAIN)) {
              table.put(lfName, eStatus.CORRECT_ABSTAIN, new ArrayList<>());
            }
            table.get(lfName, eStatus.CORRECT_ABSTAIN).add(instance);
          } else {
            if (!table.contains(lfName, eStatus.CORRECT)) {
              table.put(lfName, eStatus.CORRECT, new ArrayList<>());
            }
            table.get(lfName, eStatus.CORRECT).add(instance);
          }
        } else {
          if (lfLabel == ABSTAIN) {
            if (!table.contains(lfName, eStatus.INCORRECT_ABSTAIN)) {
              table.put(lfName, eStatus.INCORRECT_ABSTAIN, new ArrayList<>());
            }
            table.get(lfName, eStatus.INCORRECT_ABSTAIN).add(instance);
          } else {
            if (!table.contains(lfName, eStatus.INCORRECT)) {
              table.put(lfName, eStatus.INCORRECT, new ArrayList<>());
            }
            table.get(lfName, eStatus.INCORRECT).add(instance);
          }
        }
      }
    }
    return table;
  }

  /**
   * Compute a {@link Summary} object with polarity, coverage, overlaps, etc. for each labeling
   * function. When gold labels are provided, this method will compute the number of correct and
   * incorrect labels output by each labeling function.
   *
   * @param lfNames lfNames mapping of the labeling function names to integers. Each integer
   *        represents the position of the labeling function in the lfs list.
   * @param lfLabels mapping of the labeling function outputs, i.e. labels, to integers. Each
   *        integer represents a machine-friendly version of a human-readable label.
   * @param instances output of the labeling functions for each datapoint.
   * @param goldLabels gold labels (optional).
   * @return a {@link Summary} object for each labeling function.
   */
  public static <T> List<Summary> summarize(Dictionary lfNames, Dictionary lfLabels,
      List<Map.Entry<T, FeatureVector<Integer>>> instances, List<Integer> goldLabels) {

    Preconditions.checkNotNull(lfNames, "lfNames should not be null");
    Preconditions.checkNotNull(lfLabels, "lfLabels should not be null");
    Preconditions.checkNotNull(instances, "instances should not be null");

    int nbLabelingFunctions = lfNames.size();
    List<Summary> summaries = new ArrayList<>(nbLabelingFunctions);

    Preconditions.checkState(goldLabels == null || instances.size() == goldLabels.size(),
        "Mismatch between the number of instances and the number of gold labels : %s vs %s",
        instances.size(), goldLabels == null ? 0 : goldLabels.size());

    for (int i = 0; i < nbLabelingFunctions; i++) {

      String labelingFunctionName = lfNames.label(i);
      Set<String> labels = new HashSet<>();
      Map<String, Set<String>> overlapsWith = new HashMap<>();
      Map<String, Set<String>> conflictsWith = new HashMap<>();

      @Var
      double nbLabelled = 0;
      @Var
      double nbOverlaps = 0;
      @Var
      double nbConflicts = 0;
      @Var
      double nbDataPoints = 0;
      // -1 is the "unknown" sentinel when no gold labels were supplied.
      @Var
      int nbCorrect = goldLabels == null ? -1 : 0;
      @Var
      int nbIncorrect = goldLabels == null ? -1 : 0;
      @Var
      int nbAbstain = goldLabels == null ? -1 : 0;

      for (int j = 0; j < instances.size(); j++) {

        nbDataPoints += 1.0;
        Map.Entry<T, FeatureVector<Integer>> featureVector = instances.get(j);
        String lfName = lfNames.label(i);
        int lfValue = featureVector.getValue().get(i);

        Preconditions.checkState(nbLabelingFunctions == featureVector.getValue().size(),
            "Invalid feature vector length : %s found vs %s expected",
            featureVector.getValue().size(), nbLabelingFunctions);
        Preconditions.checkState(labelingFunctionName.equals(lfName),
            "Invalid labeling function name : %s found vs %s expected", lfName,
            labelingFunctionName);

        if (lfValue > ABSTAIN) {
          if (goldLabels != null) {
            if (lfValue == goldLabels.get(j)) {
              nbCorrect++;
            } else {
              nbIncorrect++;
            }
          }

          nbLabelled += 1.0;
          labels.add(lfLabels.label(lfValue));

          // Overlap/conflict are counted at most once per data point, even if
          // several other LFs agree/disagree; the per-LF name sets record them all.
          @Var
          boolean hasOverlap = false;
          @Var
          boolean hasConflict = false;

          for (int k = 0; /* (!hasOverlap || !hasConflict) && */ k < nbLabelingFunctions; k++) {
            if (k != i) {

              int lfv = featureVector.getValue().get(k);
              String lfn = lfNames.label(k);

              if (lfv > ABSTAIN && lfv == lfValue) {
                if (!hasOverlap) {
                  nbOverlaps += 1.0;
                  hasOverlap = true;
                }
                if (!overlapsWith.containsKey(lfName)) {
                  overlapsWith.put(lfName, new HashSet<>());
                }
                overlapsWith.get(lfName).add(lfn);
              }
              if (lfv > ABSTAIN && lfv != lfValue) {
                if (!hasConflict) {
                  nbConflicts += 1.0;
                  hasConflict = true;
                }
                if (!conflictsWith.containsKey(lfName)) {
                  conflictsWith.put(lfName, new HashSet<>());
                }
                conflictsWith.get(lfName).add(lfn);
              }
            }
          }
        } else {
          nbAbstain++;
        }
      }

      Preconditions.checkState(
          goldLabels == null || nbCorrect + nbIncorrect + nbAbstain == instances.size(),
          "Mismatch between the number of correct/incorrect labels and the number of instances : %s found vs %s expected",
          nbCorrect + nbIncorrect + nbAbstain, instances.size());

      // NOTE(review): when the LF never labels anything, nbOverlaps / nbLabelled
      // and nbConflicts / nbLabelled are 0/0 == NaN — preserved as-is.
      summaries.add(new Summary(labelingFunctionName, labels, nbLabelled / nbDataPoints,
          nbOverlaps / nbLabelled, nbConflicts / nbLabelled, nbCorrect, nbIncorrect, nbAbstain,
          overlapsWith.get(labelingFunctionName), conflictsWith.get(labelingFunctionName)));
    }
    return summaries;
  }

  @Generated
  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this).add("label", label_).add("polarity", polarity_)
        .add("coverage", coverage_).add("overlaps", overlaps_).add("conflicts", conflicts_)
        .add("correct", correct_).add("incorrect", incorrect_).add("abstain", abstain_)
        .add("overlaps_with", overlapsWith_).add("conflicts_with", conflictsWith_).toString();
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof Summary)) {
      return false;
    }
    Summary summary = (Summary) obj;
    return Objects.equal(label_, summary.label_) && Objects.equal(polarity_, summary.polarity_)
        && Objects.equal(coverage_, summary.coverage_)
        && Objects.equal(overlaps_, summary.overlaps_)
        && Objects.equal(conflicts_, summary.conflicts_)
        && Objects.equal(correct_, summary.correct_)
        && Objects.equal(incorrect_, summary.incorrect_)
        && Objects.equal(abstain_, summary.abstain_)
        && Objects.equal(overlapsWith_, summary.overlapsWith_)
        && Objects.equal(conflictsWith_, summary.conflictsWith_);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(label_, polarity_, coverage_, overlaps_, conflicts_, correct_,
        incorrect_, abstain_, overlapsWith_, conflictsWith_);
  }

  /**
   * The name of the considered LF.
   *
   * @return the LF's name
   */
  @Generated
  public String label() {
    return label_;
  }

  /**
   * Polarity: The set of unique labels this LF outputs (excluding abstains).
   *
   * @return polarity
   */
  @Generated
  public Set<String> polarity() {
    return polarity_;
  }

  /**
   * Coverage: The fraction of the dataset this LF labels.
   *
   * @return coverage
   */
  @Generated
  public double coverage() {
    return coverage_;
  }

  /**
   * Overlaps: The fraction of the dataset where this LF and at least one other LF label.
   *
   * @return overlaps
   */
  @Generated
  public double overlaps() {
    return overlaps_;
  }

  /**
   * List of LF this LF overlaps with.
   *
   * @return overlapping LF
   */
  @Generated
  public Set<String> overlapsWith() {
    return overlapsWith_;
  }

  /**
   * Conflicts: The fraction of the dataset where this LF and at least one other LF label and
   * disagree.
   *
   * @return conflicts
   */
  @Generated
  public double conflicts() {
    return conflicts_;
  }

  /**
   * List of LF this LF conflicts with.
   *
   * @return conflicting LF
   */
  @Generated
  public Set<String> conflictsWith() {
    return conflictsWith_;
  }

  /**
   * Correct: The number of data points this LF labels correctly (if gold labels are provided).
   *
   * @return correct
   */
  @Generated
  public int correct() {
    return correct_;
  }

  /**
   * Incorrect: The number of data points this LF labels incorrectly (if gold labels are provided).
   *
   * @return incorrect
   */
  @Generated
  public int incorrect() {
    return incorrect_;
  }

  /**
   * Abstain: The number of data points this LF has abstained (if gold labels are provided).
   *
   * @return abstain
   */
  @Generated
  public int abstain() {
    return abstain_;
  }

  // TODO : compute empirical accuracy

  /**
   * Note that Spearman is computed on ranks and so depicts monotonic relationships while Pearson is
   * on true values and depicts linear relationships. If Spearman > Pearson the correlation is
   * monotonic but not linear.
   */
  public enum eCorrelation {
    PEARSON, KENDALL, SPEARMAN
  }

  public enum eStatus {
    ALL, CORRECT, /* the LF output the same label as the gold one */
    INCORRECT, /* the LF output a label different from the gold one */
    CORRECT_ABSTAIN, /* both the LF and the gold label are ABSTAIN */
    INCORRECT_ABSTAIN /* the LF output is ABSTAIN but the gold one is not */
  }
}
|
3e0e9ace96f7191eb2f822dc75448fa85726498e | 22,806 | java | Java | jxfw-eclipse-bundles/bundles/jxfw-metamodel.edit/src/ru/croc/ctp/jxfw/XtendMetaModel/provider/XtendClassItemProvider.java | croc-code/jxfw | 7af05aa6579a8dfd0e83c918b0e46195eb0802cf | [
"Apache-2.0"
] | 12 | 2021-11-08T08:08:44.000Z | 2022-02-21T15:42:25.000Z | jxfw-eclipse-bundles/bundles/jxfw-metamodel.edit/src/ru/croc/ctp/jxfw/XtendMetaModel/provider/XtendClassItemProvider.java | crocinc/jxfw | da93ee21e4e586219eeefc8441e472f7136266d1 | [
"Apache-2.0"
] | null | null | null | jxfw-eclipse-bundles/bundles/jxfw-metamodel.edit/src/ru/croc/ctp/jxfw/XtendMetaModel/provider/XtendClassItemProvider.java | crocinc/jxfw | da93ee21e4e586219eeefc8441e472f7136266d1 | [
"Apache-2.0"
] | 1 | 2022-03-02T14:05:28.000Z | 2022-03-02T14:05:28.000Z | 38.393939 | 134 | 0.608524 | 6,195 | /**
*/
package ru.croc.ctp.jxfw.XtendMetaModel.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import ru.croc.ctp.jxfw.XtendMetaModel.XMMFactory;
import ru.croc.ctp.jxfw.XtendMetaModel.XMMPackage;
import ru.croc.ctp.jxfw.XtendMetaModel.XtendClass;
/**
* This is the item provider adapter for a {@link ru.croc.ctp.jxfw.XtendMetaModel.XtendClass} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class XtendClassItemProvider extends ItemProviderAdapter {
/**
* This constructs an instance from a factory and a notifier.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public XtendClassItemProvider(AdapterFactory adapterFactory) {
super(adapterFactory);
}
/**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
if (itemPropertyDescriptors == null) {
super.getPropertyDescriptors(object);
addNamePropertyDescriptor(object);
addInstanceClassNamePropertyDescriptor(object);
addInstanceClassPropertyDescriptor(object);
addDefaultValuePropertyDescriptor(object);
addInstanceTypeNamePropertyDescriptor(object);
addAbstractPropertyDescriptor(object);
addInterfacePropertyDescriptor(object);
addESuperTypesPropertyDescriptor(object);
addEAllAttributesPropertyDescriptor(object);
addEAllReferencesPropertyDescriptor(object);
addEReferencesPropertyDescriptor(object);
addEAttributesPropertyDescriptor(object);
addEAllContainmentsPropertyDescriptor(object);
addEAllOperationsPropertyDescriptor(object);
addEAllStructuralFeaturesPropertyDescriptor(object);
addEAllSuperTypesPropertyDescriptor(object);
addEIDAttributePropertyDescriptor(object);
addEAllGenericSuperTypesPropertyDescriptor(object);
}
return itemPropertyDescriptors;
}
/**
* This adds a property descriptor for the Name feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addNamePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ENamedElement_name_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ENamedElement_name_feature", "_UI_ENamedElement_type"),
EcorePackage.Literals.ENAMED_ELEMENT__NAME,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Instance Class Name feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addInstanceClassNamePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClassifier_instanceClassName_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClassifier_instanceClassName_feature", "_UI_EClassifier_type"),
EcorePackage.Literals.ECLASSIFIER__INSTANCE_CLASS_NAME,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Instance Class feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addInstanceClassPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClassifier_instanceClass_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClassifier_instanceClass_feature", "_UI_EClassifier_type"),
EcorePackage.Literals.ECLASSIFIER__INSTANCE_CLASS,
false,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Default Value feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addDefaultValuePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClassifier_defaultValue_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClassifier_defaultValue_feature", "_UI_EClassifier_type"),
EcorePackage.Literals.ECLASSIFIER__DEFAULT_VALUE,
false,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Instance Type Name feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addInstanceTypeNamePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClassifier_instanceTypeName_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClassifier_instanceTypeName_feature", "_UI_EClassifier_type"),
EcorePackage.Literals.ECLASSIFIER__INSTANCE_TYPE_NAME,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Abstract feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addAbstractPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_abstract_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_abstract_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__ABSTRACT,
true,
false,
false,
ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Interface feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addInterfacePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_interface_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_interface_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__INTERFACE,
true,
false,
false,
ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the ESuper Types feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addESuperTypesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eSuperTypes_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eSuperTypes_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__ESUPER_TYPES,
true,
false,
true,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll Attributes feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllAttributesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllAttributes_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllAttributes_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_ATTRIBUTES,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll References feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllReferencesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllReferences_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllReferences_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_REFERENCES,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EReferences feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEReferencesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eReferences_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eReferences_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EREFERENCES,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAttributes feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAttributesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAttributes_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAttributes_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EATTRIBUTES,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll Containments feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllContainmentsPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllContainments_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllContainments_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_CONTAINMENTS,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll Operations feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllOperationsPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllOperations_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllOperations_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_OPERATIONS,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll Structural Features feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllStructuralFeaturesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllStructuralFeatures_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllStructuralFeatures_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_STRUCTURAL_FEATURES,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll Super Types feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllSuperTypesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllSuperTypes_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllSuperTypes_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_SUPER_TYPES,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EID Attribute feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEIDAttributePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eIDAttribute_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eIDAttribute_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EID_ATTRIBUTE,
false,
false,
false,
null,
null,
null));
}
/**
* This adds a property descriptor for the EAll Generic Super Types feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addEAllGenericSuperTypesPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EClass_eAllGenericSuperTypes_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EClass_eAllGenericSuperTypes_feature", "_UI_EClass_type"),
EcorePackage.Literals.ECLASS__EALL_GENERIC_SUPER_TYPES,
false,
false,
false,
null,
null,
null));
}
/**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
* {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
if (childrenFeatures == null) {
super.getChildrenFeatures(object);
childrenFeatures.add(EcorePackage.Literals.EMODEL_ELEMENT__EANNOTATIONS);
childrenFeatures.add(EcorePackage.Literals.ECLASSIFIER__ETYPE_PARAMETERS);
childrenFeatures.add(EcorePackage.Literals.ECLASS__EOPERATIONS);
childrenFeatures.add(EcorePackage.Literals.ECLASS__ESTRUCTURAL_FEATURES);
childrenFeatures.add(EcorePackage.Literals.ECLASS__EGENERIC_SUPER_TYPES);
}
return childrenFeatures;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EStructuralFeature getChildFeature(Object object, Object child) {
// Check the type of the specified child object and return the proper feature to use for
// adding (see {@link AddCommand}) it as a child.
return super.getChildFeature(object, child);
}
/**
* This returns XtendClass.gif.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object getImage(Object object) {
return overlayImage(object, getResourceLocator().getImage("full/obj16/XtendClass"));
}
/**
* This returns the label text for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String getText(Object object) {
String label = ((XtendClass)object).getName();
return label == null || label.length() == 0 ?
getString("_UI_XtendClass_type") :
getString("_UI_XtendClass_type") + " " + label;
}
/**
* This handles model notifications by calling {@link #updateChildren} to update any cached
* children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void notifyChanged(Notification notification) {
updateChildren(notification);
switch (notification.getFeatureID(XtendClass.class)) {
case XMMPackage.XTEND_CLASS__NAME:
case XMMPackage.XTEND_CLASS__INSTANCE_CLASS_NAME:
case XMMPackage.XTEND_CLASS__INSTANCE_CLASS:
case XMMPackage.XTEND_CLASS__DEFAULT_VALUE:
case XMMPackage.XTEND_CLASS__INSTANCE_TYPE_NAME:
case XMMPackage.XTEND_CLASS__ABSTRACT:
case XMMPackage.XTEND_CLASS__INTERFACE:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
return;
case XMMPackage.XTEND_CLASS__EANNOTATIONS:
case XMMPackage.XTEND_CLASS__ETYPE_PARAMETERS:
case XMMPackage.XTEND_CLASS__EOPERATIONS:
case XMMPackage.XTEND_CLASS__ESTRUCTURAL_FEATURES:
case XMMPackage.XTEND_CLASS__EGENERIC_SUPER_TYPES:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
return;
}
super.notifyChanged(notification);
}
/**
* This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
* that can be created under this object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
super.collectNewChildDescriptors(newChildDescriptors, object);
newChildDescriptors.add
(createChildParameter
(EcorePackage.Literals.ECLASS__ESTRUCTURAL_FEATURES,
XMMFactory.eINSTANCE.createXtendAttribute()));
}
/**
* Return the resource locator for this item provider's resources.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public ResourceLocator getResourceLocator() {
return XtendMetaModelEditPlugin.INSTANCE;
}
}
|
3e0e9b189ae38aef63c82192bac32fb986857fd8 | 1,778 | java | Java | src/main/ui/CustomJTable.java | AnimeAllstar/cash-flow-account | 383a842ceb82c0949af932fe25fa7fb7cdff3ce6 | [
"MIT"
] | null | null | null | src/main/ui/CustomJTable.java | AnimeAllstar/cash-flow-account | 383a842ceb82c0949af932fe25fa7fb7cdff3ce6 | [
"MIT"
] | null | null | null | src/main/ui/CustomJTable.java | AnimeAllstar/cash-flow-account | 383a842ceb82c0949af932fe25fa7fb7cdff3ce6 | [
"MIT"
] | null | null | null | 29.633333 | 84 | 0.602925 | 6,196 | package ui;
import model.Item;
import model.TableModel;
import javax.swing.*;
import javax.swing.table.TableCellEditor;
import java.util.List;
// represents a subclass of JTable
public class CustomJTable extends JTable {
public CustomJTable(TableModel tableModel) {
super(tableModel);
}
/*
* EFFECTS: returns an appropriate CellEditor for the selected cell
* for columns 0 - 2, returns CustomCellEditor
* for column 3, returns DefaultCellEditor
* otherwise, returns super.getCellEditor(row, column)
*/
@Override
public TableCellEditor getCellEditor(int row, int column) {
List<String> itemList = ((TableModel) this.getModel()).getCategoryList(row);
JTextField textField = new JTextField();
switch (column) {
case 0:
textField.setName("Description");
return new CustomCellEditor(textField);
case 1:
textField.setName("Amount");
return new CustomCellEditor(textField);
case 2:
textField.setName("Date");
return new CustomCellEditor(textField);
case 3:
return new DefaultCellEditor(new JComboBox<>(itemList.toArray()));
default:
return super.getCellEditor(row, column);
}
}
/*
* MODIFIES: this
* EFFECTS: remove selected row from this.getModel()
*/
public void removeRow(int selectedRow) {
((TableModel) this.getModel()).removeRow(selectedRow);
}
/*
* MODIFIES: this
* EFFECTS: add row using item to this.getModel()
*/
public void addRow(Item item) {
((TableModel) this.getModel()).addRow(item);
}
}
|
3e0e9b41453e267d0e8f6560fc81d3fe9d894746 | 535 | java | Java | src/main/java/de/zalando/zmon/scheduler/ng/trailruns/TrialRunForwarder.java | zalando-zmon/zmon-scheduler | 6e7938023ecf1c6b8c2a94daa332ccb3ca26ecc2 | [
"Apache-2.0"
] | 8 | 2016-07-28T09:24:54.000Z | 2019-01-22T22:15:26.000Z | src/main/java/de/zalando/zmon/scheduler/ng/trailruns/TrialRunForwarder.java | zalando-zmon/zmon-scheduler | 6e7938023ecf1c6b8c2a94daa332ccb3ca26ecc2 | [
"Apache-2.0"
] | 82 | 2016-06-02T06:56:34.000Z | 2020-03-10T09:45:02.000Z | src/main/java/de/zalando/zmon/scheduler/ng/trailruns/TrialRunForwarder.java | zalando-zmon/zmon-scheduler | 6e7938023ecf1c6b8c2a94daa332ccb3ca26ecc2 | [
"Apache-2.0"
] | 2 | 2018-05-08T07:30:31.000Z | 2020-01-13T17:05:39.000Z | 28.157895 | 78 | 0.790654 | 6,197 | package de.zalando.zmon.scheduler.ng.trailruns;
import de.zalando.zmon.scheduler.ng.DataCenterSubscriber;
import de.zalando.zmon.scheduler.ng.config.SchedulerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Created by jmussler on 5/22/15.
*/
@Component
public class TrialRunForwarder extends DataCenterSubscriber<TrialRunRequest> {
@Autowired
public TrialRunForwarder(SchedulerConfig config) {
super(config.isTrialRunForward());
}
}
|
3e0e9c963f000c29f803e53d83a231a4484be6e2 | 2,872 | java | Java | protonj2/src/main/java/org/apache/qpid/protonj2/engine/Attachments.java | jiridanek/qpid-protonj2 | 81cdfde6279137185260b0f2a269c661635b9a37 | [
"Apache-2.0"
] | 2 | 2019-04-28T20:38:32.000Z | 2020-05-21T22:25:43.000Z | protonj2/src/main/java/org/apache/qpid/protonj2/engine/Attachments.java | jiridanek/qpid-protonj2 | 81cdfde6279137185260b0f2a269c661635b9a37 | [
"Apache-2.0"
] | null | null | null | protonj2/src/main/java/org/apache/qpid/protonj2/engine/Attachments.java | jiridanek/qpid-protonj2 | 81cdfde6279137185260b0f2a269c661635b9a37 | [
"Apache-2.0"
] | 1 | 2018-04-05T18:51:10.000Z | 2018-04-05T18:51:10.000Z | 35.02439 | 104 | 0.657382 | 6,198 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.protonj2.engine;
/**
* Attachments API used to associate specific data with AMQP Resources
*/
public interface Attachments {
/**
* Gets the user attached value that is associated with the given key, or null
* if no data is mapped to the key.
*
* @param <T> The type to cast the attached mapped value to if one is set.
*
* @param key
* The key to use to lookup the mapped data.
*
* @return the object associated with the given key in this {@link Attachments} instance.
*/
<T> T get(String key);
/**
* Gets the user set {@link Attachments} value that is associated with the given key, or null
* if no data is mapped to the key.
*
* @param <T> The type to cast the attached mapped value to if one is set.
*
* @param key
* The key to use to lookup the mapped data.
* @param typeClass
* The Class that will be used when casting the returned mapped object.
*
* @return the object associated with the given key in this {@link Attachments} instance.
*/
<T> T get(String key, Class<T> typeClass);
/**
* Maps a given object to the given key in this {@link Attachments} instance.
*
* @param <T> The type of the value being set
*
* @param key
* The key to assign the value to
* @param value
* The value to map to the given key.
*
* @return this {@link Attachments} instance.
*/
<T> Attachments set(String key, T value);
/**
* Checks if the given key has a value mapped to it in this {@link Attachments} instance.
*
* @param key
* The key to search for a mapping to in this {@link Attachments} instance.
*
* @return true if there is a value mapped to the given key in this {@link Attachments} instance.
*/
boolean containsKey(String key);
/**
* @return this {@link Attachments} instance with all mapped values and the linked resource cleared.
*/
Attachments clear();
}
|
3e0e9ca4f9b043899d6fefddd6b05206eb4e0322 | 2,322 | java | Java | src-gen/fr/imag/adele/cadse/cadseg/type/AbstractTypeImport.java | chomats/cadse.model-cadseg | 5fffcafc02b665038049b26e40bb3daf114c6959 | [
"Apache-2.0"
] | null | null | null | src-gen/fr/imag/adele/cadse/cadseg/type/AbstractTypeImport.java | chomats/cadse.model-cadseg | 5fffcafc02b665038049b26e40bb3daf114c6959 | [
"Apache-2.0"
] | null | null | null | src-gen/fr/imag/adele/cadse/cadseg/type/AbstractTypeImport.java | chomats/cadse.model-cadseg | 5fffcafc02b665038049b26e40bb3daf114c6959 | [
"Apache-2.0"
] | null | null | null | 27.714286 | 71 | 0.698883 | 6,199 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* Copyright (C) 2006-2010 Adele Team/LIG/Grenoble University, France
*/
package fr.imag.adele.cadse.cadseg.type;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.IConfigurationElement;
/**
* The Class AbstractTypeImport.
*
* @author <a href="mailto:envkt@example.com">Stephane Chomat</a>
*/
public class AbstractTypeImport extends AEObject {
/** The manifest_imports. */
private String[] manifest_imports;
/** The java_imports. */
private String[] java_imports;
/**
* Instantiates a new abstract type import.
*
* @param ce
* the ce
*/
public AbstractTypeImport(IConfigurationElement ce) {
super(ce);
List<String> ij = new ArrayList<String>();
List<String> im = new ArrayList<String>();
IConfigurationElement[] children = ce.getChildren();
for (int i = 0; i < children.length; i++) {
if ("import-java".equals(children[i].getName())) {
ij.add(children[i].getAttribute("classname"));
}
if ("import-manifest".equals(children[i].getName())) {
im.add(children[i].getAttribute("package"));
}
}
manifest_imports = (String[]) im.toArray(new String[im.size()]);
java_imports = (String[]) ij.toArray(new String[ij.size()]);
}
/**
* Gets the java imports.
*
* @return the java imports
*/
public String[] getJavaImports() {
return java_imports;
}
/**
* Gets the manifest imports.
*
* @return the manifest imports
*/
public String[] getManifestImports() {
return manifest_imports;
}
}
|
3e0e9cdd2da5e37368ef2567b7b56f52ce989062 | 3,199 | java | Java | siia-examples/trip-diary/src/test/java/com/manning/siia/trip/diary/ExchangerIntegrationTest.java | AshokChaitanya/spring-intergration-in-action | f4c52d5ff803a3f50bd1df5a4dd438d811f9d2f0 | [
"Apache-2.0"
] | 73 | 2015-03-25T14:03:39.000Z | 2022-02-21T14:53:33.000Z | siia-examples/trip-diary/src/test/java/com/manning/siia/trip/diary/ExchangerIntegrationTest.java | AshokChaitanya/spring-intergration-in-action | f4c52d5ff803a3f50bd1df5a4dd438d811f9d2f0 | [
"Apache-2.0"
] | 1 | 2015-05-15T20:45:36.000Z | 2015-05-15T20:45:36.000Z | siia-examples/trip-diary/src/test/java/com/manning/siia/trip/diary/ExchangerIntegrationTest.java | AshokChaitanya/spring-intergration-in-action | f4c52d5ff803a3f50bd1df5a4dd438d811f9d2f0 | [
"Apache-2.0"
] | 88 | 2015-01-07T22:00:59.000Z | 2021-12-10T02:51:12.000Z | 32.313131 | 88 | 0.786183 | 6,200 | /*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.manning.siia.trip.diary;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.springframework.beans.DirectFieldAccessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.integration.Message;
import org.springframework.integration.MessageChannel;
import org.springframework.integration.core.PollableChannel;
import org.springframework.integration.file.FileReadingMessageSource;
import org.springframework.test.context.ContextConfiguration;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.util.HashMap;
import java.util.Map;
/**
* @author Iwein Fuld
*/
@RunWith(org.springframework.test.context.junit4.SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class ExchangerIntegrationTest {
public static TemporaryFolder parent = new TemporaryFolder();
private String processId = ManagementFactory.getRuntimeMXBean().getName();
@Autowired
@Qualifier("incomingChanges")
public PollableChannel incomingChanges;
@Autowired
@Qualifier("outgoingChanges")
public MessageChannel outgoingChanges;
@Autowired
public FileReadingMessageSource fileReader;
@BeforeClass
public static void injectConfig() throws IOException {
parent.create();
Config.diary.put("store", parent.getRoot().getPath());
}
@Test
public void shouldParseContext() {
// this test is be performed by the fixture (Spring)
}
@Test
public void fileReaderShouldGetInputDir() {
Object inputDir = new DirectFieldAccessor(fileReader).getPropertyValue("directory");
System.out.println(inputDir);
assertThat(inputDir, is(notNullValue()));
}
@Test(timeout = 10000)
public void newChangesArePickedUp() throws Exception {
File file = parent.newFile(Long.toString(System.currentTimeMillis()) + processId);
System.out.println(file);
file.createNewFile();
Object inputDir = new DirectFieldAccessor(fileReader).getPropertyValue("directory");
System.out.println(inputDir);
Message<?> received = incomingChanges.receive();
assertThat(received, is(notNullValue()));
}
public static class Config {
public static final Map<String, String> diary = new HashMap<String, String>();
public static final String processId = ManagementFactory.getRuntimeMXBean().getName();
}
}
|
3e0e9e2c4a3bc22b0901138c7a7531f078171eac | 910 | java | Java | src/main/java/org/danielaguilar/cashless/repository/TransactionRepository.java | daniel-aguilar/cashless | e8786c927ad8d1afa7b0aacf7e1e690b0b00ab07 | [
"BSD-2-Clause"
] | null | null | null | src/main/java/org/danielaguilar/cashless/repository/TransactionRepository.java | daniel-aguilar/cashless | e8786c927ad8d1afa7b0aacf7e1e690b0b00ab07 | [
"BSD-2-Clause"
] | null | null | null | src/main/java/org/danielaguilar/cashless/repository/TransactionRepository.java | daniel-aguilar/cashless | e8786c927ad8d1afa7b0aacf7e1e690b0b00ab07 | [
"BSD-2-Clause"
] | null | null | null | 35 | 97 | 0.772527 | 6,201 | package org.danielaguilar.cashless.repository;
import java.util.List;
import org.danielaguilar.cashless.model.Account;
import org.danielaguilar.cashless.model.Game;
import org.danielaguilar.cashless.model.Transaction;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.stereotype.Repository;
@Repository
public interface TransactionRepository extends PagingAndSortingRepository<Transaction, Integer> {

	/**
	 * Returns the most recent transactions in which the given account took part,
	 * either as sender or as recipient, newest first. The number of rows returned
	 * is bounded by the supplied {@code page}.
	 *
	 * NOTE(review): method name contains a typo ("Lastest"); kept as-is because
	 * renaming would break existing callers.
	 */
	@Query("FROM Transaction "
			+ "WHERE sender = ?1 OR recipient = ?1 "
			+ "ORDER BY date DESC")
	List<Transaction> findLastestTransactions(Account account, Pageable page);

	/**
	 * Returns all transactions whose sender and recipient accounts both belong to
	 * the given game, newest first.
	 */
	@Query("FROM Transaction "
			+ "WHERE sender.game = ?1 AND recipient.game = ?1 "
			+ "ORDER BY date DESC")
	List<Transaction> findByGame(Game game);
}
|
3e0e9e480330554acad17fe3269acc59ce9e57c5 | 11,645 | java | Java | src/main/java/hxDaedalus/data/math/RandGenerator.java | tommyettinger/gdxWalkable | 33292ee7da363c867086703b1edb16fa62757680 | [
"CC0-1.0"
] | null | null | null | src/main/java/hxDaedalus/data/math/RandGenerator.java | tommyettinger/gdxWalkable | 33292ee7da363c867086703b1edb16fa62757680 | [
"CC0-1.0"
] | null | null | null | src/main/java/hxDaedalus/data/math/RandGenerator.java | tommyettinger/gdxWalkable | 33292ee7da363c867086703b1edb16fa62757680 | [
"CC0-1.0"
] | null | null | null | 16.039945 | 189 | 0.533963 | 6,202 | // Generated by Haxe 3.4.2
package hxDaedalus.data.math;
@SuppressWarnings(value={"rawtypes", "unchecked"})
public class RandGenerator extends haxe.lang.HxObject
{
	// Generated no-init constructor used by the Haxe runtime: creates the object
	// without running initialization; fields are populated afterwards.
	public RandGenerator(haxe.lang.EmptyObject empty)
	{
	}
	// Public constructor. Arguments are boxed because Haxe optional parameters
	// are modeled as nullable Objects; nulls fall back to the defaults applied in
	// the generated __hx_ctor helper (seed 1234, rangeMin 0, rangeMax 1).
	public RandGenerator(java.lang.Object seed, java.lang.Object rangeMin_, java.lang.Object rangeMax_)
	{
		hxDaedalus.data.math.RandGenerator.__hx_ctor_hxDaedalus_data_math_RandGenerator(this, seed, rangeMin_, rangeMax_);
	}
	// Generated constructor body: substitutes defaults for null arguments
	// (seed 1234, rangeMin 0, rangeMax 1) and initializes the generator state.
	public static void __hx_ctor_hxDaedalus_data_math_RandGenerator(hxDaedalus.data.math.RandGenerator __hx_this, java.lang.Object seed, java.lang.Object rangeMin_, java.lang.Object rangeMax_)
	{
		int __temp_rangeMax_19 = ( (haxe.lang.Runtime.eq(rangeMax_, null)) ? (1) : haxe.lang.Runtime.toInt(rangeMax_));
		int __temp_rangeMin_18 = ( (haxe.lang.Runtime.eq(rangeMin_, null)) ? (0) : haxe.lang.Runtime.toInt(rangeMin_));
		int __temp_seed17 = ( (haxe.lang.Runtime.eq(seed, null)) ? (1234) : haxe.lang.Runtime.toInt(seed));
		// Both the working state and the saved original seed start at the same value.
		__hx_this._currSeed = __hx_this._originalSeed = __temp_seed17;
		__hx_this.rangeMin = __temp_rangeMin_18;
		__hx_this.rangeMax = __temp_rangeMax_19;
		__hx_this._numIter = 0;
	}
public int rangeMin;
public int rangeMax;
public int _originalSeed;
public long _currSeed;
public int _rangeMin;
public int _rangeMax;
public int _numIter;
public int set_seed(int value)
{
this._currSeed = this._originalSeed = value;
return value;
}
public int get_seed()
{
return this._originalSeed;
}
public void reset()
{
this._currSeed = this._originalSeed;
this._numIter = 0;
}
	// Advances the internal state and returns the next pseudo-random int for the
	// configured [rangeMin, rangeMax] range. The state update is a SplitMix64-style
	// sequence: a golden-ratio-constant increment followed by xorshift/multiply
	// mixing, with the mixed output mapped onto the range by a 64-bit
	// multiply-shift reduction.
	// NOTE(review): the reduction appears to produce rangeMin + [0, rangeMax-rangeMin),
	// i.e. the upper bound looks exclusive (with the default range [0, 1] every draw
	// is 0) — confirm against the original Haxe RandGenerator semantics.
	public int next()
	{
		long x = (this._currSeed += 0x9E3779B97F4A7C15L);
		x ^= x >>> 27;
		x *= 0x3C79AC492BA7B653L;
		x ^= x >>> 33;
		x *= 0x1C69B3F74AC4AE35L;
		int bound = this.rangeMax - this.rangeMin;
		// Multiply the low 32 mixed bits by the span and keep the high word;
		// the final (bound >>> 31) term adds 1 only when the reduced value is negative.
		bound = (int)(bound * ((x ^ x >>> 27) & 0xFFFFFFFFL) >> 32);
		return this.rangeMin + bound + (bound >>> 31);
	}
public int nextInRange(int rangeMin, int rangeMax)
{
this.rangeMin = rangeMin;
this.rangeMax = rangeMax;
return this.next();
}
	// In-place Fisher-Yates-style shuffle of the Haxe array, driven by this generator.
	// NOTE(review): each step draws nextInRange(0, currIdx - 1); if next()'s upper
	// bound is exclusive (see next()), the element at the current slot can never be
	// swapped with itself, which would make this a Sattolo-style (cyclic) shuffle
	// rather than a uniform one — confirm intended behavior.
	public <T> void shuffle(haxe.root.Array<T> array)
	{
		int currIdx = array.length;
		while (( currIdx > 0 ))
		{
			int rndIdx = this.nextInRange(0, ( currIdx - 1 ));
			-- currIdx;
			// Swap the element at the current slot with the randomly chosen one.
			T tmp = array.__get(currIdx);
			array.__set(currIdx, array.__get(rndIdx));
			array.__set(rndIdx, tmp);
		}
	}
	// Generated reflection helper: sets a field by name from a double value,
	// dispatching on the name's hashCode. Unknown names fall through to super.
	@Override public double __hx_setField_f(java.lang.String field, double value, boolean handleProperties)
	{
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 356978303:
				{
					if (field.equals("_numIter"))
					{
						this._numIter = ((int) (value) );
						return value;
					}
					break;
				}
				case 3526257:
				{
					if (field.equals("seed"))
					{
						// "seed" is a Haxe property: route through the setter.
						this.set_seed(((int) (value) ));
						return value;
					}
					break;
				}
				case 343359558:
				{
					if (field.equals("_rangeMax"))
					{
						this._rangeMax = ((int) (value) );
						return value;
					}
					break;
				}
				case 252841941:
				{
					if (field.equals("rangeMin"))
					{
						this.rangeMin = ((int) (value) );
						return value;
					}
					break;
				}
				case 343359796:
				{
					if (field.equals("_rangeMin"))
					{
						this._rangeMin = ((int) (value) );
						return value;
					}
					break;
				}
				case 252841703:
				{
					if (field.equals("rangeMax"))
					{
						this.rangeMax = ((int) (value) );
						return value;
					}
					break;
				}
				case 665375010:
				{
					if (field.equals("_currSeed"))
					{
						// NOTE(review): _currSeed is declared long but the generated code
						// narrows through int here, truncating values outside int range.
						this._currSeed = ((int) (value) );
						return value;
					}
					break;
				}
				case 34465217:
				{
					if (field.equals("_originalSeed"))
					{
						this._originalSeed = ((int) (value) );
						return value;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_setField_f(field, value, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}
	// Generated reflection helper: sets a field by name from a boxed value,
	// dispatching on the name's hashCode. Unknown names fall through to super.
	@Override public java.lang.Object __hx_setField(java.lang.String field, java.lang.Object value, boolean handleProperties)
	{
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 3526257:
				{
					if (field.equals("seed"))
					{
						// "seed" is a Haxe property: route through the setter.
						this.set_seed(haxe.lang.Runtime.toInt(value));
						return value;
					}
					break;
				}
				case 356978303:
				{
					if (field.equals("_numIter"))
					{
						this._numIter = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
				case 252841941:
				{
					if (field.equals("rangeMin"))
					{
						this.rangeMin = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
				case 343359558:
				{
					if (field.equals("_rangeMax"))
					{
						this._rangeMax = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
				case 252841703:
				{
					if (field.equals("rangeMax"))
					{
						this.rangeMax = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
				case 343359796:
				{
					if (field.equals("_rangeMin"))
					{
						this._rangeMin = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
				case 34465217:
				{
					if (field.equals("_originalSeed"))
					{
						this._originalSeed = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
				case 665375010:
				{
					if (field.equals("_currSeed"))
					{
						// NOTE(review): _currSeed is declared long but is set via toInt
						// here, truncating values outside int range.
						this._currSeed = haxe.lang.Runtime.toInt(value);
						return value;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_setField(field, value, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}
	// Generated reflection helper: reads a field or method by name, dispatching on
	// the name's hashCode. Methods are returned as bound Closure objects; the
	// "seed" property goes through its getter. Unknown names fall through to super.
	@Override public java.lang.Object __hx_getField(java.lang.String field, boolean throwErrors, boolean isCheck, boolean handleProperties)
	{
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 2072332025:
				{
					if (field.equals("shuffle"))
					{
						return new haxe.lang.Closure(this, "shuffle");
					}
					break;
				}
				case 3526257:
				{
					if (field.equals("seed"))
					{
						return this.get_seed();
					}
					break;
				}
				case -1016590171:
				{
					if (field.equals("nextInRange"))
					{
						return new haxe.lang.Closure(this, "nextInRange");
					}
					break;
				}
				case 252841941:
				{
					if (field.equals("rangeMin"))
					{
						return this.rangeMin;
					}
					break;
				}
				case 3377907:
				{
					if (field.equals("next"))
					{
						return new haxe.lang.Closure(this, "next");
					}
					break;
				}
				case 252841703:
				{
					if (field.equals("rangeMax"))
					{
						return this.rangeMax;
					}
					break;
				}
				case 108404047:
				{
					if (field.equals("reset"))
					{
						return new haxe.lang.Closure(this, "reset");
					}
					break;
				}
				case 34465217:
				{
					if (field.equals("_originalSeed"))
					{
						return this._originalSeed;
					}
					break;
				}
				case 1976638906:
				{
					if (field.equals("get_seed"))
					{
						return new haxe.lang.Closure(this, "get_seed");
					}
					break;
				}
				case 665375010:
				{
					if (field.equals("_currSeed"))
					{
						// NOTE(review): _currSeed is a long but is exposed truncated to int here.
						return (int)this._currSeed;
					}
					break;
				}
				case 1415526446:
				{
					if (field.equals("set_seed"))
					{
						return new haxe.lang.Closure(this, "set_seed");
					}
					break;
				}
				case 343359796:
				{
					if (field.equals("_rangeMin"))
					{
						return this._rangeMin;
					}
					break;
				}
				case 343359558:
				{
					if (field.equals("_rangeMax"))
					{
						return this._rangeMax;
					}
					break;
				}
				case 356978303:
				{
					if (field.equals("_numIter"))
					{
						return this._numIter;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_getField(field, throwErrors, isCheck, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}
	// Generated reflection helper: reads a numeric field by name as a double,
	// dispatching on the name's hashCode. "seed" goes through its getter.
	// Unknown names fall through to super.
	@Override public double __hx_getField_f(java.lang.String field, boolean throwErrors, boolean handleProperties)
	{
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 356978303:
				{
					if (field.equals("_numIter"))
					{
						return this._numIter;
					}
					break;
				}
				case 3526257:
				{
					if (field.equals("seed"))
					{
						return this.get_seed();
					}
					break;
				}
				case 343359558:
				{
					if (field.equals("_rangeMax"))
					{
						return this._rangeMax;
					}
					break;
				}
				case 252841941:
				{
					if (field.equals("rangeMin"))
					{
						return this.rangeMin;
					}
					break;
				}
				case 343359796:
				{
					if (field.equals("_rangeMin"))
					{
						return this._rangeMin;
					}
					break;
				}
				case 252841703:
				{
					if (field.equals("rangeMax"))
					{
						return this.rangeMax;
					}
					break;
				}
				case 665375010:
				{
					if (field.equals("_currSeed"))
					{
						return this._currSeed;
					}
					break;
				}
				case 34465217:
				{
					if (field.equals("_originalSeed"))
					{
						return this._originalSeed;
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_getField_f(field, throwErrors, handleProperties);
			}
			else
			{
				throw null;
			}
		}
	}
	// Generated reflection helper: invokes a method by name with dynamic arguments,
	// dispatching on the name's hashCode. void results are reported as null.
	// Unknown names fall through to super.
	@Override public java.lang.Object __hx_invokeField(java.lang.String field, haxe.root.Array dynargs)
	{
		{
			boolean __temp_executeDef1 = true;
			switch (field.hashCode())
			{
				case 2072332025:
				{
					if (field.equals("shuffle"))
					{
						this.shuffle(((haxe.root.Array<java.lang.Object>) (dynargs.__get(0)) ));
						return null;
					}
					break;
				}
				case 1415526446:
				{
					if (field.equals("set_seed"))
					{
						return this.set_seed(haxe.lang.Runtime.toInt(dynargs.__get(0)));
					}
					break;
				}
				case -1016590171:
				{
					if (field.equals("nextInRange"))
					{
						return this.nextInRange(haxe.lang.Runtime.toInt(dynargs.__get(0)), haxe.lang.Runtime.toInt(dynargs.__get(1)));
					}
					break;
				}
				case 1976638906:
				{
					if (field.equals("get_seed"))
					{
						return this.get_seed();
					}
					break;
				}
				case 3377907:
				{
					if (field.equals("next"))
					{
						return this.next();
					}
					break;
				}
				case 108404047:
				{
					if (field.equals("reset"))
					{
						// void method: mark the name as handled so the super lookup is
						// skipped, then fall out of the switch and return null below.
						__temp_executeDef1 = false;
						this.reset();
					}
					break;
				}
			}
			if (__temp_executeDef1)
			{
				return super.__hx_invokeField(field, dynargs);
			}
		}
		return null;
	}
@Override public void __hx_getFields(haxe.root.Array<java.lang.String> baseArr)
{
baseArr.push("_tempString");
baseArr.push("_numIter");
baseArr.push("_rangeMax");
baseArr.push("_rangeMin");
baseArr.push("_currSeed");
baseArr.push("_originalSeed");
baseArr.push("rangeMax");
baseArr.push("rangeMin");
baseArr.push("seed");
super.__hx_getFields(baseArr);
}
}
|
3e0e9e4e3a7e9c4879dfef45d5dca5880050ee60 | 203 | java | Java | Wipro/Test Suite/src/Calc.java | parshuramreddysudda/InterviewPreparation | 05330b6f83199bdc3d1411da620b2eedb6408c58 | [
"Apache-2.0"
] | 1 | 2021-11-22T11:31:59.000Z | 2021-11-22T11:31:59.000Z | Wipro/Test Suite/src/Calc.java | parshuramreddysudda/InterviewPreparation | 05330b6f83199bdc3d1411da620b2eedb6408c58 | [
"Apache-2.0"
] | null | null | null | Wipro/Test Suite/src/Calc.java | parshuramreddysudda/InterviewPreparation | 05330b6f83199bdc3d1411da620b2eedb6408c58 | [
"Apache-2.0"
] | null | null | null | 8.826087 | 33 | 0.487685 | 6,203 | /**
*
*/
/**
* @author parsh
*
*/
/**
 * Minimal integer calculator offering addition and subtraction.
 */
public class Calc {

	/** Returns the sum of the two operands. */
	public int add(int v1, int v2) {
		int sum = v1 + v2;
		return sum;
	}

	/** Returns the difference {@code v1 - v2}. */
	public int sub(int v1, int v2) {
		int difference = v1 - v2;
		return difference;
	}
}
|
3e0e9f8c393c034f2302c1660c34f0ac4f740702 | 1,938 | java | Java | src/org.xtuml.bp.io.mdl.test/src/PkgCMGlobalsTestSuiteGenerics.java | rmulvey/bptest | 27a01bf313f7ef3746b6eb9da24ed8c79168323d | [
"Apache-2.0"
] | 1 | 2017-03-22T13:25:12.000Z | 2017-03-22T13:25:12.000Z | src/org.xtuml.bp.io.mdl.test/src/PkgCMGlobalsTestSuiteGenerics.java | rmulvey/bptest | 27a01bf313f7ef3746b6eb9da24ed8c79168323d | [
"Apache-2.0"
] | 5 | 2017-03-09T20:43:52.000Z | 2017-10-10T19:01:08.000Z | src/org.xtuml.bp.io.mdl.test/src/PkgCMGlobalsTestSuiteGenerics.java | keithbrown/bptest | c0b7a271c695c5e506d7fdb79a4be1924fc65695 | [
"Apache-2.0"
] | 30 | 2017-01-03T21:13:44.000Z | 2022-03-25T03:17:20.000Z | 41.234043 | 81 | 0.679051 | 6,204 | //=====================================================================
//
//File: $RCSfile: PkgCMGlobalsTestSuiteGenerics.java,v $
//Version: $Revision: 1.4 $
//Modified: $Date: 2013/01/10 23:13:04 $
//
//(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.xtuml.bp.io.mdl.test.GlobalTestSetupClass;
import org.xtuml.bp.io.mdl.test.PkgCMGlobalsTestGenerics;
import org.xtuml.bp.io.mdl.test.pkgcm.PkgCMCreateTestGenerics;
import org.xtuml.bp.io.mdl.test.pkgcm.PkgCMDeleteTestGenerics;
import org.xtuml.bp.io.mdl.test.pkgcm.PkgCMModifyContentsTestGenerics;
import org.xtuml.bp.io.mdl.test.pkgcm.PkgCMModifyRelationTestGenerics;
import org.xtuml.bp.io.mdl.test.pkgcm.PkgCMRenameTestGenerics;
import junit.framework.TestSuite;
/**
 * JUnit 4 suite aggregating the package-CM (configuration management) tests for
 * the generics-based model. The member classes are listed explicitly below;
 * {@link GlobalTestSetupClass} appears first in the declaration order.
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({
	GlobalTestSetupClass.class,
	PkgCMGlobalsTestGenerics.class,
	PkgCMModifyContentsTestGenerics.class,
	PkgCMModifyRelationTestGenerics.class,
	PkgCMCreateTestGenerics.class,
	PkgCMRenameTestGenerics.class,
	PkgCMDeleteTestGenerics.class,
})
public class PkgCMGlobalsTestSuiteGenerics extends TestSuite {
}
3e0e9fab8df6d01cfec549d9ccab484bb880581a | 1,560 | java | Java | platform/src/main/java/org/hillview/sketches/results/IntTopK.java | vmware/hiero | 6ada2cccb7233db17ffa4e2b93539acce256ddb5 | [
"Apache-2.0"
] | 94 | 2017-08-05T05:24:40.000Z | 2022-02-27T03:12:26.000Z | platform/src/main/java/org/hillview/sketches/results/IntTopK.java | vmware/hiero | 6ada2cccb7233db17ffa4e2b93539acce256ddb5 | [
"Apache-2.0"
] | 364 | 2017-07-27T20:26:36.000Z | 2021-10-20T13:38:34.000Z | platform/src/main/java/org/hillview/sketches/results/IntTopK.java | vmware/hiero | 6ada2cccb7233db17ffa4e2b93539acce256ddb5 | [
"Apache-2.0"
] | 28 | 2017-07-27T22:26:26.000Z | 2022-03-25T05:24:39.000Z | 37.142857 | 90 | 0.721154 | 6,205 | /*
* Copyright (c) 2019 VMware Inc. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hillview.sketches.results;
import it.unimi.dsi.fastutil.ints.Int2IntSortedMap;
/**
 * Interface for computing the top-K elements of a data set, ordered by a
 * comparator, together with counts of how often each element occurs.
 * Implementations require:
 * - Membership: is the element already present?
 * - Maximum: if not present, compare against the maximum value currently in the top K.
 * - Insertion: for adding a new element.
 * We assume that all elements are positive.
 * In general the values inserted into this data structure are indexes of the
 * actual values in an array/column/list.
 */
public interface IntTopK {
    /**
     * @return a sorted map of the top K elements, mapping each element to its count.
     */
    Int2IntSortedMap getTopK();

    /**
     * Tries to add a new value newVal to the data structure.
     * @param newVal value to add to the data structure.
     */
    void push(int newVal);
}
|
3e0ea0d9b3e286df3fc2f335c3028c837a44248e | 5,506 | java | Java | src/main/java/com/github/nayasis/basica/base/format/Formatter.java | nayasis/basica | bbe726222262cb9b4aaf0876b5e3ecfe2a5bf634 | [
"Apache-2.0"
] | 3 | 2020-06-23T08:03:00.000Z | 2020-06-23T08:03:24.000Z | src/main/java/com/github/nayasis/basica/base/format/Formatter.java | nayasis/basica | bbe726222262cb9b4aaf0876b5e3ecfe2a5bf634 | [
"Apache-2.0"
] | 4 | 2019-11-06T06:48:15.000Z | 2020-04-29T05:08:57.000Z | src/main/java/com/github/nayasis/basica/base/format/Formatter.java | nayasis/basica | bbe726222262cb9b4aaf0876b5e3ecfe2a5bf634 | [
"Apache-2.0"
] | null | null | null | 32.19883 | 150 | 0.560116 | 6,206 | package com.github.nayasis.basica.base.format;
import com.github.nayasis.basica.reflection.Reflector;
import com.github.nayasis.basica.base.Characters;
import com.github.nayasis.basica.base.Strings;
import com.github.nayasis.basica.base.Types;
import com.github.nayasis.basica.base.format.function.Replacer;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
/**
* String formatter
*/
public class Formatter {
private static final Replacer bracketCompressor = text -> text.replaceAll( "\\{\\{", "{" ).replaceAll( "\\}\\}", "}" );
public static final ExtractPattern PATTERN_BASIC = new ExtractPattern( "\\{([^\\s\\{\\}]*?)\\}" ).replacer(bracketCompressor).escapeChar('{');
public static final ExtractPattern PATTERN_SHARP = new ExtractPattern( "#\\{([^\\s\\{\\}]*?)\\}" ).replacer(bracketCompressor);
public static final ExtractPattern PATTERN_DOLLAR = new ExtractPattern( "\\$\\{([^\\s\\{\\}]*?)\\}" ).replacer(bracketCompressor);
protected static final String FORMAT_INDEX = "_{{%d}}";
/**
* return binding parameters in string formatted
*
* @param pattern parameter extracting pattern
* @param format format string
* @param parameter binding parameter
* @param binder binder containing binding logic
* @param <T> This is the type parameter
* @return formatter string
*/
public <T> String bindParam( ExtractPattern pattern, Object format, T parameter, ParameterBinder<T> binder ) {
return bindParam( pattern, format, parameter, binder, false );
}
/**
* return binding parameters in string formatted
*
* @param pattern parameter extracting pattern
* @param format format string
* @param parameter binding parameter
* @param binder binder containing binding logic
* @param koreanModification flag whether modify korean JOSA characters
* @param <T> This is the type parameter
* @return formatter string
*/
public <T> String bindParam( ExtractPattern pattern, Object format, T parameter, ParameterBinder<T> binder, boolean koreanModification ) {
String source = Strings.nvl( format );
if( source.isEmpty() ) return source;
Matcher matcher = pattern.pattern().matcher( source );
StringBuilder sb = new StringBuilder();
int cursor = 0;
int index = 0;
while( matcher.find() ) {
String prefix = source.substring( cursor, matcher.start() );
if( pattern.isEscapable(prefix) ) {
continue;
}
sb.append( pattern.replacer().replace(prefix) );
Key key = new Key( matcher.group(1), index );
String value = binder.bind( key.name(), key.format(), parameter );
sb.append( value );
index++;
cursor = matcher.end();
if( koreanModification ) {
if( modifyKorean(value, cursor, sb, source) ) {
cursor++;
}
}
}
// add remains
sb.append( pattern.replacer().replace(source.substring(cursor)) );
return sb.toString();
}
private boolean modifyKorean( String val, int cursor, StringBuilder buffer, String source ) {
if( Strings.isEmpty(val) || cursor >= source.length() ) return false;
boolean hasJongsong = Characters.hasHangulJongsung( val.charAt( val.length() - 1 ) );
if( hasJongsong ) {
char josa = source.charAt( cursor );
switch ( josa ) {
case '은' : case '는' :
buffer.append( hasJongsong ? '은' : '는' ); return true;
case '이' : case '가' :
buffer.append( hasJongsong ? '이' : '가' ); return true;
case '을' : case '를' :
buffer.append( hasJongsong ? '을' : '를' ); return true;
}
}
return false;
}
public String format( Object format, Object... parameter ) {
// when null parameter inputted
if( parameter == null ) {
parameter = new Object[] { null };
}
if( parameter.length == 0 ) return Strings.nvl( format );
return bindParam( PATTERN_BASIC, format, toParam(parameter), (key, userFormat, param) -> {
Object val = param.get( key );
boolean exist = param.containsKey( key );
if( userFormat.isEmpty() ) {
if( val == null ) {
return exist ? null : "";
} else {
return val.toString();
}
} else {
return String.format( userFormat, val );
}
}, true );
}
@SuppressWarnings("unchecked")
private Map toParam( Object ... parameters ) {
Map params = new HashMap();
if( parameters.length == 1 ) {
if ( Types.isMap(parameters[0]) ) {
params.putAll( (Map) parameters[0] );
} else if ( ! Types.isImmutable(parameters[0]) ) {
try {
params.putAll( Reflector.toMapFrom(parameters[0]) );
} catch ( Exception e ) {}
}
}
int index = 0;
for( Object param : parameters ) {
params.put( String.format(FORMAT_INDEX, index++), param );
}
return params;
}
}
|
3e0ea14125600b99f090cbf445ad675b48cc2d97 | 1,734 | java | Java | src/main/java/com/haohan/platform/service/sys/modules/pds/api/entity/resp/admin/PdsRangeAmountBaseResp.java | haohanscm/pds | 94bebd021f409a3bd6d6c25e6ceb5439f0063df5 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/haohan/platform/service/sys/modules/pds/api/entity/resp/admin/PdsRangeAmountBaseResp.java | haohanscm/pds | 94bebd021f409a3bd6d6c25e6ceb5439f0063df5 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/haohan/platform/service/sys/modules/pds/api/entity/resp/admin/PdsRangeAmountBaseResp.java | haohanscm/pds | 94bebd021f409a3bd6d6c25e6ceb5439f0063df5 | [
"Apache-2.0"
] | null | null | null | 21.949367 | 74 | 0.659746 | 6,207 | package com.haohan.platform.service.sys.modules.pds.api.entity.resp.admin;
import com.fasterxml.jackson.annotation.JsonFormat;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
/**
* @author dy
* @create 2018/12/26
*/
/**
 * Admin API response row: one per-day settlement summary holding the purchase,
 * after-sale and total payable amounts for a buyer/merchant pair.
 */
public class PdsRangeAmountBaseResp implements Serializable {

    // Explicit version id: the class is Serializable, and without one the
    // serialized form breaks on any recompilation.
    private static final long serialVersionUID = 1L;

    /** Settlement date (goods-payment date); serialized as yyyy-MM-dd. */
    @JsonFormat(pattern = "yyyy-MM-dd")
    private Date date;
    /** Purchase amount. */
    private BigDecimal buyAmount;
    /** After-sale amount. */
    private BigDecimal afterSaleAmount;
    /** Total payable amount. */
    private BigDecimal payAmount;
    /** Settlement status. */
    private String status;
    private String buyerId;
    private String merchantId;

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

    public BigDecimal getBuyAmount() {
        return buyAmount;
    }

    public void setBuyAmount(BigDecimal buyAmount) {
        this.buyAmount = buyAmount;
    }

    public BigDecimal getAfterSaleAmount() {
        return afterSaleAmount;
    }

    public void setAfterSaleAmount(BigDecimal afterSaleAmount) {
        this.afterSaleAmount = afterSaleAmount;
    }

    public BigDecimal getPayAmount() {
        return payAmount;
    }

    public void setPayAmount(BigDecimal payAmount) {
        this.payAmount = payAmount;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getBuyerId() {
        return buyerId;
    }

    public void setBuyerId(String buyerId) {
        this.buyerId = buyerId;
    }

    public String getMerchantId() {
        return merchantId;
    }

    public void setMerchantId(String merchantId) {
        this.merchantId = merchantId;
    }
}
|
3e0ea1474a9605e4152abc8b06ca4ebcc477edd1 | 10,615 | java | Java | networking/p2p/src/test/java/tech/pegasys/teku/networking/p2p/discovery/discv5/NodeRecordConverterTest.java | bgravenorst/teku | ec682763f4ea262ee7debc6424e8ee3706f885e5 | [
"Apache-2.0"
] | 233 | 2020-10-26T23:40:37.000Z | 2022-03-25T06:05:22.000Z | networking/p2p/src/test/java/tech/pegasys/teku/networking/p2p/discovery/discv5/NodeRecordConverterTest.java | MitchellTesla/teku | d133f21b75f2baa5314d43a8d036d4c4d2c9b939 | [
"Apache-2.0"
] | 930 | 2020-10-23T19:50:25.000Z | 2022-03-31T23:48:55.000Z | networking/p2p/src/test/java/tech/pegasys/teku/networking/p2p/discovery/discv5/NodeRecordConverterTest.java | MitchellTesla/teku | d133f21b75f2baa5314d43a8d036d4c4d2c9b939 | [
"Apache-2.0"
] | 95 | 2020-10-26T07:39:33.000Z | 2022-03-23T16:38:01.000Z | 40.799242 | 199 | 0.706805 | 6,208 | /*
* Copyright 2020 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.teku.networking.p2p.discovery.discv5;
import static org.assertj.core.api.Assertions.assertThat;
import static tech.pegasys.teku.networking.p2p.discovery.DiscoveryNetwork.ATTESTATION_SUBNET_ENR_FIELD;
import static tech.pegasys.teku.networking.p2p.discovery.DiscoveryNetwork.ETH2_ENR_FIELD;
import static tech.pegasys.teku.networking.p2p.discovery.DiscoveryNetwork.SYNC_COMMITTEE_SUBNET_ENR_FIELD;
import static tech.pegasys.teku.networking.p2p.discovery.discv5.NodeRecordConverter.convertToDiscoveryPeer;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Optional;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.units.bigints.UInt64;
import org.ethereum.beacon.discovery.schema.EnrField;
import org.ethereum.beacon.discovery.schema.IdentitySchema;
import org.ethereum.beacon.discovery.schema.NodeRecord;
import org.ethereum.beacon.discovery.schema.NodeRecordFactory;
import org.junit.jupiter.api.Test;
import tech.pegasys.teku.networking.p2p.discovery.DiscoveryPeer;
import tech.pegasys.teku.spec.Spec;
import tech.pegasys.teku.spec.TestSpecFactory;
import tech.pegasys.teku.spec.datastructures.networking.libp2p.rpc.EnrForkId;
import tech.pegasys.teku.spec.schemas.SchemaDefinitions;
import tech.pegasys.teku.spec.util.DataStructureUtil;
import tech.pegasys.teku.ssz.collections.SszBitvector;
import tech.pegasys.teku.ssz.schema.collections.SszBitvectorSchema;
class NodeRecordConverterTest {
private static final Spec SPEC = TestSpecFactory.createMinimalAltair();
private static final SchemaDefinitions SCHEMA_DEFINITIONS = SPEC.getGenesisSchemaDefinitions();
private static final Bytes PUB_KEY =
Bytes.fromHexString("0x0295A5A50F083697FF8557F3C6FE0CDF8E8EC2141D15F19A5A45571ED9C38CE181");
private static final Bytes IPV6_LOCALHOST =
Bytes.fromHexString("0x00000000000000000000000000000001");
private static final Optional<EnrForkId> ENR_FORK_ID = Optional.empty();
private static final SszBitvectorSchema<?> ATT_SUBNET_SCHEMA =
SCHEMA_DEFINITIONS.getAttnetsENRFieldSchema();
private static final SszBitvector ATTNETS = ATT_SUBNET_SCHEMA.getDefault();
private static final SszBitvectorSchema<?> SYNCNETS_SCHEMA =
SCHEMA_DEFINITIONS.getSyncnetsENRFieldSchema();
private static final SszBitvector SYNCNETS = SYNCNETS_SCHEMA.getDefault();
@Test
public void shouldConvertRealEnrToDiscoveryPeer() throws Exception {
final String enr =
"949d1u22cbffbrarjh182eig55721odj";
final NodeRecord nodeRecord = NodeRecordFactory.DEFAULT.fromBase64(enr);
final DiscoveryPeer expectedPeer =
new DiscoveryPeer(
Bytes.fromHexString(
"0x03B86ED9F747A7FA99963F39E3B176B45E9E863108A2D145EA3A4E76D8D0935194"),
new InetSocketAddress(InetAddress.getByAddress(new byte[] {127, 0, 0, 1}), 9000),
Optional.empty(),
ATTNETS,
SYNCNETS);
assertThat(convertToDiscoveryPeer(nodeRecord, SCHEMA_DEFINITIONS)).contains(expectedPeer);
}
@Test
public void shouldNotConvertRecordWithNoIp() {
assertThat(convertNodeRecordWithFields()).isEmpty();
}
@Test
public void shouldNotConvertRecordWithIpButNoPort() {
assertThat(
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V4, Bytes.wrap(new byte[] {127, 0, 0, 1}))))
.isEmpty();
}
@Test
public void shouldNotConvertRecordWithIpAndUdpPortButNoTcpPort() {
assertThat(
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V4, Bytes.wrap(new byte[] {127, 0, 0, 1})),
new EnrField(EnrField.UDP, 30303)))
.isEmpty();
}
@Test
public void shouldUseV4PortIfV6PortSpecifiedWithNoV6Ip() {
assertThat(
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST), new EnrField(EnrField.TCP, 30303)))
.contains(
new DiscoveryPeer(
PUB_KEY, new InetSocketAddress("::1", 30303), ENR_FORK_ID, ATTNETS, SYNCNETS));
}
@Test
public void shouldNotConvertRecordWithV4IpAndV6Port() {
assertThat(
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V4, IPV6_LOCALHOST), new EnrField(EnrField.TCP_V6, 30303)))
.isEmpty();
}
@Test
public void shouldNotConvertRecordWithPortButNoIp() {
assertThat(convertNodeRecordWithFields(new EnrField(EnrField.TCP, 30303))).isEmpty();
}
@Test
public void shouldConvertIpV4Record() {
// IP address bytes are unsigned. Make sure we handle that correctly.
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V4, Bytes.wrap(new byte[] {-127, 24, 31, 22})),
new EnrField(EnrField.TCP, 1234));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY,
new InetSocketAddress("172.16.17.32", 1234),
ENR_FORK_ID,
ATTNETS,
SYNCNETS));
}
@Test
public void shouldConvertIpV6Record() {
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST), new EnrField(EnrField.TCP_V6, 1234));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY, new InetSocketAddress("::1", 1234), ENR_FORK_ID, ATTNETS, SYNCNETS));
}
@Test
public void shouldConvertAttnets() {
SszBitvector persistentSubnets = ATT_SUBNET_SCHEMA.ofBits(1, 8, 14, 32);
Bytes encodedPersistentSubnets = persistentSubnets.sszSerialize();
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST),
new EnrField(EnrField.TCP_V6, 1234),
new EnrField(ATTESTATION_SUBNET_ENR_FIELD, encodedPersistentSubnets));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY,
new InetSocketAddress("::1", 1234),
ENR_FORK_ID,
persistentSubnets,
SYNCNETS));
}
@Test
public void shouldUseEmptyAttnetsWhenFieldValueIsInvalid() {
SszBitvector persistentSubnets = SszBitvectorSchema.create(4).ofBits(1, 2); // Incorrect length
Bytes encodedPersistentSubnets = persistentSubnets.sszSerialize();
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST),
new EnrField(EnrField.TCP_V6, 1234),
new EnrField(ATTESTATION_SUBNET_ENR_FIELD, encodedPersistentSubnets));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY,
new InetSocketAddress("::1", 1234),
ENR_FORK_ID,
ATT_SUBNET_SCHEMA.getDefault(),
SYNCNETS));
}
@Test
public void shouldConvertSyncnets() {
SszBitvector syncnets = SYNCNETS_SCHEMA.ofBits(1, 3);
Bytes encodedSyncnets = syncnets.sszSerialize();
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST),
new EnrField(EnrField.TCP_V6, 1234),
new EnrField(SYNC_COMMITTEE_SUBNET_ENR_FIELD, encodedSyncnets));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY, new InetSocketAddress("::1", 1234), ENR_FORK_ID, ATTNETS, syncnets));
}
@Test
public void shouldUseEmptySyncnetsFieldValueIsInvalid() {
SszBitvector syncnets =
SszBitvectorSchema.create(SYNCNETS_SCHEMA.getLength() * 2L)
.ofBits(1, 4); // Incorrect length
Bytes encodedSyncnets = syncnets.sszSerialize();
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST),
new EnrField(EnrField.TCP_V6, 1234),
new EnrField(SYNC_COMMITTEE_SUBNET_ENR_FIELD, encodedSyncnets));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY, new InetSocketAddress("::1", 1234), ENR_FORK_ID, ATTNETS, SYNCNETS));
}
@Test
public void shouldConvertEnrForkId() {
EnrForkId enrForkId = new DataStructureUtil().randomEnrForkId();
Bytes encodedForkId = enrForkId.sszSerialize();
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST),
new EnrField(EnrField.TCP_V6, 1234),
new EnrField(ETH2_ENR_FIELD, encodedForkId));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY,
new InetSocketAddress("::1", 1234),
Optional.of(enrForkId),
ATTNETS,
SYNCNETS));
}
@Test
public void shouldNotHaveEnrForkIdWhenValueIsInvalid() {
Bytes encodedForkId = Bytes.fromHexString("0x1234");
final Optional<DiscoveryPeer> result =
convertNodeRecordWithFields(
new EnrField(EnrField.IP_V6, IPV6_LOCALHOST),
new EnrField(EnrField.TCP_V6, 1234),
new EnrField(ETH2_ENR_FIELD, encodedForkId));
assertThat(result)
.contains(
new DiscoveryPeer(
PUB_KEY, new InetSocketAddress("::1", 1234), Optional.empty(), ATTNETS, SYNCNETS));
}
private Optional<DiscoveryPeer> convertNodeRecordWithFields(final EnrField... fields) {
return convertToDiscoveryPeer(createNodeRecord(fields), SCHEMA_DEFINITIONS);
}
private NodeRecord createNodeRecord(final EnrField... fields) {
final ArrayList<EnrField> fieldList = new ArrayList<>(Arrays.asList(fields));
fieldList.add(new EnrField(EnrField.ID, IdentitySchema.V4));
fieldList.add(new EnrField(EnrField.PKEY_SECP256K1, PUB_KEY));
return NodeRecordFactory.DEFAULT.createFromValues(UInt64.ZERO, fieldList);
}
}
|
3e0ea1e7c0c96965265bdcadbc4a2cad12a7886b | 4,840 | java | Java | bases de datos/proyecto/sqlParser/src/java/sources/mysql/SubSource.java | luisjimenez6245/escom | a1ae1f988d02f88844f5d29fba75e7cee04998db | [
"MIT"
] | null | null | null | bases de datos/proyecto/sqlParser/src/java/sources/mysql/SubSource.java | luisjimenez6245/escom | a1ae1f988d02f88844f5d29fba75e7cee04998db | [
"MIT"
] | null | null | null | bases de datos/proyecto/sqlParser/src/java/sources/mysql/SubSource.java | luisjimenez6245/escom | a1ae1f988d02f88844f5d29fba75e7cee04998db | [
"MIT"
] | 1 | 2020-03-03T04:16:42.000Z | 2020-03-03T04:16:42.000Z | 36.666667 | 196 | 0.573554 | 6,209 | package sources.mysql;
import controllers.security.Logger;
import controllers.security.Manager;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
public class SubSource {

    private Executor exec;
    private final Logger LOGGER = new Logger();
    /** Result of the last {@link #check}: empty on success, otherwise the driver's message/state. */
    public String res;

    public SubSource() {
    }

    /**
     * Validates a SQL statement by actually executing it against the sandbox database
     * {@code dbName} on host {@code url}, using the configured test credentials.
     *
     * @param query  SQL statement to validate
     * @param dbName database (schema) to run the statement against
     * @param url    database host
     * @return {@code false} only when MySQL reported a syntax error
     *         ("...your sql syntax..."); {@code true} otherwise
     */
    public boolean check(String query, String dbName, String url) {
        res = query.toLowerCase();
        try {
            exec = new Executor(Manager.getInstance().getProperty("dbTestUser"),
                    Manager.getInstance().getProperty("dbTestPassword"),
                    dbName, url, Manager.getInstance().getProperty("dbPort"));
            try {
                if (res.contains("select")) {
                    checkQuery(query);
                } else {
                    checkUpdate(query);
                }
                res = "";
            } catch (SQLException ex) {
                // Execution failure: keep the driver's message so the caller can inspect it.
                // Guard against null messages to keep the contains() check below NPE-free.
                res = ex.getMessage() == null ? "" : ex.getMessage();
            } finally {
                // Close exactly once (the previous version closed twice and leaked
                // the connection when construction partially failed).
                exec.closeConnection();
            }
        } catch (SQLException ex) {
            LOGGER.error(ex);
            res = ex.getSQLState() == null ? "" : ex.getSQLState();
        }
        return !res.toLowerCase().contains("your sql syntax");
    }

    /** Executes a SELECT statement, propagating any driver error. */
    private void checkQuery(String query) throws SQLException {
        exec.executeQuery(query);
    }

    /** Executes a DML/DDL statement, propagating any driver error. */
    private void checkUpdate(String query) throws SQLException {
        exec.executeUpdate(query);
    }

    /** Thin JDBC wrapper around a single MySQL connection to the sandbox database. */
    private class Executor {

        private final String DRIVERCLASSNAME = "com.mysql.jdbc.Driver";
        // "dbName" is a literal placeholder inside the URL template, replaced in conectar().
        private final String BASEURL = "jdbc:mysql://$url$:$port$/dbName?allowPublicKeyRetrieval=true&useSSL=false&useServerPrepStmts=true";
        private Connection connection = null;

        public Executor(String user, String password, String dbName, String url, String port) {
            connection = conectar(user, password, dbName, url, port);
        }

        /** Closes the underlying connection if it was ever opened; safe to call repeatedly. */
        public void closeConnection() throws SQLException {
            // Null-guard: conectar() returns null when the connection could not be opened.
            if (connection != null && !connection.isClosed()) {
                connection.close();
            }
        }

        private Connection conectar(String user, String password, String dbName, String url, String port) {
            try {
                String urlBD = BASEURL;
                urlBD = urlBD.replace("dbName", dbName).replace("$url$", url).replace("$port$", port);
                Class.forName(this.DRIVERCLASSNAME).newInstance();
                return DriverManager.getConnection(urlBD, user, password);
            } catch (ClassNotFoundException | IllegalAccessException | InstantiationException | SQLException ex) {
                System.out.println(ex.fillInStackTrace());
            }
            return null;
        }

        /** Executes a parameterized query; parameters are bound 1-based in array order. */
        public ResultSet executeQuery(String query, Object[] parameters) throws SQLException {
            PreparedStatement state = this.connection.prepareStatement(query);
            if (parameters != null) {
                for (int i = 0; i < parameters.length; ++i) {
                    state.setObject(i + 1, parameters[i]);
                }
            }
            return state.executeQuery();
        }

        /**
         * Executes a parameterized update; for INSERTs returns the first generated key
         * (0 when none), otherwise returns 0.
         */
        public int executeUpdate(String query, Object[] parameters) throws SQLException {
            PreparedStatement state;
            boolean isInsert = query.toLowerCase().contains("insert");
            if (isInsert) {
                state = this.connection.prepareStatement(query, PreparedStatement.RETURN_GENERATED_KEYS);
            } else {
                state = this.connection.prepareStatement(query);
            }
            if (parameters != null) {
                for (int i = 0; i < parameters.length; ++i) {
                    state.setObject(i + 1, parameters[i]);
                }
            }
            state.execute();
            if (isInsert) {
                ResultSet keys = state.getGeneratedKeys();
                return keys.next() ? keys.getInt(1) : 0;
            }
            return 0;
        }

        /**
         * Executes a plain statement; for INSERTs returns the first generated key
         * (0 when none), otherwise returns 0.
         */
        public int executeUpdate(String query) throws SQLException {
            Statement state = this.connection.createStatement();
            if (query.toLowerCase().contains("insert")) {
                // BUGFIX: the previous createStatement(1, RETURN_GENERATED_KEYS) passed
                // invalid ResultSet type/concurrency constants. Generated keys for a plain
                // Statement are requested via execute(sql, Statement.RETURN_GENERATED_KEYS).
                state.execute(query, Statement.RETURN_GENERATED_KEYS);
                ResultSet keys = state.getGeneratedKeys();
                return keys.next() ? keys.getInt(1) : 0;
            }
            state.execute(query);
            return 0;
        }

        /** Executes a plain query and returns its result set. */
        public ResultSet executeQuery(String query) throws SQLException {
            Statement state = this.connection.createStatement();
            return state.executeQuery(query);
        }
    }
}
|
3e0ea244cc54e46d904ac55d0724bc19ca67e19b | 2,606 | java | Java | src/main/java/com/forte/qqrobot/sender/senderlist/GetterUpper.java | yuanhan1993/simple-robot-core | c36ef819d489c18dc435d934cd67338e5d927ca3 | [
"Apache-2.0"
] | 1 | 2020-05-15T17:30:10.000Z | 2020-05-15T17:30:10.000Z | src/main/java/com/forte/qqrobot/sender/senderlist/GetterUpper.java | yuanhan1993/simple-robot-core | c36ef819d489c18dc435d934cd67338e5d927ca3 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/forte/qqrobot/sender/senderlist/GetterUpper.java | yuanhan1993/simple-robot-core | c36ef819d489c18dc435d934cd67338e5d927ca3 | [
"Apache-2.0"
] | null | null | null | 19.684211 | 77 | 0.592055 | 6,210 | package com.forte.qqrobot.sender.senderlist;
import com.forte.qqrobot.beans.messages.get.*;
import com.forte.qqrobot.beans.messages.result.*;
/**
 * Enhancement of the {@code Getter} method family: provides default overloads that accept
 * request wrapper objects as parameters and delegate to the plain-argument variants.
 * @author ForteScarlet <anpch@example.com>
 * @since JDK1.8
 **/
public interface GetterUpper extends Getter {
    /**
     * Gets anonymous member info.
     * Usually looked up via the anonymous member's flag.
     */
    default AnonInfo getAnonInfo(GetAnonInfo get){
        return getAnonInfo(get.getFlag());
    }
//    /**
//     * Gets auth info.
//     * Usually requires no parameters.
//     * @return auth info
//     */
//    AuthInfo getAuthInfo();
    /**
     * Gets the list of banned group members.
     */
    default BanList getBanList(GetBanList get){
        return getBanList(get.getGroup());
    }
    /**
     * Gets group file info.
     */
    default FileInfo getFileInfo(GetFileInfo get){
        return getFileInfo(get.getFlag());
    }
//    /**
//     * Gets the friend list.
//     * @return friend list
//     */
//    FriendList getFriendList();
    /**
     * Gets the group homework list.
     */
    default GroupHomeworkList getGroupHomeworkList(GetGroupHomeworkList get){
        return getGroupHomeworkList(get.getGroup());
    }
    /**
     * Gets group info.
     */
    default GroupInfo getGroupInfo(GetGroupInfo get){
        return getGroupInfo(get.getGroup());
    }
    /**
     * Gets the group link list.
     */
    default GroupLinkList getGroupLinkList(GetGroupLinkList get){
        return getGroupLinkList(get.getGroup());
    }
//    /**
//     * Gets the group list.
//     * @return group list
//     */
//    GroupList getGroupList();
    /**
     * Gets group member info.
     */
    default GroupMemberInfo getGroupMemberInfo(GetGroupMemberInfo get){
        return getGroupMemberInfo(get.getGroup(), get.getQQ());
    }
    /**
     * Gets the group member list.
     */
    default GroupMemberList getGroupMemberList(GetGroupMemberList get){
        return getGroupMemberList(get.getGroup());
    }
    /**
     * Gets the group announcement list.
     */
    default GroupNoteList getGroupNoteList(GetGroupNoteList get){
        return getGroupNoteList(get.getGroup());
    }
    /**
     * Gets the pinned (top) group announcement.
     */
    default GroupTopNote getGroupTopNote(GetGroupTopNote get){
        return getGroupTopNote(get.getGroup());
    }
    /**
     * Gets image info.
     */
    default ImageInfo getImageInfo(GetImageInfo get){
        return getImageInfo(get.getFlag());
    }
//    /**
//     * Gets info about the logged-in QQ account.
//     * @return logged-in QQ account info
//     */
//    LoginQQInfo getLoginQQInfo();
    /**
     * Gets the group shared-file list.
     */
    default ShareList getShareList(GetShareList get){
        return getShareList(get.getGroup());
    }
    /**
     * Gets stranger (non-friend) info.
     */
    default StrangerInfo getStrangerInfo(GetStrangerInfo get){
        return getStrangerInfo(get.getQQ());
    }
}
|
3e0ea255f1e3d0fabe6764adeb5210feed2063dd | 1,311 | java | Java | core/src/main/java/com/zaubersoftware/gnip4j/api/model/ruleValidation/RuleValidation.java | zauberlabs/gnip4j | 915b09d58a79ddd942d65a30dcf7cbd2bb7ea316 | [
"Apache-2.0"
] | 17 | 2015-02-12T17:51:52.000Z | 2022-03-30T07:09:58.000Z | core/src/main/java/com/zaubersoftware/gnip4j/api/model/ruleValidation/RuleValidation.java | zauberlabs/gnip4j | 915b09d58a79ddd942d65a30dcf7cbd2bb7ea316 | [
"Apache-2.0"
] | 34 | 2015-01-19T13:27:43.000Z | 2018-06-15T11:15:53.000Z | core/src/main/java/com/zaubersoftware/gnip4j/api/model/ruleValidation/RuleValidation.java | zauberlabs/gnip4j | 915b09d58a79ddd942d65a30dcf7cbd2bb7ea316 | [
"Apache-2.0"
] | 27 | 2015-01-19T13:17:14.000Z | 2019-04-23T02:12:35.000Z | 21.491803 | 59 | 0.587338 | 6,211 | package com.zaubersoftware.gnip4j.api.model.ruleValidation;
import java.io.Serializable;
import org.codehaus.jackson.annotate.JsonProperty;
import com.zaubersoftware.gnip4j.api.model.Rule;
/**
 * Result of validating a single Gnip rule: whether it is valid, the rule itself,
 * and an optional message from the validation endpoint.
 *
 * <p>Field names are mapped explicitly via Jackson's {@code @JsonProperty};
 * keep the property names stable, they form the wire contract.
 */
public class RuleValidation implements Serializable {
    private static final long serialVersionUID = 1L;

    // true when the service accepted the rule
    @JsonProperty(value = "valid")
    private boolean valid;
    // the rule that was validated
    @JsonProperty(value = "rule")
    private Rule rule;
    // service-provided detail, e.g. the reason a rule was rejected
    @JsonProperty(value = "message")
    private String message;

    public boolean isValid() {
        return valid;
    }

    public void setValid(final boolean valid) {
        this.valid = valid;
    }

    public Rule getRule() {
        return rule;
    }

    public void setRule(final Rule rule) {
        this.rule = rule;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(final String message) {
        this.message = message;
    }

    /** Debug representation: {@code RuleValidation[valid=... rule=... message=...]}. */
    @Override
    public String toString() {
        return new StringBuilder("RuleValidation[")
            .append("valid=")
            .append(valid)
            .append(' ')
            .append("rule=")
            .append(rule)
            .append(' ')
            .append("message=")
            .append(getMessage())
            .append(']')
            .toString();
    }
}
|
3e0ea40ad684615fb43bf4167315777ca940f4be | 243 | java | Java | mawaccess-common/src/main/java/com/omernaci/mawaccess/common/xml/Resultset.java | omernaci/mawaccess | 2b0ea74512f543de6114a5a7ff28f898a289de5c | [
"MIT"
] | 1 | 2020-09-14T21:10:36.000Z | 2020-09-14T21:10:36.000Z | mawaccess-common/src/main/java/com/omernaci/mawaccess/common/xml/Resultset.java | omernaci/mawaccess | 2b0ea74512f543de6114a5a7ff28f898a289de5c | [
"MIT"
] | null | null | null | mawaccess-common/src/main/java/com/omernaci/mawaccess/common/xml/Resultset.java | omernaci/mawaccess | 2b0ea74512f543de6114a5a7ff28f898a289de5c | [
"MIT"
] | null | null | null | 15.1875 | 48 | 0.769547 | 6,212 | package com.omernaci.mawaccess.common.xml;
import lombok.Data;
import javax.xml.bind.annotation.XmlRootElement;
@Data
@XmlRootElement(name="resultset")
public class Resultset {
    // Summary section of the parsed <resultset> document
    // (presumably JAXB default field-name mapping to <summary>; confirm against the XSD).
    private Summary summary;
    // Detail rows of the parsed document — presumably maps to <results>; confirm against the XSD.
    private Results results;
}
|
3e0ea44b1be615306d72df8c35dc1e10057f6110 | 1,533 | java | Java | impl/src/main/java/org/apache/myfaces/view/facelets/compiler/LiteralCommentInstruction.java | benkard/myfaces | 4f320aacd3f03b5e42de768fb9273013353108a8 | [
"Apache-2.0"
] | 102 | 2015-02-21T02:55:59.000Z | 2022-03-18T17:28:32.000Z | impl/src/main/java/org/apache/myfaces/view/facelets/compiler/LiteralCommentInstruction.java | benkard/myfaces | 4f320aacd3f03b5e42de768fb9273013353108a8 | [
"Apache-2.0"
] | 72 | 2018-06-22T14:12:49.000Z | 2022-03-18T16:57:29.000Z | impl/src/main/java/org/apache/myfaces/view/facelets/compiler/LiteralCommentInstruction.java | bohmber/myfaces | 0552df597e3a9eaf7a9288c5693b786e8d259baa | [
"Apache-2.0"
] | 82 | 2015-02-21T02:56:00.000Z | 2022-03-10T16:12:10.000Z | 28.388889 | 70 | 0.727984 | 6,213 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.view.facelets.compiler;
import java.io.IOException;
import jakarta.el.ELContext;
import jakarta.el.ExpressionFactory;
import jakarta.faces.context.FacesContext;
/**
 * Instruction that writes a fixed, compile-time-constant comment to the response.
 * Being literal, it contains no EL to resolve, so {@link #apply} returns the same
 * instance and {@link #isLiteral()} reports {@code true}.
 */
final class LiteralCommentInstruction implements Instruction
{
    /** The comment text to emit (without surrounding comment markers). */
    private final String text;

    public LiteralCommentInstruction(String text)
    {
        this.text = text;
    }

    @Override
    public void write(FacesContext context) throws IOException
    {
        // Comment escaping/markup is delegated to the ResponseWriter implementation.
        context.getResponseWriter().writeComment(this.text);
    }

    @Override
    public Instruction apply(ExpressionFactory factory, ELContext ctx)
    {
        // Nothing to specialize for a literal instruction; reuse this instance.
        return this;
    }

    @Override
    public boolean isLiteral()
    {
        return true;
    }
}
|
3e0ea44cc2f869988fbfd55039a78e3010b65f95 | 1,143 | java | Java | src/main/java/org/sherlockyb/leetcode/string/longestCommonPrefix/Solution.java | sherlock-y/LeetCode | 089272c030afc529ff77aa8d931fb285191f58f0 | [
"Apache-2.0"
] | 2 | 2018-04-01T14:00:08.000Z | 2018-10-22T15:50:37.000Z | src/main/java/org/sherlockyb/leetcode/string/longestCommonPrefix/Solution.java | sherlock-y/LeetCode | 089272c030afc529ff77aa8d931fb285191f58f0 | [
"Apache-2.0"
] | 1 | 2022-03-13T11:41:23.000Z | 2022-03-13T11:41:23.000Z | src/main/java/org/sherlockyb/leetcode/string/longestCommonPrefix/Solution.java | sherlock-y/LeetCode | 089272c030afc529ff77aa8d931fb285191f58f0 | [
"Apache-2.0"
] | null | null | null | 25.977273 | 78 | 0.425197 | 6,214 | package org.sherlockyb.leetcode.string.longestCommonPrefix;
/**
* Created by yangbing on 2017/4/12.
*/
public class Solution {

    /**
     * Returns the longest common prefix shared by all strings in {@code strs}.
     *
     * <p>Vertical-scan rewrite of the original flag-driven loop ({@code same}/{@code
     * shouldBreak} booleans plus a StringBuilder): the candidate prefix is shrunk
     * against each string, and the loop exits early once the prefix is empty.
     * Behavior is unchanged: {@code ""} for null/empty input, {@code strs[0]} for a
     * single-element array.
     *
     * @param strs input strings; may be {@code null} or empty
     * @return the longest common prefix, or {@code ""} if there is none
     */
    public String longestCommonPrefix(String[] strs) {
        if (strs == null || strs.length == 0) {
            return "";
        }
        String prefix = strs[0];
        for (int i = 1; i < strs.length && !prefix.isEmpty(); i++) {
            String current = strs[i];
            int limit = Math.min(prefix.length(), current.length());
            int j = 0;
            while (j < limit && prefix.charAt(j) == current.charAt(j)) {
                j++;
            }
            prefix = prefix.substring(0, j);
        }
        return prefix;
    }
}
|
3e0ea473327f71ae9399afa5763c64360aeeb13f | 2,003 | java | Java | converter-annotation-processor/src/main/java/com/careem/annotation/processing/converter/ConverterAnnotationProcessor.java | careem/converter-codegen | 5cd6c6e8dd07e09a81a5d7fb753b09c30bb000c0 | [
"Apache-2.0"
] | 6 | 2020-06-03T07:18:20.000Z | 2020-06-21T15:07:06.000Z | converter-annotation-processor/src/main/java/com/careem/annotation/processing/converter/ConverterAnnotationProcessor.java | careem/converter-codegen | 5cd6c6e8dd07e09a81a5d7fb753b09c30bb000c0 | [
"Apache-2.0"
] | 4 | 2020-06-02T20:23:32.000Z | 2020-06-19T13:16:19.000Z | converter-annotation-processor/src/main/java/com/careem/annotation/processing/converter/ConverterAnnotationProcessor.java | careem/converter-codegen | 5cd6c6e8dd07e09a81a5d7fb753b09c30bb000c0 | [
"Apache-2.0"
] | 2 | 2020-06-01T15:17:53.000Z | 2020-06-01T15:52:13.000Z | 40.877551 | 101 | 0.77334 | 6,215 | package com.careem.annotation.processing.converter;
import com.careem.annotation.processing.converter.generator.ConverterGenerator;
import com.careem.annotation.processing.converter.generator.mapper.custom.UserConfiguredMapper;
import com.careem.annotation.processing.converter.generator.mapper.preset.DefaultMapper;
import com.careem.annotation.processing.helper.generator.util.WriterProvider;
import com.google.auto.service.AutoService;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.TypeElement;
import javax.tools.Diagnostic;
import java.util.Arrays;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Annotation processor class for generating converters
*/
@AutoService(Processor.class)
@SupportedAnnotationTypes({
"com.careem.annotation.processing.converter.annotation.Converter",
"com.careem.annotation.processing.converter.annotation.Converters"
})
@SupportedSourceVersion(SourceVersion.RELEASE_11)
public class ConverterAnnotationProcessor extends AbstractProcessor {

    /**
     * Processes all {@code @Converter}/{@code @Converters} elements in this round by
     * delegating to {@link ConverterGenerator} with the user-configured mapper listed
     * before the default one (presumably so explicit mappings win — confirm in
     * ConverterGenerator).
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        final WriterProvider writerProvider = new WriterProvider();
        try {
            new ConverterGenerator(
                processingEnv,
                writerProvider,
                Arrays.asList(new UserConfiguredMapper(processingEnv, roundEnv), new DefaultMapper())
            ).process(annotations, roundEnv);
        } catch (Exception e) {
            // Surface generation failures as compiler errors instead of aborting the round.
            Logger.getGlobal().log(Level.SEVERE, e.getMessage(), e);
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, e.getMessage());
        }
        // false: the annotations are not claimed, so other processors may still see them.
        return false;
    }
}
|
3e0ea487c4ec2742d31349541e831f4932a15183 | 4,091 | java | Java | micrometer-core/src/test/java/io/micrometer/core/instrument/binder/httpcomponents/MicrometerHttpClientInterceptorTest.java | master-starcloud/micrometer | faedc1cbc73a9ff0eb99e265a762b63ade83d7f7 | [
"Apache-2.0"
] | 3,292 | 2017-07-21T02:33:05.000Z | 2022-03-31T16:15:23.000Z | micrometer-core/src/test/java/io/micrometer/core/instrument/binder/httpcomponents/MicrometerHttpClientInterceptorTest.java | master-starcloud/micrometer | faedc1cbc73a9ff0eb99e265a762b63ade83d7f7 | [
"Apache-2.0"
] | 2,355 | 2017-07-20T21:57:08.000Z | 2022-03-31T22:55:05.000Z | micrometer-core/src/test/java/io/micrometer/core/instrument/binder/httpcomponents/MicrometerHttpClientInterceptorTest.java | master-starcloud/micrometer | faedc1cbc73a9ff0eb99e265a762b63ade83d7f7 | [
"Apache-2.0"
] | 892 | 2017-07-24T13:44:05.000Z | 2022-03-30T19:48:07.000Z | 39.336538 | 134 | 0.729406 | 6,216 | /**
* Copyright 2020 VMware, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* https://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.core.instrument.binder.httpcomponents;
import com.github.tomakehurst.wiremock.WireMockServer;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.MockClock;
import io.micrometer.core.instrument.Tags;
import io.micrometer.core.instrument.simple.SimpleConfig;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import ru.lanwen.wiremock.ext.WiremockResolver;
import java.util.concurrent.Future;
import static com.github.tomakehurst.wiremock.client.WireMock.any;
import static com.github.tomakehurst.wiremock.client.WireMock.anyUrl;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link MicrometerHttpClientInterceptor}.
*
* @author Jon Schneider
* @author Johnny Lim
*/
@ExtendWith(WiremockResolver.class)
class MicrometerHttpClientInterceptorTest {

    private MeterRegistry registry;

    @BeforeEach
    void setup() {
        // Mock clock keeps timer measurements deterministic.
        registry = new SimpleMeterRegistry(SimpleConfig.DEFAULT, new MockClock());
    }

    @Test
    void asyncRequest(@WiremockResolver.Wiremock WireMockServer server) throws Exception {
        server.stubFor(any(anyUrl()));
        // try-with-resources: previously the client leaked when an assertion failed
        // before the explicit close() call.
        try (CloseableHttpAsyncClient client = asyncClient()) {
            client.start();
            HttpGet request = new HttpGet(server.baseUrl());

            Future<HttpResponse> future = client.execute(request, null);
            HttpResponse response = future.get();

            assertThat(response.getStatusLine().getStatusCode()).isEqualTo(200);
            assertThat(registry.get("httpcomponents.httpclient.request").timer().count()).isEqualTo(1);
        }
    }

    @Test
    void uriIsReadFromHttpHeader(@WiremockResolver.Wiremock WireMockServer server) throws Exception {
        server.stubFor(any(anyUrl()));
        MicrometerHttpClientInterceptor interceptor = new MicrometerHttpClientInterceptor(registry, Tags.empty(), true);
        try (CloseableHttpAsyncClient client = asyncClient(interceptor)) {
            client.start();
            HttpGet request = new HttpGet(server.baseUrl());
            // The interceptor is expected to pick up the "uri" tag from this header.
            request.addHeader(DefaultUriMapper.URI_PATTERN_HEADER, "/some/pattern");

            Future<HttpResponse> future = client.execute(request, null);
            HttpResponse response = future.get();

            assertThat(response.getStatusLine().getStatusCode()).isEqualTo(200);
            assertThat(registry.get("httpcomponents.httpclient.request").tag("uri", "/some/pattern").tag("status", "200").timer().count())
                    .isEqualTo(1);
        }
    }

    /** Builds a client with an interceptor that tags requests by their raw request-line URI. */
    private CloseableHttpAsyncClient asyncClient() {
        MicrometerHttpClientInterceptor interceptor = new MicrometerHttpClientInterceptor(registry,
                request -> request.getRequestLine().getUri(),
                Tags.empty(),
                true);
        return asyncClient(interceptor);
    }

    /** Wires the interceptor's request/response hooks into an async client. */
    private CloseableHttpAsyncClient asyncClient(MicrometerHttpClientInterceptor interceptor) {
        return HttpAsyncClients.custom()
                .addInterceptorFirst(interceptor.getRequestInterceptor())
                .addInterceptorLast(interceptor.getResponseInterceptor())
                .build();
    }
}
|
3e0ea4e1e3808b62282e028903ca7802ae0d94da | 317 | java | Java | jeecg-cloud-module/auto-test/src/main/java/org/jeecg/modules/uut/service/IRunningUutVersionService.java | zhanglailong/auto-test | 53ca1b92ac0b8b17053ef665ea6092302a21bdbb | [
"MIT"
] | null | null | null | jeecg-cloud-module/auto-test/src/main/java/org/jeecg/modules/uut/service/IRunningUutVersionService.java | zhanglailong/auto-test | 53ca1b92ac0b8b17053ef665ea6092302a21bdbb | [
"MIT"
] | null | null | null | jeecg-cloud-module/auto-test/src/main/java/org/jeecg/modules/uut/service/IRunningUutVersionService.java | zhanglailong/auto-test | 53ca1b92ac0b8b17053ef665ea6092302a21bdbb | [
"MIT"
] | null | null | null | 24.384615 | 80 | 0.829653 | 6,217 | package org.jeecg.modules.uut.service;
import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.modules.uut.entity.RunningUutVersion;
import java.util.List;
/**
 * Service over {@link RunningUutVersion} records (MyBatis-Plus {@code IService} CRUD
 * plus version lookup).
 */
public interface IRunningUutVersionService extends IService<RunningUutVersion> {

    /**
     * Looks up the project turn/round version for the given version id.
     *
     * @param versionId id of the version record
     * @return the project turn version string — presumably {@code null} when not found;
     *         confirm against the implementation
     */
    String getProjectTurnVersion(String versionId);
}
|
3e0ea52c638369b39255815446b984c0b7a835ea | 1,139 | java | Java | src/test/java/org/sparta/springwebutils/controller/ClassAnnotatedController.java | SpartaTech/-sparta-spring-web-utils | ad86aead6f1b98c30cb621aa4bb25fd4fa3173b3 | [
"Apache-2.0"
] | 1 | 2018-01-30T05:25:09.000Z | 2018-01-30T05:25:09.000Z | src/test/java/org/sparta/springwebutils/controller/ClassAnnotatedController.java | SpartaTech/-sparta-spring-web-utils | ad86aead6f1b98c30cb621aa4bb25fd4fa3173b3 | [
"Apache-2.0"
] | 2 | 2022-01-07T00:50:25.000Z | 2022-01-22T00:04:57.000Z | src/test/java/org/sparta/springwebutils/controller/ClassAnnotatedController.java | SpartaTech/-sparta-spring-web-utils | ad86aead6f1b98c30cb621aa4bb25fd4fa3173b3 | [
"Apache-2.0"
] | 1 | 2022-03-29T22:16:17.000Z | 2022-03-29T22:16:17.000Z | 36.741935 | 117 | 0.800702 | 6,218 | package org.sparta.springwebutils.controller;
import org.sparta.springwebutils.annotation.ExternalEntryPoint;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
/**
* @author Carlos Eduardo Endler Genz – Sparta Java Team
*
* History:
* Mar 6, 2014 - Carlos Eduardo Endler Genz
*
*/
@Controller
@ExternalEntryPoint(typeBlacklist=Object.class, nameBlacklist={"out", "out_two"})
public class ClassAnnotatedController {

    /**
     * Test fixture: {@code fileOut} is declared as {@code Object}, which is on the
     * class-level {@code typeBlacklist} above, so entry-point scanning should skip it.
     */
    @RequestMapping(value="/classAnnotatedController/testTypeBlacklist", method=RequestMethod.POST)
    public ModelAndView testTypeBlacklist(String inOne, Integer inTwo, Object fileOut) {
        return new ModelAndView();
    }

    /**
     * Test fixture: parameter {@code out} and the request-param name {@code out_two}
     * match the class-level {@code nameBlacklist} and should be skipped.
     */
    @RequestMapping(value="/classAnnotatedController/testNameBlacklist", method=RequestMethod.GET)
    public ModelAndView testNameBlacklist(String out, Integer in, Boolean inToo, @RequestParam("out_two") Long outToo) {
        return new ModelAndView();
    }
}
|
3e0ea548c6806f52f77a5f3eaec485028740f4ce | 294 | java | Java | pic-server/src/main/java/com/megatron/picserver/utils/RandomUtil.java | zhouhaoyan/littleApp | ac8d9577f409741ca385e6a7b78e624abbc3a98f | [
"Apache-2.0"
] | 1 | 2018-09-27T19:15:46.000Z | 2018-09-27T19:15:46.000Z | pic-server/src/main/java/com/megatron/picserver/utils/RandomUtil.java | zhouhaoyan/littleApp | ac8d9577f409741ca385e6a7b78e624abbc3a98f | [
"Apache-2.0"
] | null | null | null | pic-server/src/main/java/com/megatron/picserver/utils/RandomUtil.java | zhouhaoyan/littleApp | ac8d9577f409741ca385e6a7b78e624abbc3a98f | [
"Apache-2.0"
] | null | null | null | 17.294118 | 49 | 0.738095 | 6,219 | package com.megatron.picserver.utils;
import java.util.concurrent.ThreadLocalRandom;

import org.apache.commons.lang.RandomStringUtils;
public class RandomUtil {

    /**
     * Generates a random numeric string of the given length.
     *
     * <p>Implemented with the JDK's {@link ThreadLocalRandom} instead of the
     * long-deprecated commons-lang {@code RandomStringUtils}, removing the
     * third-party dependency. Matching the old behavior, a negative length is
     * rejected with {@link IllegalArgumentException}.
     *
     * @param len number of decimal digits to generate; must be non-negative
     * @return a string of {@code len} random digits ('0'-'9')
     * @throws IllegalArgumentException if {@code len} is negative
     */
    public static String random(int len) {
        if (len < 0) {
            throw new IllegalArgumentException("Requested random string length " + len + " is less than 0.");
        }
        String digits = "0123456789";
        ThreadLocalRandom rnd = ThreadLocalRandom.current();
        StringBuilder sb = new StringBuilder(len);
        for (int i = 0; i < len; i++) {
            sb.append(digits.charAt(rnd.nextInt(digits.length())));
        }
        return sb.toString();
    }

    /** Generates a 4-digit random numeric string. */
    public static String random() {
        return random(4);
    }
}
|
3e0ea6561f703cb3d949b3ef6b38b201723536a0 | 4,648 | java | Java | flink-core/src/test/java/org/apache/flink/testutils/DeeplyEqualsChecker.java | Shih-Wei-Hsu/flink | 3fed93d62d8f79627d4ded2dd1fae6fae91b36e8 | [
"MIT",
"Apache-2.0",
"MIT-0",
"BSD-3-Clause"
] | 41 | 2018-11-14T04:05:42.000Z | 2022-02-09T10:39:23.000Z | flink-core/src/test/java/org/apache/flink/testutils/DeeplyEqualsChecker.java | Shih-Wei-Hsu/flink | 3fed93d62d8f79627d4ded2dd1fae6fae91b36e8 | [
"MIT",
"Apache-2.0",
"MIT-0",
"BSD-3-Clause"
] | 15 | 2021-06-13T18:06:12.000Z | 2022-02-09T22:40:04.000Z | flink-core/src/test/java/org/apache/flink/testutils/DeeplyEqualsChecker.java | houmaozheng/flink | ef692d0967daf8f532d9011122e1d3104a07fb39 | [
"Apache-2.0"
] | 16 | 2019-01-04T09:19:03.000Z | 2022-01-10T14:34:31.000Z | 29.605096 | 118 | 0.694492 | 6,220 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.testutils;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.types.Row;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.BiFunction;
/**
* Deep equality checker for tests. It performs deep checks for objects which have no proper deepEquals methods like:
* <ul>
* <li>{@link Tuple}s</li>
* <li>Java arrays</li>
* <li>{@link Row}</li>
* <li>{@link Throwable}</li>
* </ul>
*
* <p>One can also provide custom check for additional categories of objects with
* {@link DeeplyEqualsChecker#withCustomCheck(BiFunction, CustomEqualityChecker)}. This is used e.g. in scala's tests.
*/
public class DeeplyEqualsChecker {

	/**
	 * Checker that compares o1 and o2 objects if they are deeply equal.
	 *
	 * <p><b>NOTE:</b> All nested comparisons should be done through checker.
	 */
	public interface CustomEqualityChecker {
		boolean check(Object o1, Object o2, DeeplyEqualsChecker checker);
	}

	// Registered (applicability-predicate, comparator) pairs; consulted before the built-in checks.
	private final List<Tuple2<BiFunction<Object, Object, Boolean>, CustomEqualityChecker>> customCheckers
		= new ArrayList<>();

	/**
	 * Adds a custom check. Custom checks are always consulted first; only when none
	 * applies does comparison fall back to the default (built-in) checks.
	 *
	 * @param shouldCheck function to evaluate if the objects should be compared with comparator
	 * @param comparator to perform equality comparison if the shouldCheck passed
	 * @return this checker, with the custom check added (builder style)
	 */
	public DeeplyEqualsChecker withCustomCheck(
			BiFunction<Object, Object, Boolean> shouldCheck,
			CustomEqualityChecker comparator) {
		customCheckers.add(Tuple2.of(shouldCheck, comparator));
		return this;
	}

	/**
	 * Entry point: identity and null fast paths first, then custom checks, then the
	 * built-in structural checks in {@link #deepEquals0}.
	 */
	public boolean deepEquals(Object o1, Object o2) {
		if (o1 == o2) {
			return true;
		} else if (o1 == null || o2 == null) {
			return false;
		} else {
			return customCheck(o1, o2).orElseGet(() -> deepEquals0(o1, o2));
		}
	}

	// Verdict of the first applicable custom checker, or empty when none applies.
	private Optional<Boolean> customCheck(Object o1, Object o2) {
		return customCheckers.stream()
			.filter(checker -> checker.f0.apply(o1, o2))
			.findAny()
			.map(checker -> checker.f1.check(o1, o2, this));
	}

	// Built-in dispatch: arrays, Tuples, Rows and Throwables get structural comparison;
	// everything else falls back to equals(). NOTE(review): the Throwable branch calls
	// getMessage().equals(...), which throws NPE when e1's message is null — confirm
	// callers never compare message-less Throwables.
	private boolean deepEquals0(Object e1, Object e2) {
		if (e1.getClass().isArray() && e2.getClass().isArray()) {
			return deepEqualsArray(e1, e2);
		} else if (e1 instanceof Tuple && e2 instanceof Tuple) {
			return deepEqualsTuple((Tuple) e1, (Tuple) e2);
		} else if (e1 instanceof Row && e2 instanceof Row) {
			return deepEqualsRow((Row) e1, (Row) e2);
		} else if (e1 instanceof Throwable && e2 instanceof Throwable) {
			return ((Throwable) e1).getMessage().equals(((Throwable) e2).getMessage());
		} else {
			return e1.equals(e2);
		}
	}

	// Tuples are equal when arities match and every field pair is deeply equal (recurses via deepEquals).
	private boolean deepEqualsTuple(Tuple tuple1, Tuple tuple2) {
		if (tuple1.getArity() != tuple2.getArity()) {
			return false;
		}
		for (int i = 0; i < tuple1.getArity(); i++) {
			Object o1 = tuple1.getField(i);
			Object o2 = tuple2.getField(i);
			if (!deepEquals(o1, o2)) {
				return false;
			}
		}
		return true;
	}

	// Arrays are compared element-wise via reflection (works for primitive arrays too),
	// recursing through deepEquals for nested structures.
	private boolean deepEqualsArray(Object array1, Object array2) {
		int length1 = Array.getLength(array1);
		int length2 = Array.getLength(array2);
		if (length1 != length2) {
			return false;
		}
		for (int i = 0; i < length1; i++) {
			Object o1 = Array.get(array1, i);
			Object o2 = Array.get(array2, i);
			if (!deepEquals(o1, o2)) {
				return false;
			}
		}
		return true;
	}

	// Rows are equal when arities match and every field pair is deeply equal.
	private boolean deepEqualsRow(Row row1, Row row2) {
		int arity = row1.getArity();
		if (row1.getArity() != row2.getArity()) {
			return false;
		}
		for (int i = 0; i < arity; i++) {
			Object copiedValue = row1.getField(i);
			Object element = row2.getField(i);
			if (!deepEquals(copiedValue, element)) {
				return false;
			}
		}
		return true;
	}
}
|
3e0ea6607331cc9c47339419d983ea4ec208f101 | 4,876 | java | Java | src/vo/Emprestimo.java | henriquehrf/SystemAutonet | c7048cec5f5d17c5d195883eec7907afb1356e3e | [
"MIT"
] | null | null | null | src/vo/Emprestimo.java | henriquehrf/SystemAutonet | c7048cec5f5d17c5d195883eec7907afb1356e3e | [
"MIT"
] | null | null | null | src/vo/Emprestimo.java | henriquehrf/SystemAutonet | c7048cec5f5d17c5d195883eec7907afb1356e3e | [
"MIT"
] | null | null | null | 32.724832 | 207 | 0.709188 | 6,221 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package vo;
import DAO.EntidadeBase;
import enumm.StatusEmprestimo;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Temporal;
/**
*
* @author Eduardo
*/
@Entity
@NamedQueries({
    @NamedQuery(name = "Emprestimo.BuscarTodos", query = "Select e from Emprestimo e ORDER BY(e.dt_emprestimo)"),
    @NamedQuery(name = "Emprestimo.BuscarPorStatusTodos", query = "Select e from Emprestimo e WHERE e.status_emprestimo = :Status"),
    @NamedQuery(name = "Emprestimo.BuscarPorStatusPessoa", query = "Select e from Emprestimo e WHERE e.status_emprestimo = :Status AND e.id_pessoa_solicita.id_pessoa = :idpessoa ORDER BY(e.dt_emprestimo)"),
    @NamedQuery(name = "Emprestimo.BuscarPorIdPessoa",
            query = "Select e from Emprestimo e WHERE e.id_pessoa_solicita.id_pessoa = :idPessoaSolicita"),
    @NamedQuery(name = "Emprestimo.BuscarPorIdPessoaStatusRetirado",
            query = "Select e from Emprestimo e WHERE e.id_pessoa_solicita.id_pessoa = :idPessoaSolicita AND e.status_emprestimo like 'RETIRADO'"),
    @NamedQuery(name = "Emprestimo.BuscarPorIdPessoaStatusESPERANDO_ANALISE",
            query = "Select e from Emprestimo e WHERE e.id_pessoa_solicita.id_pessoa = :idPessoaSolicita AND e.status_emprestimo like 'ESPERANDO_ANALISE'")
})
public class Emprestimo implements Serializable, EntidadeBase {

    // Primary key, generated from a database sequence.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE)
    private Long id_emprestimo;

    // Date on which the loan ("emprestimo") was made; persisted as a DATE column.
    @Temporal(javax.persistence.TemporalType.DATE)
    private Date dt_emprestimo;

    // Current loan status, persisted as the enum constant's name (max 20 chars).
    @Column(length = 20, nullable = false)
    @Enumerated(EnumType.STRING)
    private StatusEmprestimo status_emprestimo;

    // Purpose of the loan; always stored upper-cased (see setFinalidade).
    @Column(length = 100, nullable = false)
    private String finalidade;

    // Free-form remarks; always stored upper-cased (see setObservacao).
    @Column(length = 200, nullable = false)
    private String observacao;

    // Person who requested the loan (lazily fetched association).
    @ManyToOne(fetch = FetchType.LAZY)
    private Pessoa id_pessoa_solicita;

    // Person who authorized the loan, if any (lazily fetched association).
    @ManyToOne(fetch = FetchType.LAZY)
    private Pessoa id_pessoa_autoriza = null;

    /**
     * Returns the primary key (satisfies the {@code EntidadeBase} contract).
     */
    public Long getId() {
        return id_emprestimo;
    }

    public void setId_emprestimo(Long id_emprestimo) {
        this.id_emprestimo = id_emprestimo;
    }

    public Date getDt_emprestimo() {
        return dt_emprestimo;
    }

    /**
     * Returns the loan date formatted as {@code dd-MM-yyyy}.
     * A new SimpleDateFormat is created per call, so this is thread-safe.
     */
    public String getDt_emprestimoString() {
        SimpleDateFormat dt = new SimpleDateFormat("dd-MM-yyyy");
        return dt.format(dt_emprestimo);
    }

    /**
     * Returns the loan date converted to a {@link LocalDate} using the
     * system default time zone.
     */
    public LocalDate getDt_emprestimoLocalDate() {
        Instant instant = Instant.ofEpochMilli(dt_emprestimo.getTime());
        LocalDate localDate = LocalDateTime.ofInstant(instant, ZoneId.systemDefault()).toLocalDate();
        return localDate;
    }

    public void setDt_emprestimo(Date dt_emprestimo) {
        this.dt_emprestimo = dt_emprestimo;
    }

    public StatusEmprestimo getStatus_emprestimo() {
        return status_emprestimo;
    }

    public void setStatus_emprestimo(StatusEmprestimo status_emprestimo) {
        this.status_emprestimo = status_emprestimo;
    }

    public String getFinalidade() {
        return finalidade;
    }

    /**
     * Stores the purpose upper-cased. NOTE(review): throws NPE if
     * {@code finalidade} is null -- confirm callers never pass null.
     */
    public void setFinalidade(String finalidade) {
        this.finalidade = finalidade.toUpperCase();
    }

    public String getObservacao() {
        return observacao;
    }

    /**
     * Stores the remarks upper-cased. NOTE(review): throws NPE if
     * {@code observacao} is null -- confirm callers never pass null.
     */
    public void setObservacao(String observacao) {
        this.observacao = observacao.toUpperCase();
    }

    public Pessoa getId_pessoa_solicita() {
        return id_pessoa_solicita;
    }

    // Convenience accessor; duplicates getNomePessoaSolicita() below.
    public String getId_pessoa_solicitaNome() {
        return id_pessoa_solicita.getNome();
    }

    public void setId_pessoa_solicita(Pessoa id_pessoa_solicita) {
        this.id_pessoa_solicita = id_pessoa_solicita;
    }

    public Pessoa getId_pessoa_autoriza() {
        return id_pessoa_autoriza;
    }

    public void setId_pessoa_autoriza(Pessoa id_pessoa_autoriza) {
        this.id_pessoa_autoriza = id_pessoa_autoriza;
    }

    // Convenience accessor; duplicates getId_pessoa_solicitaNome() above.
    public String getNomePessoaSolicita() {
        return this.id_pessoa_solicita.getNome();
    }
}
|
3e0ea6f3814787debf92bc9695f83e77408d8d6f | 5,080 | java | Java | aws-java-sdk-cognitoidp/src/main/java/com/amazonaws/services/cognitoidp/model/DeleteUserPoolClientRequest.java | iterate-ch/aws-sdk-java | ef20f951d59a5412e0b483729f81722e4ad2bf53 | [
"Apache-2.0"
] | 1 | 2019-02-08T15:23:02.000Z | 2019-02-08T15:23:02.000Z | aws-java-sdk-cognitoidp/src/main/java/com/amazonaws/services/cognitoidp/model/DeleteUserPoolClientRequest.java | iterate-ch/aws-sdk-java | ef20f951d59a5412e0b483729f81722e4ad2bf53 | [
"Apache-2.0"
] | null | null | null | aws-java-sdk-cognitoidp/src/main/java/com/amazonaws/services/cognitoidp/model/DeleteUserPoolClientRequest.java | iterate-ch/aws-sdk-java | ef20f951d59a5412e0b483729f81722e4ad2bf53 | [
"Apache-2.0"
] | 1 | 2022-03-22T05:35:12.000Z | 2022-03-22T05:35:12.000Z | 29.364162 | 123 | 0.607874 | 6,222 | /*
* Copyright 2011-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * Represents the request to delete a user pool client.
 * </p>
 */
public class DeleteUserPoolClientRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The user pool ID for the user pool where you want to delete the client. */
    private String userPoolId;

    /** The ID of the client associated with the user pool. */
    private String clientId;

    /**
     * Sets the user pool ID for the user pool where you want to delete the client.
     *
     * @param userPoolId
     *        the user pool ID
     */
    public void setUserPoolId(String userPoolId) {
        this.userPoolId = userPoolId;
    }

    /**
     * Returns the user pool ID for the user pool where you want to delete the client.
     *
     * @return the user pool ID
     */
    public String getUserPoolId() {
        return this.userPoolId;
    }

    /**
     * Fluent setter for the user pool ID.
     *
     * @param userPoolId
     *        the user pool ID
     * @return this request, to allow method chaining
     */
    public DeleteUserPoolClientRequest withUserPoolId(String userPoolId) {
        this.userPoolId = userPoolId;
        return this;
    }

    /**
     * Sets the ID of the client associated with the user pool.
     *
     * @param clientId
     *        the client ID
     */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    /**
     * Returns the ID of the client associated with the user pool.
     *
     * @return the client ID
     */
    public String getClientId() {
        return this.clientId;
    }

    /**
     * Fluent setter for the client ID.
     *
     * @param clientId
     *        the client ID
     * @return this request, to allow method chaining
     */
    public DeleteUserPoolClientRequest withClientId(String clientId) {
        this.clientId = clientId;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getUserPoolId() != null) {
            sb.append("UserPoolId: ").append(getUserPoolId()).append(",");
        }
        if (getClientId() != null) {
            sb.append("ClientId: ").append(getClientId());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DeleteUserPoolClientRequest)) {
            return false;
        }
        DeleteUserPoolClientRequest other = (DeleteUserPoolClientRequest) obj;
        return fieldsEqual(getUserPoolId(), other.getUserPoolId())
                && fieldsEqual(getClientId(), other.getClientId());
    }

    /** Null-safe equality check for two field values. */
    private static boolean fieldsEqual(String a, String b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + (getUserPoolId() == null ? 0 : getUserPoolId().hashCode());
        hash = prime * hash + (getClientId() == null ? 0 : getClientId().hashCode());
        return hash;
    }

    @Override
    public DeleteUserPoolClientRequest clone() {
        return (DeleteUserPoolClientRequest) super.clone();
    }
}
|
3e0ea7459a67255977bc29f2aead002018d2289d | 2,026 | java | Java | core/runtime/src/main/java/org/apache/polygene/runtime/injection/provider/CachingInjectionProviderFactoryDecorator.java | apache/polygene-java | 031beef870302a0bd01bd5895ce849e00f2d5d5b | [
"MIT"
] | 60 | 2017-02-06T10:42:51.000Z | 2022-02-12T14:41:17.000Z | core/runtime/src/main/java/org/apache/polygene/runtime/injection/provider/CachingInjectionProviderFactoryDecorator.java | DalavanCloud/attic-polygene-java | 031beef870302a0bd01bd5895ce849e00f2d5d5b | [
"MIT"
] | 3 | 2015-07-28T10:23:31.000Z | 2016-12-03T14:56:17.000Z | core/runtime/src/main/java/org/apache/polygene/runtime/injection/provider/CachingInjectionProviderFactoryDecorator.java | DalavanCloud/attic-polygene-java | 031beef870302a0bd01bd5895ce849e00f2d5d5b | [
"MIT"
] | 17 | 2015-07-26T14:19:10.000Z | 2016-11-29T17:38:05.000Z | 35.54386 | 115 | 0.749753 | 6,223 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.runtime.injection.provider;
import org.apache.polygene.bootstrap.InvalidInjectionException;
import org.apache.polygene.runtime.injection.DependencyModel;
import org.apache.polygene.runtime.injection.InjectionProvider;
import org.apache.polygene.runtime.injection.InjectionProviderFactory;
import org.apache.polygene.runtime.model.Resolution;
/**
 * Decorates another {@link InjectionProviderFactory} so that every
 * {@link InjectionProvider} it produces is wrapped in a
 * {@link CachingInjectionProviderDecorator}, allowing resolved injection
 * values to be reused.
 */
public class CachingInjectionProviderFactoryDecorator
    implements InjectionProviderFactory
{
    private final InjectionProviderFactory decoratedFactory;

    public CachingInjectionProviderFactoryDecorator( InjectionProviderFactory decoratedFactory )
    {
        this.decoratedFactory = decoratedFactory;
    }

    @Override
    public InjectionProvider newInjectionProvider( Resolution resolution, DependencyModel dependencyModel )
        throws InvalidInjectionException
    {
        // Delegate creation to the wrapped factory; wrap non-null results in
        // the caching decorator, and pass null through unchanged.
        InjectionProvider provider = decoratedFactory.newInjectionProvider( resolution, dependencyModel );
        return provider == null ? null : new CachingInjectionProviderDecorator( provider );
    }
}
|
3e0ea8adad5047113d621f2ebe6a6a9c59ac6d8a | 3,841 | java | Java | keycloak-spring-boot-autoconfigure/src/main/java/com/github/topikachu/keycloak/spring/support/security/KeycloakAdminCredentialFilter.java | topikachu/keycloak-spring-boot | 4699e482cf8f36e3a998b69e0be7e996ff85c766 | [
"Apache-2.0"
] | null | null | null | keycloak-spring-boot-autoconfigure/src/main/java/com/github/topikachu/keycloak/spring/support/security/KeycloakAdminCredentialFilter.java | topikachu/keycloak-spring-boot | 4699e482cf8f36e3a998b69e0be7e996ff85c766 | [
"Apache-2.0"
] | null | null | null | keycloak-spring-boot-autoconfigure/src/main/java/com/github/topikachu/keycloak/spring/support/security/KeycloakAdminCredentialFilter.java | topikachu/keycloak-spring-boot | 4699e482cf8f36e3a998b69e0be7e996ff85c766 | [
"Apache-2.0"
] | null | null | null | 40.861702 | 157 | 0.720646 | 6,224 | package com.github.topikachu.keycloak.spring.support.security;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.jboss.resteasy.plugins.server.servlet.ServletUtil;
import org.jboss.resteasy.specimpl.ResteasyHttpHeaders;
import org.keycloak.common.ClientConnection;
import org.keycloak.jose.jws.JWSInput;
import org.keycloak.jose.jws.JWSInputException;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.representations.AccessToken;
import org.keycloak.services.managers.AppAuthManager;
import org.keycloak.services.managers.AuthenticationManager;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.services.resources.admin.AdminAuth;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.filter.OncePerRequestFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
/**
 * Servlet filter that turns a Keycloak admin bearer token on the incoming
 * request into a Spring Security {@code Authentication}. Runs once per
 * request; if any step fails the request proceeds unauthenticated.
 */
@RequiredArgsConstructor
public class KeycloakAdminCredentialFilter extends OncePerRequestFilter {

    // Servlet path prefix under which the embedded Keycloak server is mounted;
    // used to reconstruct the request UriInfo for the authenticator.
    @NonNull
    private String keycloakServletPrefix;

    @Override
    protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException {
        // Best effort: populate the security context, then always continue
        // the chain regardless of whether authentication succeeded.
        extractAuthentication(request);
        filterChain.doFilter(request, response);
    }

    /**
     * Parses the Authorization bearer token, resolves its realm and client,
     * authenticates it against the Keycloak session stored on the request,
     * and, on success, installs a {@code KeycloakAuthentication} into the
     * Spring {@code SecurityContextHolder}. Returns silently (leaving the
     * context untouched) whenever any step cannot be completed.
     */
    private void extractAuthentication(HttpServletRequest request) {
        // Session and connection are expected to be placed on the request by
        // an upstream Keycloak component under these attribute names.
        KeycloakSession session = (KeycloakSession) request.getAttribute("KEYCLOAK_SESSION");
        ClientConnection clientConnection = (ClientConnection) request.getAttribute("KEYCLOAK_CLIENT_CONNECTION");

        if (session == null) {
            return;
        }

        ResteasyHttpHeaders headers = ServletUtil.extractHttpHeaders(request);

        // Raw token from the "Authorization: Bearer ..." header, if present.
        String tokenString = AppAuthManager.extractAuthorizationHeaderToken(headers);
        if (tokenString == null) {
            return;
        }
        AccessToken token;
        try {
            // Decode the JWS payload only; signature verification happens
            // later in the BearerTokenAuthenticator.
            JWSInput input = new JWSInput(tokenString);
            token = input.readJsonContent(AccessToken.class);
        } catch (JWSInputException e) {
            logger.warn("Bearer token format error");
            return;
        }
        // Realm name is the last path segment of the token issuer URL.
        String realmName = token.getIssuer().substring(token.getIssuer().lastIndexOf('/') + 1);
        RealmManager realmManager = new RealmManager(session);
        RealmModel realm = realmManager.getRealmByName(realmName);
        if (realm == null) {
            logger.warn("Unknown realm in token");
            return;
        }
        // The realm must be set on the session context before authenticating.
        session.getContext().setRealm(realm);
        UriInfo uriInfo = ServletUtil.extractUriInfo(request, keycloakServletPrefix);

        AuthenticationManager.AuthResult authResult = new AppAuthManager.BearerTokenAuthenticator(session)
                .setRealm(realm)
                .setUriInfo(uriInfo)
                .setConnection(clientConnection)
                .setHeaders(headers)
                .authenticate();

        if (authResult == null) {
            logger.debug("Token not valid");
            return;
        }

        // Resolve the client the token was issued for ("azp" claim).
        ClientModel client = realm.getClientByClientId(token.getIssuedFor());
        if (client == null) {
            logger.debug("Could not find client for authorization");
            return;
        }

        AdminAuth adminAuth = new AdminAuth(realm, authResult.getToken(), authResult.getUser(), client);

        // Publish the authenticated principal to Spring Security.
        KeycloakAuthentication authentication = new KeycloakAuthentication(authResult, session, adminAuth);
        authentication.setAuthenticated(true);
        SecurityContextHolder.getContext().setAuthentication(authentication);
    }
}
|
3e0eaa1818f265f1e7bf77530a89fe37c2ce9088 | 1,560 | java | Java | src/test/java/fr/metabohub/peakforest/services/maps/MapManagerManagementServiceTest.java | peakforest/peakforest-webapp | 8384d687e23d034925bc74ef8600749c0780fb2a | [
"OML",
"RSA-MD"
] | null | null | null | src/test/java/fr/metabohub/peakforest/services/maps/MapManagerManagementServiceTest.java | peakforest/peakforest-webapp | 8384d687e23d034925bc74ef8600749c0780fb2a | [
"OML",
"RSA-MD"
] | null | null | null | src/test/java/fr/metabohub/peakforest/services/maps/MapManagerManagementServiceTest.java | peakforest/peakforest-webapp | 8384d687e23d034925bc74ef8600749c0780fb2a | [
"OML",
"RSA-MD"
] | null | null | null | 31.836735 | 97 | 0.780769 | 6,225 | package fr.metabohub.peakforest.services.maps;
import java.util.ResourceBundle;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import fr.metabohub.peakforest.model.maps.MapEntity;
import fr.metabohub.peakforest.model.maps.MapManager;
import fr.metabohub.peakforest.utils.PeakForestUtils;
public class MapManagerManagementServiceTest {
public Logger logger = Logger.getRootLogger();
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// set test properties file
PeakForestUtils.setBundleConf(ResourceBundle.getBundle("confTest"));
}
@Test
public void test() throws Exception {
// fail("Not yet implemented");
logger.info("[junit test] mapManagerServiceTest -> begin");
long beforeTime = System.currentTimeMillis();
// testSessionFactory
if (MapManagerManagementService.exists(MapManager.MAP_METEXPLORE))
MapManagerManagementService.delete(MapManager.MAP_METEXPLORE);
MapManager test = new MapManager(MapManager.MAP_METEXPLORE);
test.addMapEntities(new MapEntity(test));
test.addMapEntities(new MapEntity(test));
test.addMapEntities(new MapEntity(test));
MapManagerManagementService.create(test);
MapManager test2 = MapManagerManagementService.read(MapManager.MAP_METEXPLORE);
Assert.assertEquals("[error]", test2.getMapEntities().size(), 3);
double checkDuration = (double) (System.currentTimeMillis() - beforeTime) / 1000;
logger.info("[junit test] mapManagerServiceTest -> end, tested in " + checkDuration + " sec.");
}
}
|
3e0eaa459eb14b36c9718dcf2f785a622b82575b | 1,136 | java | Java | src/main/java/edu/umdearborn/astronomyapp/util/email/NewUserEmailContextBuilder.java | telferha/astronomy-app | af32cb51fd4c460491cdde5d51bc4fae4427ad35 | [
"MIT"
] | null | null | null | src/main/java/edu/umdearborn/astronomyapp/util/email/NewUserEmailContextBuilder.java | telferha/astronomy-app | af32cb51fd4c460491cdde5d51bc4fae4427ad35 | [
"MIT"
] | null | null | null | src/main/java/edu/umdearborn/astronomyapp/util/email/NewUserEmailContextBuilder.java | telferha/astronomy-app | af32cb51fd4c460491cdde5d51bc4fae4427ad35 | [
"MIT"
] | null | null | null | 27.047619 | 100 | 0.756162 | 6,226 | package edu.umdearborn.astronomyapp.util.email;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import org.springframework.util.Assert;
import edu.umdearborn.astronomyapp.entity.AstroAppUser;
public class NewUserEmailContextBuilder implements EmailContextBuilder {

  /** Subject line used for every new-user welcome e-mail. */
  public static final String EMAIL_SUBJECT = "Welcome to UMD Astronomy Learning Module Application";

  /** Classpath location of the HTML template rendered for the e-mail body. */
  public static final String TEMPLATE = "/email/new-user-email-template.htm";

  private AstroAppUser user;

  /**
   * Creates a context builder for the welcome e-mail sent to a newly created
   * user.
   *
   * @param user the new account; must not be {@code null}
   * @throws IllegalArgumentException if {@code user} is {@code null}
   */
  public NewUserEmailContextBuilder(AstroAppUser user) {
    // Fail fast: @PostConstruct only runs on container-managed beans, so a
    // directly instantiated builder would otherwise defer the failure until
    // buildContext() dereferences the null user.
    Assert.notNull(user, "user must not be null");
    this.user = user;
  }

  /**
   * Builds the template context for the welcome e-mail: template name,
   * recipient address, subject, the user's name, and the temporary password.
   *
   * @return a mutable map keyed by the {@code EmailContextBuilder} context keys
   */
  @Override
  public Map<String, String> buildContext() {
    Map<String, String> context = new HashMap<>();
    context.put(TEMPLATE_NAME_KEY, TEMPLATE);
    context.put(TO_EMAIL_KEY, user.getEmail());
    context.put(EMAIL_SUBJECT_KEY, EMAIL_SUBJECT);
    context.put(USER_FIRST_NAME_KEY, user.getFirstName());
    context.put(USER_LAST_NAME_KEY, user.getLastName());
    // NOTE(review): getPassword() is assumed to hold the temporary password
    // at this point -- confirm it is never the hashed credential.
    context.put("user.temppass", user.getPassword());
    return context;
  }

  @PostConstruct
  public void postConstruct() {
    // Message variant replaces the deprecated single-argument Assert.notNull.
    Assert.notNull(user, "user must not be null");
  }
}
|
3e0eaa88802c1151efa9e49883ebd9b4ca5ba568 | 721 | java | Java | test/IntegrationTest.java | sant0s/play2-chart-sample | 4b914438ca9e895cbd49da7edf01e22c065fe46c | [
"Apache-2.0"
] | 1 | 2015-04-01T06:53:25.000Z | 2015-04-01T06:53:25.000Z | test/IntegrationTest.java | sant0s/play2-chart-sample | 4b914438ca9e895cbd49da7edf01e22c065fe46c | [
"Apache-2.0"
] | null | null | null | test/IntegrationTest.java | sant0s/play2-chart-sample | 4b914438ca9e895cbd49da7edf01e22c065fe46c | [
"Apache-2.0"
] | null | null | null | 27.730769 | 103 | 0.764216 | 6,227 | import static org.fest.assertions.Assertions.assertThat;
import static play.test.Helpers.HTMLUNIT;
import static play.test.Helpers.fakeApplication;
import static play.test.Helpers.inMemoryDatabase;
import static play.test.Helpers.running;
import static play.test.Helpers.testServer;
import org.junit.Test;
import play.libs.F.Callback;
import play.test.TestBrowser;
public class IntegrationTest {
@Test
public void test() {
running(testServer(3333, fakeApplication(inMemoryDatabase())), HTMLUNIT, new Callback<TestBrowser>() {
public void invoke(TestBrowser browser) {
browser.goTo("http://localhost:3333");
assertThat(browser.pageSource()).contains("Chart module application");
}
});
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.