hexsha
stringlengths 40
40
| size
int64 8
1.04M
| content
stringlengths 8
1.04M
| avg_line_length
float64 2.24
100
| max_line_length
int64 4
1k
| alphanum_fraction
float64 0.25
0.97
|
|---|---|---|---|---|---|
860ff505a106ad2939a01d14569435ae4e8a3081
| 1,962
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|oak
operator|.
name|plugins
operator|.
name|lock
package|;
end_package
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Set
import|;
end_import
begin_import
import|import
name|com
operator|.
name|google
operator|.
name|common
operator|.
name|collect
operator|.
name|ImmutableSet
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|JcrConstants
import|;
end_import
begin_comment
comment|/** * @deprecated Use {@link org.apache.jackrabbit.oak.spi.lock.LockConstants} instead */
end_comment
begin_interface
specifier|public
interface|interface
name|LockConstants
extends|extends
name|JcrConstants
block|{
name|Set
argument_list|<
name|String
argument_list|>
name|LOCK_PROPERTY_NAMES
init|=
name|ImmutableSet
operator|.
name|of
argument_list|(
name|JCR_LOCKISDEEP
argument_list|,
name|JCR_LOCKOWNER
argument_list|)
decl_stmt|;
block|}
end_interface
end_unit
| 22.044944
| 810
| 0.79052
|
7282925f09d2051f542f73501ee6094618700d4a
| 5,876
|
package sarong;
import sarong.util.StringKit;
import java.io.Serializable;
/**
* A new and different RNG type that uses two 64-bit states, with one a 64-bit Galois LFSR and another that updates by
* adding a large constant and the result of the LFSR to its current value. It runs the value of this erratic-updating
* second state through a bare-bones unary hash (a xorshift, a multiply, and a xorshift, which is normally not enough)
* and returns it. This does well in PractRand testing, getting only one "unusual" anomaly at 4TB. It has a large period
* of {@code pow(2, 128) - pow(2, 64)}, and importantly is 1-dimensionally equidistributed (every result occurs exactly
* {@code pow(2, 64) - 1} times, which is technically better than xoroshiro128+ because that generator, despite
* identical state size and a longer period by {@code pow(2, 64) - 1}, doesn't produce 0 as often as it does other
* results). Getting ordered pairs of results, every result will be followed by every other result except one, which
* means this falls just shy of being 2-dimensionally equidistributed.
* <br>
* The name comes from Dodgers baseball star Cody Bellinger, who also came from a middling earlier set of
* accomplishments (like a simple Galois LFSR), figured out how to improve, and became something great all-around.
* I had also just written TroutRNG (keeping with the aquatic animal theme), and Cody Bellinger had just surpassed Mike
* Trout's home run count so far for the season.
* <br>
* Created by Tommy Ettinger on 8/21/2019.
*/
public final class BellRNG implements RandomnessSource, Serializable {
    private static final long serialVersionUID = 4L;
    /**
     * Can be any long value.
     */
    private long stateA;
    /**
     * Must be non-zero. This is the 64-bit Galois LFSR state; 0 is a fixed point
     * of the LFSR update and would make the generator degenerate.
     */
    private long stateB;
    /**
     * Creates a new generator seeded using Math.random.
     */
    public BellRNG() {
        this((long) ((Math.random() - 0.5) * 0x10000000000000L)
                ^ (long) (((Math.random() - 0.5) * 2.0) * 0x8000000000000000L),
                (long) ((Math.random() - 0.5) * 0x10000000000000L)
                ^ (long) (((Math.random() - 0.5) * 2.0) * 0x8000000000000000L));
    }
    /**
     * Creates a new generator from a single long seed. The seed is run through two
     * rounds of a xorshift-multiply unary hash so that nearby seeds produce unrelated
     * initial states; stateB is forced to 1 if the hash happens to yield 0, because
     * the LFSR state must be non-zero.
     *
     * @param seed any long; all 64 bits are used
     */
    public BellRNG(long seed) {
        stateA = (seed = ((seed = (((seed * 0x632BE59BD9B4E019L) ^ 0x9E3779B97F4A7C15L) * 0xC6BC279692B5CC83L)) ^ seed >>> 27) * 0xAEF17502108EF2D9L) ^ seed >>> 25;
        stateB = ((seed = ((seed = (((seed * 0x632BE59BD9B4E019L) ^ 0x9E3779B97F4A7C15L) * 0xC6BC279692B5CC83L)) ^ seed >>> 27) * 0xAEF17502108EF2D9L) ^ seed >>> 25);
        if(stateB == 0L)
            stateB = 1L;
    }
    /**
     * Creates a new generator with both states given exactly. seedA may be any long;
     * seedB is replaced with 1 if it is 0 (the LFSR state must be non-zero).
     *
     * @param seedA any long, used verbatim for the "A" state
     * @param seedB any long, used for the "B" state unless 0 (then 1 is used)
     */
    public BellRNG(final long seedA, final long seedB) {
        stateA = seedA;
        stateB = seedB == 0L ? 1L : seedB;
    }
    /**
     * Get the "A" part of the internal state as a long.
     *
     * @return the current internal "A" state of this object.
     */
    public long getStateA() {
        return stateA;
    }
    /**
     * Set the "A" part of the internal state with a long.
     *
     * @param stateA a 64-bit long
     */
    public void setStateA(long stateA) {
        this.stateA = stateA;
    }
    /**
     * Get the "B" part of the internal state as a long.
     *
     * @return the current internal "B" state of this object.
     */
    public long getStateB() {
        return stateB;
    }
    /**
     * Set the "B" part of the internal state with a long; if given 0, this will ignore it and use 1 instead.
     *
     * @param stateB a 64-bit long
     */
    public void setStateB(long stateB) {
        this.stateB = stateB == 0L ? 1L : stateB;
    }
    /**
     * Using this method, any algorithm that might use the built-in Java Random
     * can interface with this randomness source.
     *
     * @param bits the number of bits to be returned
     * @return the integer containing the appropriate number of bits
     */
    @Override
    public final int next(final int bits) {
        // Advance the Galois LFSR (stateB), then add its new value plus a Weyl-like
        // constant into stateA; the assignment order here is load-bearing.
        long s = (stateA += (stateB = (stateB >>> 1 ^ (-(stateB & 1L) & 0xD800000000000000L))) + 0x9E3779B97F4A7C15L);
        // Bare-bones unary hash: xorshift, multiply, xorshift, then take the top bits.
        s = (s ^ s >>> 30) * 0x369DEA0F31A53F85L;
        return (int)(s ^ s >>> 28) >>> (32 - bits);
    }
    /**
     * Using this method, any algorithm that needs to efficiently generate more
     * than 32 bits of random data can interface with this randomness source.
     * <p>
     * Get a random long between Long.MIN_VALUE and Long.MAX_VALUE (both inclusive).
     *
     * @return a random long between Long.MIN_VALUE and Long.MAX_VALUE (both inclusive)
     */
    @Override
    public final long nextLong() {
        // Identical state transition to next(int); kept duplicated (rather than
        // delegating) so both hot paths stay flat for the JIT.
        long s = (stateA += (stateB = (stateB >>> 1 ^ (-(stateB & 1L) & 0xD800000000000000L))) + 0x9E3779B97F4A7C15L);
        s = (s ^ s >>> 30) * 0x369DEA0F31A53F85L;
        return s ^ s >>> 28;
    }
    /**
     * Produces a copy of this RandomnessSource that, if next() and/or nextLong() are called on this object and the
     * copy, both will generate the same sequence of random numbers from the point copy() was called. This just needs to
     * copy the state so it isn't shared, usually, and produce a new value with the same exact state.
     *
     * @return a copy of this RandomnessSource
     */
    @Override
    public BellRNG copy() {
        return new BellRNG(stateA, stateB);
    }
    @Override
    public String toString() {
        return "BellRNG with stateA 0x" + StringKit.hex(stateA) + "L and stateB 0x" + StringKit.hex(stateB) + 'L';
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        BellRNG bellRNG = (BellRNG) o;
        return stateA == bellRNG.stateA && stateB == bellRNG.stateB;
    }
    @Override
    public int hashCode() {
        // Folds both 64-bit states down to 32 bits; consistent with equals() above.
        return (int) (31L * (stateA ^ (stateA >>> 32)) + (stateB ^ stateB >>> 32));
    }
}
| 38.155844
| 166
| 0.62968
|
15e05703c5117283b81412d41f4d7e2770ff8610
| 1,414
|
package com.yourtion.Pattern08;
import java.util.HashMap;
import java.util.Map;
/**
* Created by Yourtion on 23/11/2016.
*/
/**
 * Null Object pattern demo: lookups and factory calls never return {@code null};
 * a {@code NullPerson} stand-in is handed back instead.
 * <p>
 * Created by Yourtion on 23/11/2016.
 */
public class PersonExample {
    private Map<Integer, Person> people;

    public PersonExample() {
        people = new HashMap<Integer, Person>();
    }

    /**
     * Looks up a person by id, substituting a {@code NullPerson} when absent.
     *
     * @param id key into the people map
     * @return the stored person, or a {@code NullPerson} if no entry exists
     */
    public Person fetchPerson(Integer id) {
        Person found = people.get(id);
        return (found == null) ? new NullPerson() : found;
    }

    // Code to add/remove people

    /**
     * Builds a person only when both name parts are present; otherwise a
     * {@code NullPerson} is returned rather than null.
     *
     * @param firstName given name, may be null
     * @param lastName  family name, may be null
     * @return a {@code RealPerson} when both names are non-null, else a {@code NullPerson}
     */
    public Person buildPerson(String firstName, String lastName) {
        boolean complete = firstName != null && lastName != null;
        return complete ? new RealPerson(firstName, lastName) : new NullPerson();
    }

    /** Drives the demo: exercises buildPerson and fetchPerson and prints the results. */
    public void run() {
        System.out.println("BuildPerson :");
        PersonExample demo = new PersonExample();
        System.out.println("Not null: " + demo.buildPerson("Yourtion", "Guo"));
        System.out.println("Null first name: " + demo.buildPerson(null, "Guo"));
        System.out.println("Null last name: " + demo.buildPerson("Yourtion", null));
        System.out.println("");
        System.out.println("FetchPerson :");
        demo.people.put(0, demo.buildPerson("Yourtion", "Guo"));
        System.out.println("Not null: " + demo.fetchPerson(0));
        System.out.println("Null: " + demo.fetchPerson(1));
    }
}
| 27.192308
| 87
| 0.611033
|
598e1eafda700ca9a8e09ff7e4902d962912e897
| 3,819
|
/*
* JBoss, Home of Professional Open Source
* Copyright 2013, Red Hat, Inc. and/or its affiliates, and individual
* contributors by the @authors tag. See the copyright.txt in the
* distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.quickstarts.portal.social.oauth.google;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.portlet.PortletException;
import javax.portlet.PortletRequestDispatcher;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import com.google.api.services.plus.Plus;
import com.google.api.services.plus.model.Activity;
import com.google.api.services.plus.model.ActivityFeed;
import com.google.api.services.plus.model.CommentFeed;
import org.gatein.api.oauth.AccessToken;
import org.gatein.api.oauth.OAuthProvider;
import org.jboss.quickstarts.portal.social.oauth.AbstractSocialPortlet;
/**
* Simple portlet for displaying latest activities from your Google+ wall. It's read-only portlet.
*
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class GoogleActivitiesPortlet extends AbstractSocialPortlet {
    /** OAuth scope required to read the user's Google+ activity stream. */
    public static final String REQUIRED_SCOPE = "https://www.googleapis.com/auth/plus.login";
    @Override
    protected String getOAuthProviderKey() {
        return OAuthProvider.GOOGLE;
    }
    // See https://developers.google.com/+/api/latest/activities/list for details
    @Override
    protected void doView(RenderRequest request, RenderResponse response) throws PortletException, IOException {
        AccessToken accessToken = getAccessToken();
        // Obtain a Plus client authorized with the current user's token.
        final Plus service = getOAuthProvider().getAuthorizedSocialApiObject(accessToken, Plus.class);
        // Request the 10 most recent public activities of the logged-in ("me") user.
        final Plus.Activities.List list = service.activities().list("me", "public");
        list.setMaxResults(10L);
        // GooglePortletRequest wraps the API call; presumably it also handles token
        // refresh / re-authorization flows and may render its own output on failure.
        ActivityFeed activityFeed = new GooglePortletRequest<ActivityFeed>(request, response, getPortletContext(),
                getOAuthProvider(), REQUIRED_SCOPE) {
            @Override
            protected ActivityFeed invokeRequest() throws IOException {
                return list.execute();
            }
        }.executeRequest();
        List<GoogleActivityBean> googleActivityBeanList = new ArrayList<GoogleActivityBean>();
        // NOTE(review): when activityFeed is null nothing is dispatched to the JSP —
        // presumably executeRequest() has already rendered an authorization prompt or
        // error in that case; confirm against GooglePortletRequest before changing.
        if (activityFeed != null) {
            for (final Activity activity : activityFeed.getItems()) {
                GoogleActivityBean gab = new GoogleActivityBean(activity);
                // Fetch the comments of each activity with a separate wrapped API call.
                CommentFeed comments = new GooglePortletRequest<CommentFeed>(request, response, getPortletContext(),
                        getOAuthProvider(), REQUIRED_SCOPE) {
                    @Override
                    protected CommentFeed invokeRequest() throws IOException {
                        return service.comments().list(activity.getId()).execute();
                    }
                }.executeRequest();
                gab.setCommentFeed(comments);
                googleActivityBeanList.add(gab);
            }
            // Expose the beans to the view and render it.
            request.setAttribute("googleActivityBeanList", googleActivityBeanList);
            PortletRequestDispatcher prd = getPortletContext().getRequestDispatcher("/jsp/google/activities.jsp");
            prd.include(request, response);
        }
    }
}
| 38.969388
| 116
| 0.705158
|
2228c955f688ebf8b6a807d9c5916848a5ba2ad4
| 31,880
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan.misc;
import static org.junit.Assert.fail;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Scope;
import org.apache.geode.internal.cache.wan.WANTestBase;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.junit.categories.WanTest;
@Category({WanTest.class})
public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBase {
public ReplicatedRegion_ParallelWANPersistenceDUnitTest() {
super();
// TODO Auto-generated constructor stub
}
final String expectedExceptions = null;
  /**
   * Verifies that events queued by persistent parallel gateway senders over a replicated region
   * survive a full local-site restart and are still delivered to the remote site.
   * <p>
   * Disabled intentionally (per the original author):
   * <ol>
   * <li>In release 8.0 the queue name was changed back to the old style for rolling-upgrade
   * support.</li>
   * <li>A common parallel sender for different non-colocated regions is not supported in 8.0, so
   * ParallelGatewaySenderQueue#convertPathToName is not a concern here.</li>
   * <li>The test should be re-enabled in the next release.</li>
   * <li>Version-based rolling-upgrade support should be provided: which QSTRING is used should
   * depend on whether the GemFire version is 8.0 or prior.</li>
   * </ol>
   */
  @Ignore
  @Test
  public void test_DR_PGSPERSISTENCE_VALIDATEQUEUE_Restart_Validate_Receiver() {
    // create locator on local site
    Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    // create locator on remote site
    Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
    // create receiver on remote site
    createCacheInVMs(nyPort, vm2, vm3);
    createReceiverInVMs(vm2, vm3);
    vm2.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap()));
    vm3.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap()));
    // create cache in local site
    createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
    // create senders with disk store; the store names are captured so the senders can be
    // recreated from the very same disk stores after the restart below
    String diskStore1 = vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore2 = vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore3 = vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore4 = vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    LogWriterUtils.getLogWriter()
        .info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
    vm4.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    vm5.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    vm6.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    vm7.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    startSenderInVMs("ln", vm4, vm5, vm6, vm7);
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    // pause the senders so the puts below accumulate in the (persistent) queues
    vm4.invoke(() -> WANTestBase.pauseSender("ln"));
    vm5.invoke(() -> WANTestBase.pauseSender("ln"));
    vm6.invoke(() -> WANTestBase.pauseSender("ln"));
    vm7.invoke(() -> WANTestBase.pauseSender("ln"));
    // start puts in region on local site
    vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_RR", 3000));
    LogWriterUtils.getLogWriter().info("Completed puts in the region");
    // --------------------close and rebuild local site
    // -------------------------------------------------
    // kill the senders
    /*
     * ExpectedException exp1 = addExpectedException(CacheClosedException.class .getName()); try {
     */ vm4.invoke(() -> WANTestBase.killSender());
    vm5.invoke(() -> WANTestBase.killSender());
    vm6.invoke(() -> WANTestBase.killSender());
    vm7.invoke(() -> WANTestBase.killSender());
    /*
     * } finally { exp1.remove(); }
     */
    LogWriterUtils.getLogWriter().info("Killed all the senders.");
    // restart the vm
    vm4.invoke(() -> WANTestBase.createCache(lnPort));
    vm5.invoke(() -> WANTestBase.createCache(lnPort));
    vm6.invoke(() -> WANTestBase.createCache(lnPort));
    vm7.invoke(() -> WANTestBase.createCache(lnPort));
    LogWriterUtils.getLogWriter().info("Created back the cache");
    // create senders with disk store, reusing the disk stores captured before the kill so the
    // queued events are recovered
    vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore1, true));
    vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore2, true));
    vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore3, true));
    vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore4, true));
    LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
    // create PR on local site
    AsyncInvocation inv1 = vm4.invokeAsync(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    AsyncInvocation inv2 = vm5.invokeAsync(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    AsyncInvocation inv3 = vm6.invokeAsync(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    AsyncInvocation inv4 = vm7.invokeAsync(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    try {
      inv1.join();
      inv2.join();
      inv3.join();
      inv4.join();
    } catch (InterruptedException e) {
      e.printStackTrace();
      fail();
    }
    // start the senders in async mode. This will ensure that the
    // node of shadow PR that went down last will come up first
    startSenderInVMsAsync("ln", vm4, vm5, vm6, vm7);
    LogWriterUtils.getLogWriter().info("Waiting for senders running.");
    // wait for senders running
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    LogWriterUtils.getLogWriter().info("All the senders are now running...");
    // the 3000 pre-restart puts must have been recovered from disk and delivered remotely
    vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    // ----------------------------------------------------------------------------------------------------
    // new puts after recovery must also flow through
    vm4.invoke(() -> WANTestBase.doNextPuts(getTestMethodName() + "_RR", 3000, 10000));
    vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 10000));
    vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 10000));
  }
  /**
   * Same restart scenario as the previous test, but the local replicated regions themselves are
   * also persistent (DISTRIBUTED_ACK / PERSISTENT_REPLICATE), so after the restart both the sender
   * queues and the local region contents must be recovered from disk.
   * <p>
   * Disabled intentionally (per the original author):
   * <ol>
   * <li>In release 8.0 the queue name was changed back to the old style for rolling-upgrade
   * support.</li>
   * <li>A common parallel sender for different non-colocated regions is not supported in 8.0, so
   * ParallelGatewaySenderQueue#convertPathToName is not a concern here.</li>
   * <li>The test should be re-enabled in the next release.</li>
   * <li>Version-based rolling-upgrade support should be provided: which QSTRING is used should
   * depend on whether the GemFire version is 8.0 or prior.</li>
   * </ol>
   */
  @Ignore
  @Test
  public void test_DRPERSISTENCE_PGSPERSISTENCE_VALIDATEQUEUE_Restart_Validate_Receiver() {
    // create locator on local site
    Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    // create locator on remote site
    Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
    // create receiver on remote site
    createCacheInVMs(nyPort, vm2, vm3);
    vm2.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap()));
    vm3.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap()));
    createReceiverInVMs(vm2, vm3);
    // create cache in local site
    createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
    // create senders with disk store; store names are captured so the senders can be recreated
    // from the same disk stores after the restart below
    String diskStore1 = vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore2 = vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore3 = vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore4 = vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    LogWriterUtils.getLogWriter()
        .info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
    // local regions are PERSISTENT_REPLICATE here (unlike the previous test)
    vm4.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    vm5.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    vm6.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    vm7.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    startSenderInVMs("ln", vm4, vm5, vm6, vm7);
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    // pause the senders so the puts below accumulate in the (persistent) queues
    vm4.invoke(() -> WANTestBase.pauseSender("ln"));
    vm5.invoke(() -> WANTestBase.pauseSender("ln"));
    vm6.invoke(() -> WANTestBase.pauseSender("ln"));
    vm7.invoke(() -> WANTestBase.pauseSender("ln"));
    // start puts in region on local site
    vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_RR", 3000));
    LogWriterUtils.getLogWriter().info("Completed puts in the region");
    // --------------------close and rebuild local site
    // -------------------------------------------------
    // kill the senders
    /*
     * ExpectedException exp1 = addExpectedException(CacheClosedException.class .getName()); try {
     */
    vm4.invoke(() -> WANTestBase.killSender());
    vm5.invoke(() -> WANTestBase.killSender());
    vm6.invoke(() -> WANTestBase.killSender());
    vm7.invoke(() -> WANTestBase.killSender());
    /*
     * } finally { exp1.remove(); }
     */
    // restart the vm
    vm4.invoke(() -> WANTestBase.createCache(lnPort));
    vm5.invoke(() -> WANTestBase.createCache(lnPort));
    vm6.invoke(() -> WANTestBase.createCache(lnPort));
    vm7.invoke(() -> WANTestBase.createCache(lnPort));
    LogWriterUtils.getLogWriter().info("Created back the cache");
    // create senders with disk store, reusing the stores captured before the kill so the
    // queued events are recovered
    vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore1, true));
    vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore2, true));
    vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore3, true));
    vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore4, true));
    LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
    // create PR on local site; async so persistent-region recovery in all members can proceed
    // concurrently (persistent members may wait for each other)
    AsyncInvocation inv1 =
        vm4.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
            Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    AsyncInvocation inv2 =
        vm5.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
            Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    AsyncInvocation inv3 =
        vm6.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
            Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    AsyncInvocation inv4 =
        vm7.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
            Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    try {
      inv1.join();
      inv2.join();
      inv3.join();
      inv4.join();
    } catch (InterruptedException e) {
      e.printStackTrace();
      fail();
    }
    // start the senders in async mode. This will ensure that the
    // node of shadow PR that went down last will come up first
    startSenderInVMsAsync("ln", vm4, vm5, vm6, vm7);
    LogWriterUtils.getLogWriter().info("Waiting for senders running.");
    // wait for senders running
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    LogWriterUtils.getLogWriter().info("All the senders are now running...");
    // ----------------------------------------------------------------------------------------------------
    // local persistent regions must have recovered their 3000 entries from disk
    vm4.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    vm5.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    vm6.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    vm7.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    /*
     * exp1 = addExpectedException(CacheClosedException.class.getName()); try {
     */ vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
    // new puts after recovery must also flow through to the remote site
    vm4.invoke(() -> WANTestBase.doNextPuts(getTestMethodName() + "_RR", 3000, 10000));
    vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 10000));
    vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 10000));
    /*
     * } finally { exp1.remove(); }
     */ }
/**
* Below test is disabled intentionally 1> In this release 8.0, for rolling upgrade support queue
* name is changed to old style 2>Common parallel sender for different non colocated regions is
* not supported in 8.0 so no need to bother about ParallelGatewaySenderQueue#convertPathToName 3>
* We have to enabled it in next release 4> Version based rolling upgrade support should be
* provided. based on the version of the gemfire QSTRING should be used between 8.0 and version
* prior to 8.0
*/
@Ignore
@Test
public void test_DRPERSISTENCE_PRPERSISTENCE_PGSPERSISTENCE_VALIDATEQUEUE_Restart_Validate_Receiver() {
// create locator on local site
Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
// create locator on remote site
Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
// create receiver on remote site
vm2.invoke(() -> WANTestBase.createCache(nyPort));
vm3.invoke(() -> WANTestBase.createCache(nyPort));
vm2.invoke(WANTestBase::createReceiver);
vm3.invoke(WANTestBase::createReceiver);
vm2.invoke(
() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap()));
vm3.invoke(
() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", null, isOffHeap()));
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
isOffHeap()));
// create cache in local site
vm4.invoke(() -> WANTestBase.createCache(lnPort));
vm5.invoke(() -> WANTestBase.createCache(lnPort));
vm6.invoke(() -> WANTestBase.createCache(lnPort));
vm7.invoke(() -> WANTestBase.createCache(lnPort));
// create senders with disk store
String diskStore1 = vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
String diskStore2 = vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
String diskStore3 = vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
String diskStore4 = vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
LogWriterUtils.getLogWriter()
.info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
vm4.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
vm5.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
vm6.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
vm7.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
vm4.invoke(() -> WANTestBase.createPersistentPartitionedRegion(getTestMethodName() + "_PR",
"ln", 1, 100, isOffHeap()));
vm5.invoke(() -> WANTestBase.createPersistentPartitionedRegion(getTestMethodName() + "_PR",
"ln", 1, 100, isOffHeap()));
vm6.invoke(() -> WANTestBase.createPersistentPartitionedRegion(getTestMethodName() + "_PR",
"ln", 1, 100, isOffHeap()));
vm7.invoke(() -> WANTestBase.createPersistentPartitionedRegion(getTestMethodName() + "_PR",
"ln", 1, 100, isOffHeap()));
startSenderInVMs("ln", vm4, vm5, vm6, vm7);
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm4.invoke(() -> WANTestBase.pauseSender("ln"));
vm5.invoke(() -> WANTestBase.pauseSender("ln"));
vm6.invoke(() -> WANTestBase.pauseSender("ln"));
vm7.invoke(() -> WANTestBase.pauseSender("ln"));
// start puts in region on local site
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_RR", 3000));
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 3000));
LogWriterUtils.getLogWriter().info("Completed puts in the region");
// --------------------close and rebuild local site
// -------------------------------------------------
// kill the senders
/*
* ExpectedException exp1 = addExpectedException(CacheClosedException.class .getName()); try {
*/ vm4.invoke(() -> WANTestBase.killSender());
vm5.invoke(() -> WANTestBase.killSender());
vm6.invoke(() -> WANTestBase.killSender());
vm7.invoke(() -> WANTestBase.killSender());
/*
* } finally { exp1.remove(); }
*/
LogWriterUtils.getLogWriter().info("Killed all the senders.");
// restart the vm
vm4.invoke(() -> WANTestBase.createCache(lnPort));
vm5.invoke(() -> WANTestBase.createCache(lnPort));
vm6.invoke(() -> WANTestBase.createCache(lnPort));
vm7.invoke(() -> WANTestBase.createCache(lnPort));
LogWriterUtils.getLogWriter().info("Created back the cache");
// create senders with disk store
vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore1, true));
vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore2, true));
vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore3, true));
vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore4, true));
LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
// create PR on local site
AsyncInvocation inv1 =
vm4.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
AsyncInvocation inv2 =
vm5.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
AsyncInvocation inv3 =
vm6.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
AsyncInvocation inv4 =
vm7.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
try {
inv1.join();
inv2.join();
inv3.join();
inv4.join();
} catch (InterruptedException e) {
e.printStackTrace();
fail();
}
inv1 = vm4.invokeAsync(() -> WANTestBase
.createPersistentPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100, isOffHeap()));
inv2 = vm5.invokeAsync(() -> WANTestBase
.createPersistentPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100, isOffHeap()));
inv3 = vm6.invokeAsync(() -> WANTestBase
.createPersistentPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100, isOffHeap()));
inv4 = vm7.invokeAsync(() -> WANTestBase
.createPersistentPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100, isOffHeap()));
try {
inv1.join();
inv2.join();
inv3.join();
inv4.join();
} catch (InterruptedException e) {
e.printStackTrace();
fail();
}
// start the senders in async mode. This will ensure that the
// node of shadow PR that went down last will come up first
startSenderInVMsAsync("ln", vm4, vm5, vm6, vm7);
LogWriterUtils.getLogWriter().info("Waiting for senders running.");
// wait for senders running
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
LogWriterUtils.getLogWriter().info("All the senders are now running...");
// ----------------------------------------------------------------------------------------------------
vm4.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
vm5.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
vm6.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
vm7.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
vm4.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 3000));
vm5.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 3000));
vm6.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 3000));
vm7.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 3000));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 3000));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 3000));
vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 3000));
vm4.invoke(() -> WANTestBase.doNextPuts(getTestMethodName() + "_RR", 3000, 10000));
vm4.invoke(() -> WANTestBase.doNextPuts(getTestMethodName() + "_PR", 3000, 10000));
/*
* exp1 = addExpectedException(CacheClosedException.class.getName()); try {
*/
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 10000));
vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 10000));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 10000));
vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 10000));
/*
* } finally { exp1.remove(); }
*/
}
  /**
   * Below test is disabled intentionally 1> In this release 8.0, for rolling upgrade support queue
   * name is changed to old style 2>Common parallel sender for different non colocated regions is
   * not supported in 8.0 so no need to bother about ParallelGatewaySenderQueue#convertPathToName 3>
   * We have to enabled it in next release 4> Version based rolling upgrade support should be
   * provided. based on the version of the gemfire QSTRING should be used between 8.0 and version
   * prior to 8.0
   *
   * <p>Scenario: 4-node persistent replicated region with persistent parallel senders on the
   * local site ("ln"), 2 receiver nodes on the remote site ("ny"). Two sender members are killed
   * while puts are in flight, then restarted from their recorded disk stores; finally the remote
   * site is validated to contain all 15000 entries.
   */
  @Ignore
  @Test
  public void test_DRPERSISTENCE_PGSPERSISTENCE_4NODES_2NODESDOWN_Validate_Receiver()
      throws Exception {
    // Local site "ln" (DS id 1) and remote site "ny" (DS id 2) locators.
    Integer lnPort = vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    Integer nyPort = vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
    // Remote site: two receiver members.
    createCacheInVMs(nyPort, vm2, vm3);
    createReceiverInVMs(vm2, vm3);
    // Local site: four sender members.
    createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
    // create senders with disk store
    // The auto-generated disk store names are kept so the senders can later be
    // recreated from the very same persistent stores after the kill/restart.
    String diskStore1 = vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore2 = vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore3 = vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    String diskStore4 = vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
        true, 100, 10, false, true, null, null, true));
    LogWriterUtils.getLogWriter()
        .info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
    // Persistent replicated region attached to sender "ln" on all four members.
    vm4.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    vm5.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    vm6.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    vm7.invoke(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
        Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    startSenderInVMs("ln", vm4, vm5, vm6, vm7);
    // Matching (non-persistent) region on the receiving site.
    vm2.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    vm3.invoke(
        () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    // NOTE(review): fixed 60 s sleep — presumably to let the WAN topology
    // settle before the kill sequence below; confirm whether an await-style
    // condition could replace it.
    Thread.sleep(60000);
    {
      // Interleave puts with killing two of the four sender members so that
      // persistence recovery is exercised while traffic is in flight.
      AsyncInvocation inv1 = vm7.invokeAsync(() -> ReplicatedRegion_ParallelWANPropagationDUnitTest
          .doPuts0(getTestMethodName() + "_RR", 10000));
      Thread.sleep(1000);
      AsyncInvocation inv2 = vm4.invokeAsync(() -> WANTestBase.killSender());
      Thread.sleep(2000);
      AsyncInvocation inv3 = vm6.invokeAsync(() -> ReplicatedRegion_ParallelWANPropagationDUnitTest
          .doPuts1(getTestMethodName() + "_RR", 10000));
      Thread.sleep(1500);
      AsyncInvocation inv4 = vm5.invokeAsync(() -> WANTestBase.killSender());
      try {
        inv1.join();
        inv2.join();
        inv3.join();
        inv4.join();
      } catch (Exception e) {
        Assert.fail("UnExpected Exception", e);
      }
    }
    // Restart the two killed members and rebuild their senders from the
    // previously recorded disk stores.
    vm4.invoke(() -> WANTestBase.createCache(lnPort));
    vm5.invoke(() -> WANTestBase.createCache(lnPort));
    LogWriterUtils.getLogWriter().info("Created back the cache");
    // create senders with disk store
    vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore1, true));
    vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
        null, diskStore2, true));
    LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
    AsyncInvocation inv1 =
        vm4.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
            Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    AsyncInvocation inv2 =
        vm5.invokeAsync(() -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln",
            Scope.DISTRIBUTED_ACK, DataPolicy.PERSISTENT_REPLICATE, isOffHeap()));
    // Further puts run concurrently with the region recreation.
    AsyncInvocation inv3 = vm6.invokeAsync(() -> ReplicatedRegion_ParallelWANPropagationDUnitTest
        .doPuts2(getTestMethodName() + "_RR", 15000));
    try {
      inv1.join();
      inv2.join();
    } catch (InterruptedException e) {
      e.printStackTrace();
      fail();
    }
    // NOTE(review): inv3 (doPuts2) is never joined — completion appears to be
    // covered only indirectly by the final region-size validation; confirm.
    startSenderInVMsAsync("ln", vm4, vm5);
    LogWriterUtils.getLogWriter().info("Waiting for senders running.");
    // wait for senders running
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
    vm7.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
    // All 15000 entries must have propagated to the remote site.
    vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_RR", 15000));
  }
}
| 47.653214
| 107
| 0.668256
|
fb39d0335382bc1bd89c17a1cf6e3ef302871b7d
| 3,028
|
package org.springframework.security.oauth2.client;
import org.junit.Before;
import org.junit.Test;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.oauth2.client.registration.ClientRegistration;
import org.springframework.security.oauth2.client.registration.TestClientRegistrations;
import org.springframework.security.oauth2.core.TestOAuth2AccessTokens;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
/**
 * Tests for {@link AuthorizationCodeOAuth2AuthorizedClientProvider}.
 *
 * @author Joe Grandja
 */
public class AuthorizationCodeOAuth2AuthorizedClientProviderTests {

	private AuthorizationCodeOAuth2AuthorizedClientProvider provider;

	private ClientRegistration registration;

	private OAuth2AuthorizedClient client;

	private Authentication principal;

	@Before
	public void setup() {
		this.provider = new AuthorizationCodeOAuth2AuthorizedClientProvider();
		this.registration = TestClientRegistrations.clientRegistration().build();
		this.client = new OAuth2AuthorizedClient(this.registration, "principal",
				TestOAuth2AccessTokens.scopes("read", "write"));
		this.principal = new TestingAuthenticationToken("principal", "password");
	}

	@Test
	public void authorizeWhenContextIsNullThenThrowIllegalArgumentException() {
		// A null context is a programming error and must be rejected up front.
		assertThatIllegalArgumentException()
				.isThrownBy(() -> this.provider.authorize(null));
	}

	@Test
	public void authorizeWhenNotAuthorizationCodeThenUnableToAuthorize() {
		// A client_credentials registration is outside this provider's scope.
		ClientRegistration clientCredentials = TestClientRegistrations.clientCredentials().build();
		OAuth2AuthorizationContext context = OAuth2AuthorizationContext
				.withClientRegistration(clientCredentials)
				.principal(this.principal)
				.build();
		assertThat(this.provider.authorize(context)).isNull();
	}

	@Test
	public void authorizeWhenAuthorizationCodeAndAuthorizedThenNotAuthorize() {
		// An already-authorized client needs no (re-)authorization.
		OAuth2AuthorizationContext context = OAuth2AuthorizationContext
				.withAuthorizedClient(this.client)
				.principal(this.principal)
				.build();
		assertThat(this.provider.authorize(context)).isNull();
	}

	@Test
	public void authorizeWhenAuthorizationCodeAndNotAuthorizedThenAuthorize() {
		// Not yet authorized: the provider signals that the authorization_code
		// flow must be started.
		OAuth2AuthorizationContext context = OAuth2AuthorizationContext
				.withClientRegistration(this.registration)
				.principal(this.principal)
				.build();
		assertThatExceptionOfType(ClientAuthorizationRequiredException.class)
				.isThrownBy(() -> this.provider.authorize(context));
	}

}
| 38.820513
| 103
| 0.829921
|
8e7823cf88e47c1e7faacaa99c6b84cd9398b43a
| 437
|
package com.jauxim.grandapp.ui.Activity.ActivityLogin;
/**
 * MVP view contract for the login screen; the presenter drives these calls
 * and the implementing Activity renders them.
 * NOTE(review): the int parameters appear to be Android string resource ids
 * (they are passed straight to error displays) — confirm against the
 * implementing Activity.
 */
public interface ActivityLoginView {

    /** Shows a busy/progress indicator while the login request runs. */
    void showWait();

    /** Hides the busy/progress indicator. */
    void removeWait();

    /** Reports a general (network/backend) failure message. */
    void onFailure(String appErrorMessage);

    /** Shows a username validation error. */
    void showUserError(int user_error);

    /** Shows a password validation error. */
    void showPassError(int pass_error);

    /** Shows a login-failed error. */
    void showLoginError(int login_error);

    /** Navigates to the main screen; {@code newUser} flags a first-time login. */
    void startMainActivity(boolean newUser);

    /** Clears all previously shown input errors. */
    void resetErrors();

    /** Shows the forgot-password success message. */
    void showForgotPassSuccess(int forgotpsw_success);
}
| 19.863636
| 54
| 0.745995
|
6addcb6235857301fec74bc3d355550b2febbe97
| 2,432
|
/*
* Copyright 2021 EPAM Systems.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.digital.data.platform.generator.metadata;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import java.util.List;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class AsyncDataProviderTest {

  static final String READ_MODE_CHANGE_TYPE = "readMode";
  static final String SEARCH_CONDITION_CHANGE_TYPE = "searchCondition";

  AsyncDataProvider instance;

  @Mock
  private MetadataRepository metadataRepository;

  @BeforeEach
  void setUp() {
    given(metadataRepository.findAll()).willReturn(generateMetadata());
    instance = new AsyncDataProvider(new MetadataFacade(metadataRepository));
  }

  /**
   * Fixture: one async table, two async search conditions (plain + simple),
   * plus one unrelated searchCondition row that must not leak into the result.
   */
  private List<Metadata> generateMetadata() {
    return List.of(
        new Metadata(1L, READ_MODE_CHANGE_TYPE, "createTable", "table_name", "async"),
        new Metadata(2L, READ_MODE_CHANGE_TYPE, "createSearchCondition", "sc_name", "async"),
        new Metadata(3L, READ_MODE_CHANGE_TYPE, "createSimpleSearchCondition", "ssc_name", "async"),
        new Metadata(4L, SEARCH_CONDITION_CHANGE_TYPE, "sc_name", "column", "value")
    );
  }

  @Test
  void shouldFindAsyncData() {
    var asyncData = instance.findAll();

    // Consistency fix: use AssertJ collection assertions throughout instead of
    // mixing assertThat(size()).isEqualTo(..) with JUnit's
    // Assertions.assertTrue(contains(..)). Expected values are unchanged —
    // the original assertions already expected the "_v" suffix on search
    // condition names.
    assertThat(asyncData.getAsyncTables()).containsExactly("table_name");
    assertThat(asyncData.getAsyncSearchConditions())
        .containsExactlyInAnyOrder("sc_name_v", "ssc_name_v");
  }
}
| 36.848485
| 100
| 0.758224
|
0d94baec9e395c8509c6f2325d20e90b872ac205
| 1,055
|
package de.uhd.ifi.se.decision.management.jira.model.link;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import de.uhd.ifi.se.decision.management.jira.TestSetUp;
import de.uhd.ifi.se.decision.management.jira.model.DecisionKnowledgeProject;
import de.uhd.ifi.se.decision.management.jira.model.Link;
import de.uhd.ifi.se.decision.management.jira.testdata.Links;
/**
 * Tests for {@link Link#isInterProjectLink()}.
 */
public class TestIsInterProjectLink extends TestSetUp {

	// Link under test, taken from the shared test data in setUp().
	public Link link;

	@Before
	public void setUp() {
		init();
		link = Links.getTestLinks().get(0);
	}

	@Test
	public void testFalseValidLink() {
		// Source and target of the default test link belong to the same project.
		assertFalse(link.isInterProjectLink());
	}

	@Test
	public void testFalseInvalidLink() {
		// A link whose target has no project cannot be inter-project.
		link.getTarget().setProject((DecisionKnowledgeProject) null);
		assertFalse(link.isInterProjectLink());
		// Restore the shared fixture so other tests are unaffected.
		link.getTarget().setProject("TEST");
	}

	@Test
	public void testTrue() {
		// Different projects on source ("TEST") and target ("CONDEC").
		link.getTarget().setProject("CONDEC");
		assertTrue(link.isInterProjectLink());
		// Restore the shared fixture.
		link.getTarget().setProject("TEST");
	}
}
| 25.119048
| 77
| 0.762085
|
617dbe5fc0c330534e11f66297067d01874d21d7
| 7,189
|
package Algorand;
import java.math.BigInteger;
import com.algorand.algosdk.v2.client.common.AlgodClient;
import com.algorand.algosdk.account.Account;
import com.algorand.algosdk.v2.client.model.*;
import org.apache.commons.codec.digest.DigestUtils;
import org.json.JSONArray;
import org.json.JSONObject;
import com.algorand.algosdk.v2.client.common.*;
import com.algorand.algosdk.algod.client.ApiException;
import com.algorand.algosdk.crypto.Address;
import com.algorand.algosdk.transaction.SignedTransaction;
import com.algorand.algosdk.transaction.Transaction;
import com.algorand.algosdk.util.Encoder;
// Show Creating, modifying, sending and listing assets
public class CreateDomainAsset {

	public AlgodClient client = null;

	/**
	 * Creates the main holding account for asset creation.
	 * NOTE(review): the CreateAlgoAccount instance is constructed purely for
	 * its side effects and is not retained — confirm against CreateAlgoAccount.
	 */
	public CreateDomainAsset() throws Exception {
		CreateAlgoAccount accCreator = new CreateAlgoAccount();
	}

	/** Connects to a local algod node using sandbox-style defaults. */
	private AlgodClient connectToNetwork() {
		final String ALGOD_API_ADDR = "localhost";
		final int ALGOD_PORT = 4001;
		final String ALGOD_API_TOKEN = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
		AlgodClient client = new AlgodClient(ALGOD_API_ADDR, ALGOD_PORT, ALGOD_API_TOKEN);
		return client;
	}

	/**
	 * Pretty-prints the account's "created-assets" entry matching
	 * {@code assetID}, if present.
	 * (Iterating an empty JSONArray is a no-op, so the original length() > 0
	 * guard and the catch-and-rethrow wrapper were removed as redundant —
	 * behavior is unchanged.)
	 */
	public void printCreatedAsset(Account account, Long assetID) throws Exception {
		if (client == null)
			this.client = connectToNetwork();
		String accountInfo = client.AccountInformation(account.getAddress()).execute().toString();
		JSONObject jsonObj = new JSONObject(accountInfo.toString());
		JSONArray jsonArray = (JSONArray) jsonObj.get("created-assets");
		for (Object o : jsonArray) {
			JSONObject ca = (JSONObject) o;
			Integer myassetIDInt = (Integer) ca.get("index");
			if (assetID.longValue() == myassetIDInt.longValue()) {
				System.out.println("Created Asset Info: " + ca.toString(2)); // pretty print
				break;
			}
		}
	}

	/**
	 * Pretty-prints the account's "assets" holding entry matching
	 * {@code assetID}, if present.
	 */
	public void printAssetHolding(Account account, Long assetID) throws Exception {
		if (client == null)
			this.client = connectToNetwork();
		String accountInfo = client.AccountInformation(account.getAddress()).execute().toString();
		JSONObject jsonObj = new JSONObject(accountInfo.toString());
		JSONArray jsonArray = (JSONArray) jsonObj.get("assets");
		for (Object o : jsonArray) {
			JSONObject ca = (JSONObject) o;
			Integer myassetIDInt = (Integer) ca.get("asset-id");
			if (assetID.longValue() == myassetIDInt.longValue()) {
				System.out.println("Asset Holding Info: " + ca.toString(2)); // pretty print
				break;
			}
		}
	}

	/**
	 * Blocks until transaction {@code txID} is confirmed, waiting one round at
	 * a time starting from the node's current last round.
	 */
	public void waitForConfirmation(String txID) throws Exception {
		if (client == null)
			this.client = connectToNetwork();
		Long lastRound = client.GetStatus().execute().body().lastRound;
		while (true) {
			// Check the pending transactions
			Response<PendingTransactionResponse> pendingInfo = client.PendingTransactionInformation(txID).execute();
			if (pendingInfo.body().confirmedRound != null && pendingInfo.body().confirmedRound > 0) {
				// Got the completed Transaction
				System.out.println(
						"Transaction " + txID + " confirmed in round " + pendingInfo.body().confirmedRound);
				break;
			}
			lastRound++;
			client.WaitForBlock(lastRound).execute();
		}
	}

	/**
	 * Submits a raw signed transaction to the network.
	 * (Removed a stray ';' statement and a catch(ApiException){throw(e);}
	 * wrapper that merely rethrew — behavior is unchanged.)
	 *
	 * @return the transaction id assigned by the node
	 */
	public String submitTransaction(SignedTransaction signedTx) throws Exception {
		// Msgpack encode the signed transaction
		byte[] encodedTxBytes = Encoder.encodeToMsgPack(signedTx);
		return client.RawTransaction().rawtxn(encodedTxBytes).execute().body().txId;
	}

	/**
	 * Creates a one-unit, zero-decimal "DOTALGO" asset named after
	 * {@code domain}, fully managed by {@code account}, waits for confirmation
	 * and prints the created asset and holding.
	 *
	 * NOTE(review): despite its name this is an ordinary method, not a
	 * constructor (it has a void return type) — kept as-is so existing
	 * callers still compile.
	 */
	public void CreateDomainAsset(Account account, String domain) throws Exception {
		if (client == null)
			this.client = connectToNetwork();
		// recover example accounts
		Account acct1 = account;
		// CREATE ASSET
		// get changing network parameters for each transaction
		TransactionParametersResponse params = client.TransactionParams().execute().body();
		params.fee = (long) 1000;
		// Create the Asset:
		BigInteger assetTotal = BigInteger.valueOf(1);
		boolean defaultFrozen = false;
		String unitName = "DOTALGO";
		String assetName = domain;
		// TODO(review): no '/' is inserted between host and domain — confirm
		// callers always pass a path that starts with '/'.
		String url = "http://localhost:3000"+domain;
		// md5Hex yields 32 hex characters, matching the 32 bytes the
		// asset-create transaction's metadata hash field expects.
		String assetMetadataHash = MetadataHash(domain);
		Address manager = acct1.getAddress();
		Address reserve = acct1.getAddress();
		Address freeze = acct1.getAddress();
		Address clawback = acct1.getAddress();
		Integer decimals = 0;
		Transaction tx = Transaction.AssetCreateTransactionBuilder().sender(acct1.getAddress()).assetTotal(assetTotal)
				.assetDecimals(decimals).assetUnitName(unitName).assetName(assetName).url(url)
				.metadataHashUTF8(assetMetadataHash).manager(manager).reserve(reserve).freeze(freeze)
				.defaultFrozen(defaultFrozen).clawback(clawback).suggestedParams(params).build();
		// Sign the Transaction with creator account
		SignedTransaction signedTx = acct1.signTransaction(tx);
		Long assetID = null;
		try {
			String id = submitTransaction(signedTx);
			System.out.println("Transaction ID: " + id);
			waitForConfirmation(id);
			// Read the transaction
			PendingTransactionResponse pTrx = client.PendingTransactionInformation(id).execute().body();
			// Now that the transaction is confirmed we can get the assetID
			assetID = pTrx.assetIndex;
			System.out.println("AssetID = " + assetID);
			printCreatedAsset(acct1, assetID);
			printAssetHolding(acct1, assetID);
		} catch (Exception e) {
			e.printStackTrace();
			return;
		}
	}

	/** Uppercased MD5 hex digest of {@code metadata} (32 characters). */
	private String MetadataHash(String metadata){
		return DigestUtils.md5Hex(metadata).toUpperCase();
	}
}
| 39.938889
| 120
| 0.613994
|
f5375bf9ab1a7f11663b1d1537a98cfb8f980759
| 2,056
|
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.onosproject.dhcprelay;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Service;
import org.onlab.packet.BasePacket;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.dhcprelay.api.DhcpHandler;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.packet.PacketContext;
import java.util.Optional;
@Component
@Service
@Property(name = "version", value = "6")
public class Dhcp6HandlerImpl implements DhcpHandler {

    /** Stub — DHCPv6 packet processing is not implemented yet; packets are dropped. */
    @Override
    public void processDhcpPacket(PacketContext context, BasePacket dhcp6Payload) {
    }

    /**
     * FIX: return {@code Optional.empty()} instead of {@code null}. An
     * Optional-returning method must never return null — callers invoking
     * isPresent()/map() on the result would hit a NullPointerException.
     */
    @Override
    public Optional<IpAddress> getDhcpServerIp() {
        return Optional.empty();
    }

    /** Not configured yet — empty rather than null (see {@link #getDhcpServerIp()}). */
    @Override
    public Optional<IpAddress> getDhcpGatewayIp() {
        return Optional.empty();
    }

    /** Not configured yet — empty rather than null (see {@link #getDhcpServerIp()}). */
    @Override
    public Optional<MacAddress> getDhcpConnectMac() {
        return Optional.empty();
    }

    /** Stub — configuration setters are not implemented yet. */
    @Override
    public void setDhcpGatewayIp(IpAddress dhcpGatewayIp) {
    }

    @Override
    public void setDhcpConnectVlan(VlanId dhcpConnectVlan) {
    }

    @Override
    public void setDhcpConnectMac(MacAddress dhcpConnectMac) {
    }

    @Override
    public void setDhcpServerConnectPoint(ConnectPoint dhcpServerConnectPoint) {
    }

    @Override
    public void setDhcpServerIp(IpAddress dhcpServerIp) {
    }
}
| 24.771084
| 83
| 0.736381
|
fe3be9270b3c6038921111822863cdd0a549e39c
| 702
|
package May2021Leetcode;
public class _0419BattleshipsInABoard {

	public static void main(String[] args) {
		char[][] fleet = {
				{'X', '.', '.', 'X'},
				{'.', '.', '.', 'X'},
				{'.', '.', '.', 'X'}};
		System.out.println(countBattleships(fleet));
	}

	/**
	 * Counts battleships on the board. A ship is a straight horizontal or
	 * vertical run of 'X' cells; each ship is counted exactly once, at its
	 * top-left cell.
	 *
	 * @param board grid of 'X' (ship) and '.' (water); may be null or empty
	 * @return number of distinct battleships
	 */
	public static int countBattleships(char[][] board) {
		if (board == null || board.length == 0) {
			return 0;
		}
		int ships = 0;
		for (int row = 0; row < board.length; row++) {
			for (int col = 0; col < board[0].length; col++) {
				if (board[row][col] != 'X') {
					continue;
				}
				// Only the top-left cell of a ship counts: skip cells that
				// extend a ship already seen above or to the left.
				boolean extendsUp = row > 0 && board[row - 1][col] == 'X';
				boolean extendsLeft = col > 0 && board[row][col - 1] == 'X';
				if (!extendsUp && !extendsLeft) {
					ships++;
				}
			}
		}
		return ships;
	}
}
| 24.206897
| 87
| 0.488604
|
0d5789442df7ea654c8f81c8c72b501dd268f938
| 2,044
|
package figures;
import com.jogamp.opengl.*;
import com.jogamp.opengl.awt.GLCanvas;
//import javax.media.opengl.*;
//import javax.media.opengl.awt.GLCanvas;
import javax.swing.*;
public class ICGVector implements GLEventListener {

    /**
     * Renders the figure: one line segment from (-1, 0, 0) to (1, 5, 0).
     *
     * BUG FIX: the original called gl.glBegin(GL2.GL_LINES) twice in a row
     * with no glEnd() for the first call — a glBegin issued inside an open
     * begin/end pair is invalid in OpenGL. A single balanced begin/end pair
     * is issued now; the long-dead commented-out edge-drawing code was
     * removed.
     */
    @Override
    public void display(GLAutoDrawable drawable) {
        final GL2 gl = drawable.getGL().getGL2();
        // drawing the base
        gl.glBegin(GL2.GL_LINES);
        gl.glVertex3f(1f, 5f, 0);
        gl.glVertex3f(-1f, 0f, 0);
        gl.glEnd();
        gl.glFlush();
    }

    /** No resources to release. */
    @Override
    public void dispose(GLAutoDrawable arg0) {
        // method body
    }

    /** No initialization required. */
    @Override
    public void init(GLAutoDrawable arg0) {
        // method body
    }

    /** Viewport handling is left to the default behavior. */
    @Override
    public void reshape(GLAutoDrawable arg0, int arg1, int arg2, int arg3, int arg4) {
        // method body
    }

    /** Opens a 400x400 Swing window with a GL2 canvas that draws the figure. */
    public static void main(String[] args) {
        // getting the capabilities object of GL2 profile
        final GLProfile profile = GLProfile.get(GLProfile.GL2);
        GLCapabilities capabilities = new GLCapabilities(profile);
        // The canvas
        final GLCanvas glcanvas = new GLCanvas(capabilities);
        ICGVector l = new ICGVector();
        glcanvas.addGLEventListener(l);
        glcanvas.setSize(400, 400);
        // creating frame
        final JFrame frame = new JFrame("ICG");
        // adding canvas to frame
        frame.getContentPane().add(glcanvas);
        frame.setSize(frame.getContentPane().getPreferredSize());
        frame.setVisible(true);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    } // end of main
} // end of class
| 25.873418
| 86
| 0.613014
|
b4aec1863c74474d811fe2190afc468afb90b7c7
| 1,881
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contactcenterinsights/v1/contact_center_insights.proto
package com.google.cloud.contactcenterinsights.v1;
// NOTE(review): protoc-generated interface — do not hand-edit; change
// google/cloud/contactcenterinsights/v1/contact_center_insights.proto and
// regenerate instead.
public interface UpdateIssueRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.cloud.contactcenterinsights.v1.UpdateIssueRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Required. The new values for the issue.
   * </pre>
   *
   * <code>.google.cloud.contactcenterinsights.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return Whether the issue field is set.
   */
  boolean hasIssue();
  /**
   * <pre>
   * Required. The new values for the issue.
   * </pre>
   *
   * <code>.google.cloud.contactcenterinsights.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The issue.
   */
  com.google.cloud.contactcenterinsights.v1.Issue getIssue();
  /**
   * <pre>
   * Required. The new values for the issue.
   * </pre>
   *
   * <code>.google.cloud.contactcenterinsights.v1.Issue issue = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  com.google.cloud.contactcenterinsights.v1.IssueOrBuilder getIssueOrBuilder();

  /**
   * <pre>
   * The list of fields to be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   * @return Whether the updateMask field is set.
   */
  boolean hasUpdateMask();
  /**
   * <pre>
   * The list of fields to be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   * @return The updateMask.
   */
  com.google.protobuf.FieldMask getUpdateMask();
  /**
   * <pre>
   * The list of fields to be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder();
}
| 29.390625
| 115
| 0.673578
|
0b6a567eda36afac1ad2b6bfb03513eb716103b6
| 4,162
|
/*
* Copyright 2021 Shulie Technology, Co.Ltd
* Email: shulie@shulie.io
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pamirs.tro.entity.domain.vo;
import java.io.Serializable;
import java.util.Date;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.pamirs.tro.common.util.DateToStringFormatSerialize;
import org.springframework.format.annotation.DateTimeFormat;
/**
 * View object for one shadow-table configuration entry: the application, the
 * source database/table, the shadow table, its datasource and status flags.
 * (Original field comments translated from Chinese to English.)
 */
public class TShadowTableConfigVo implements Serializable {

    private static final long serialVersionUID = -7033824380871303262L;
    /**
     * Shadow table primary-key id.
     */
    private String id;
    /**
     * Application name.
     */
    private String applicationName;
    /**
     * Application id.
     */
    private String applicationId;
    /**
     * Database ip and port.
     */
    private String databaseIpPort;
    /**
     * Business database/table name.
     */
    private String databaseName;
    /**
     * Shadow table name.
     */
    private String shadowTableName;
    /**
     * Enabled status.
     */
    private Integer enableStatus;
    /**
     * Shadow datasource id.
     */
    private String shadowDatasourceId;
    /**
     * Creation time; serialized as "yyyy-MM-dd HH:mm:ss".
     */
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonSerialize(using = DateToStringFormatSerialize.class)
    private Date createTime;

    // NOTE(review): whether the shadow table is used; the meaning of the
    // Integer values is not visible here — confirm against callers.
    private Integer useShadowTable;

    public String getApplicationName() {
        return applicationName;
    }

    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    public String getApplicationId() {
        return applicationId;
    }

    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    public String getDatabaseIpPort() {
        return databaseIpPort;
    }

    public void setDatabaseIpPort(String databaseIpPort) {
        this.databaseIpPort = databaseIpPort;
    }

    public String getDatabaseName() {
        return databaseName;
    }

    public void setDatabaseName(String databaseName) {
        this.databaseName = databaseName;
    }

    public String getShadowTableName() {
        return shadowTableName;
    }

    public void setShadowTableName(String shadowTableName) {
        this.shadowTableName = shadowTableName;
    }

    public Integer getEnableStatus() {
        return enableStatus;
    }

    public void setEnableStatus(Integer enableStatus) {
        this.enableStatus = enableStatus;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getShadowDatasourceId() {
        return shadowDatasourceId;
    }

    public void setShadowDatasourceId(String shadowDatasourceId) {
        this.shadowDatasourceId = shadowDatasourceId;
    }

    public Integer getUseShadowTable() {
        return useShadowTable;
    }

    public void setUseShadowTable(Integer useShadowTable) {
        this.useShadowTable = useShadowTable;
    }

    @Override
    public String toString() {
        return "TShadowTableConfigVo{" +
            "id='" + id + '\'' +
            ", applicationName='" + applicationName + '\'' +
            ", applicationId='" + applicationId + '\'' +
            ", databaseIpPort='" + databaseIpPort + '\'' +
            ", databaseName='" + databaseName + '\'' +
            ", shadowTableName='" + shadowTableName + '\'' +
            ", enableStatus=" + enableStatus +
            ", shadowDatasourceId='" + shadowDatasourceId + '\'' +
            ", createTime=" + createTime +
            ", useShadowTable=" + useShadowTable +
            '}';
    }
}
| 23.91954
| 71
| 0.635752
|
b59bbe68c4b6c4317067f54f266d2ea69e0b8796
| 889
|
package org.javalite.activejdbc.test_models;
import org.javalite.activejdbc.Model;
import org.javalite.conversion.ConverterAdapter;
import org.javalite.common.Convert;
/**
* @author Igor Polevoy
*/
public class Page extends Model {
// Model configuration; runs once when the class is first loaded.
static {
// Reject non-integer word counts and values of 10 or below.
validateNumericalityOf("word_count").greaterThan(10).onlyInteger().message("'word_count' must be an integer greater than 10");
// Apply StringToIntegerConverter to the "word_count" attribute.
convertWith(new StringToIntegerConverter(), "word_count");
}
}
/**
 * Converts String attribute values to Integer. The literal "zero" maps to the
 * number 0; every other value is delegated to {@code Convert.toInteger}.
 */
class StringToIntegerConverter extends ConverterAdapter<String, Integer> {

    @Override
    protected Class<String> sourceClass() {
        return String.class;
    }

    @Override
    protected Class<Integer> destinationClass() {
        return Integer.class;
    }

    @Override
    protected Integer doConvert(String source) throws Exception {
        if ("zero".equals(source)) {
            return 0;
        }
        return Convert.toInteger(source);
    }
}
| 26.147059
| 134
| 0.710911
|
f20cd2a6f5e15a2143244f7de6733a3aa8f96dfb
| 694
|
package matrix.diagonals;
/**
* Class Solution.
* Implements solution to change main diagonal and antidiagonal.
*
* @author Mishin Yura (mishin.inbox@gmail.com)
* @since 12.10.2021
*/
/**
 * Class Solution.
 * Writes 1 onto both the main diagonal and the antidiagonal of a matrix.
 *
 * @author Mishin Yura (mishin.inbox@gmail.com)
 * @since 12.10.2021
 */
public final class Solution {
    /**
     * Sets {@code matrix[row][col]} to 1 wherever the cell lies on the main
     * diagonal ({@code row == col}) or on the antidiagonal
     * ({@code row + col == matrix.length - 1}). All other cells are untouched.
     *
     * @param matrix Matrix, modified in place
     */
    public void process(final int[][] matrix) {
        final int last = matrix.length - 1;
        for (int row = 0; row < matrix.length; row++) {
            for (int col = 0; col < matrix[0].length; col++) {
                final boolean onMain = row == col;
                final boolean onAnti = row + col == last;
                if (onMain || onAnti) {
                    matrix[row][col] = 1;
                }
            }
        }
    }
}
| 23.931034
| 64
| 0.459654
|
19cc108f6e6c3fd0cb4aba0b0543520036dc18b9
| 659
|
package org.robovm.bindings.facebook;
import org.robovm.rt.bro.ValuedEnum;
/** Control when {@link FBAppEvents} sends log events to the server. */
public enum FBAppEventsFlushBehavior implements ValuedEnum {
/** Flush automatically: periodically (once a minute or every 100 logged events) and always at app reactivation. */
Auto,
/** Only flush when the {@link FBAppEvents#flush()} method is called. When an app is moved to background/terminated, the events
* are persisted and re-established at activation, but they will only be written with an explicit call to `flush`. */
ExplicitOnly;
// The bound native value is the declaration index (Auto = 0, ExplicitOnly = 1),
// so the constant order here must stay in sync with the native SDK enum.
@Override
public long value () {
return ordinal();
}
}
| 34.684211
| 128
| 0.749621
|
8da0e75e20a27d51f03c424fc326dcb21faef73a
| 786
|
package de.davelee.trams.crm.services;
import de.davelee.trams.crm.model.Order;
import de.davelee.trams.crm.repository.OrderRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* Class to provide service operations for orders in TraMS CRM.
* @author Dave Lee
*/
@Service
public class OrderService {

    @Autowired
    private OrderRepository orderRepository;

    /**
     * Save the specified order object in the database.
     * @param order a <code>Order</code> object to save in the database.
     * @return a <code>boolean</code> which is true iff the order could be saved successfully.
     */
    public boolean save(final Order order) {
        // Spring Data's save() never returns null — it returns the persisted
        // entity or throws. The old "save(order) != null" check was therefore
        // always true; catch persistence failures so the documented
        // "true iff saved" contract actually holds.
        try {
            return orderRepository.save(order) != null;
        } catch (Exception e) {
            return false;
        }
    }
}
| 27.103448
| 94
| 0.726463
|
33f22ffbd0ec224e2e2e7d66b7fbd7964f7e99a2
| 2,964
|
package wec.workers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import wec.data.RawElasticResult;
import wec.data.WECContext;
import wec.extractors.ExtractFirstParagraph;
import wec.filters.ByCorefFilter;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
public class ExtractSearchNegativeExamplesWorker extends AWorker {
private static final Logger LOGGER = LoggerFactory.getLogger(ExtractSearchNegativeExamplesWorker.class);
// Passages accumulated across all worker instances; run() guards access
// with synchronized (finalFirstPassagesList).
private static final List<String> finalFirstPassagesList = new ArrayList<>();
// Total number of passages flushed to disk so far.
private static final AtomicInteger totalPassages = new AtomicInteger();
// Flush threshold: write the buffer to file once it holds this many passages.
private static final int MAX_TO_SAVE = 10000;
private final ExtractFirstParagraph extractor = new ExtractFirstParagraph();
private final ByCorefFilter filter = new ByCorefFilter();
// Extracts the first paragraph of each raw result in this batch, keeps those
// with more than 10 tokens, a non-empty mention list and a passing coref
// filter, then appends them to the shared buffer (flushing when full).
@Override
public void run() {
LOGGER.debug("Preparing to parse " + this.getRawElasticResults().size() + " wikipedia first passage and validate mentions");
List<String> passagesList = new ArrayList<>();
for(RawElasticResult rawResult : this.getRawElasticResults()) {
List<WECContext> wecContexts = extractor.extract(rawResult);
if (!wecContexts.isEmpty()) {
// Only the first extracted context (the first paragraph) is considered.
WECContext wecContext = wecContexts.get(0);
List<String> contextAsArray = wecContext.getContextAsArray();
if(contextAsArray.size() > 10 && !wecContext.getMentionList().isEmpty() && filter.isConditionMet(wecContext)) {
passagesList.add(String.join(" ", contextAsArray));
}
}
}
synchronized (finalFirstPassagesList) {
finalFirstPassagesList.addAll(passagesList);
if(finalFirstPassagesList.size() >= MAX_TO_SAVE) {
writeToFile();
}
}
invokeListener();
}
public static int getTotalPassages() {
return totalPassages.get();
}
// Flushes any remainder left in the buffer.
// NOTE(review): unlike run(), this reads the shared list without
// synchronizing on it — confirm it is only called after all workers finish.
public static void close() {
if(!finalFirstPassagesList.isEmpty()) {
writeToFile();
}
}
// Appends the buffered passages (one per line) to the output file, updates
// the running total and clears the buffer. Failures are logged, not thrown.
private synchronized static void writeToFile() {
try {
LOGGER.debug("Writing-" + finalFirstPassagesList.size() + " passages");
String json = String.join("\n", finalFirstPassagesList);
Files.writeString(new File("input/Negative_First_Passages.txt").toPath(), json,
StandardCharsets.UTF_8, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
totalPassages.addAndGet(finalFirstPassagesList.size());
LOGGER.debug("Done writing-" + totalPassages.get() + " passages till now");
finalFirstPassagesList.clear();
} catch (Exception e) {
LOGGER.error("Failed to write to file!", e);
}
}
}
| 39
| 132
| 0.668354
|
0b9a2a22226f5590f7941ee262dfed815a7714f5
| 1,827
|
package org.ofbiz.partner.scm.pricemgr;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.Date;
import org.ofbiz.entity.GenericValue;
/**
* 单价管理类
* @author Mark
*
*/
/**
 * Unit-price manager (singleton). Tracks the current accounting period
 * (year/month) and delegates all price calculations to the {@link IPriceCal}
 * implementation created for that period.
 *
 * @author Mark
 */
public class PriceMgr {
    // Current accounting period.
    private int year, month;
    // Concrete price-calculation strategy for the current period.
    private IPriceCal priceCal;

    // Singleton instance; creation is guarded by the synchronized getInstance().
    private static PriceMgr instance = null;

    /**
     * Returns the singleton instance.
     * Synchronized so that two threads racing through the null check cannot
     * each create an instance (the original unsynchronized check-then-act
     * was not thread-safe).
     */
    public static synchronized PriceMgr getInstance() {
        if (instance == null) {
            instance = new PriceMgr();
        }
        return instance;
    }

    private PriceMgr() {
        refreshPeriod();
    }

    /**
     * Re-reads the current system operating date and rebuilds the price
     * calculator (weighted moving average) for that year/month.
     */
    public void refreshPeriod() {
        Date curDate = Utils.getCurDate();
        Calendar cal = Calendar.getInstance();
        cal.setTime(curDate);
        year = cal.get(Calendar.YEAR);
        // Calendar.MONTH is zero-based, hence the +1.
        month = cal.get(Calendar.MONTH) + 1;
        priceCal = PriceCalImpFactory.getPriceCalImp(PriceCalType.WeightedMovingAverage, year, month);
    }

    /**
     * Calculates the unit price and updates the inventory balance table.
     *
     * @param item the calculation request
     * @return the computed unit price
     * @throws Exception if no calculator implementation is configured
     */
    public synchronized BigDecimal calPrice(PriceCalItem item) throws Exception {
        if (priceCal != null) {
            return priceCal.calPrice(item);
        } else {
            throw new Exception("单价计算类实现为空");
        }
    }

    /**
     * Fetches the current balance record for a warehouse/material pair.
     *
     * @param warehouseId warehouse code
     * @param materialId  material code
     * @return the matching balance record
     * @throws Exception if no calculator implementation is configured
     */
    public GenericValue getCurMaterialBalanceValue(String warehouseId, String materialId) throws Exception {
        if (priceCal != null) {
            return priceCal.getCurMaterialBalanceValue(warehouseId, materialId);
        } else {
            throw new Exception("单价计算类实现为空");
        }
    }

    /**
     * Returns the weighted-average unit price for a warehouse/material pair.
     *
     * @throws Exception if no calculator implementation is configured
     */
    public BigDecimal getPrice(String warehouseId, String materialId) throws Exception {
        if (priceCal != null) {
            return priceCal.getPrice(warehouseId, materialId);
        } else {
            throw new Exception("单价计算类实现为空");
        }
    }
}
| 22.280488
| 106
| 0.679803
|
ffc7d0d6d359148fa2fe5e2ad4e0f4c9cceb4493
| 832
|
package com.anthunt.terraform.generator.core.model.terraform.nodes;
import com.anthunt.terraform.generator.core.model.terraform.AbstractMarshaller;
import com.anthunt.terraform.generator.core.model.terraform.elements.TFArguments;
import com.anthunt.terraform.generator.core.model.terraform.types.ProviderType;
import lombok.Builder;
@Builder
public class Provider extends AbstractMarshaller<Provider> {

    private ProviderType providerType;
    private TFArguments arguments;

    /**
     * Renders this provider as an HCL block of the form
     * {@code provider <type> { ...arguments... }} followed by a blank line.
     *
     * @param tabSize indentation level passed through to the nested arguments
     * @return the HCL source text for this provider block
     */
    @Override
    protected String unmarshalling(int tabSize) {
        // StringBuilder instead of StringBuffer: the buffer is a method-local,
        // single-threaded temporary, so StringBuffer's synchronization is
        // pure overhead.
        return new StringBuilder()
                .append("provider ")
                .append(this.providerType.provider())
                .append(" {\n")
                .append(this.arguments.unmarshall(tabSize))
                .append("}\n\n")
                .toString();
    }
}
| 33.28
| 81
| 0.689904
|
2669374cee76501d5a1492146491e8d4112e1545
| 60
|
/**
 * Math operations test.
 */
package ru.job4j.condition;
| 15
| 27
| 0.683333
|
7fa8f7be98439c381b0a3cc7953115421143ce3d
| 9,941
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.Percentile;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase.randomPercents;
import static org.hamcrest.Matchers.equalTo;
// Round-trip and contract tests for InternalPercentilesBucket, the result of
// the percentiles_bucket pipeline aggregation.
public class InternalPercentilesBucketTests extends InternalAggregationTestCase<InternalPercentilesBucket> {
@Override
protected InternalPercentilesBucket createTestInstance(String name, Map<String, Object> metadata) {
return createTestInstance(name, metadata, randomPercents(), true);
}
// Builds an instance with random percentile values; NaN appears occasionally
// to cover the "no data" rendering path.
private static InternalPercentilesBucket createTestInstance(String name, Map<String, Object> metadata,
double[] percents, boolean keyed) {
final double[] percentiles = new double[percents.length];
for (int i = 0; i < percents.length; ++i) {
percentiles[i] = frequently() ? randomDouble() : Double.NaN;
}
return createTestInstance(name, metadata, percents, percentiles, keyed);
}
private static InternalPercentilesBucket createTestInstance(String name, Map<String, Object> metadata,
double[] percents, double[] percentiles, boolean keyed) {
DocValueFormat format = randomNumericDocValueFormat();
return new InternalPercentilesBucket(name, percents, percentiles, keyed, format, metadata);
}
@Override
public void testReduceRandom() {
// reduce() is unsupported for this aggregation, so only assert it throws.
expectThrows(UnsupportedOperationException.class, () -> createTestInstance("name", null).reduce(null, null));
}
@Override
protected void assertReduced(InternalPercentilesBucket reduced, List<InternalPercentilesBucket> inputs) {
// no test since reduce operation is unsupported
}
@Override
protected final void assertFromXContent(InternalPercentilesBucket aggregation, ParsedAggregation parsedAggregation) {
assertTrue(parsedAggregation instanceof ParsedPercentilesBucket);
ParsedPercentilesBucket parsedPercentiles = (ParsedPercentilesBucket) parsedAggregation;
for (Percentile percentile : aggregation) {
Double percent = percentile.getPercent();
assertEquals(aggregation.percentile(percent), parsedPercentiles.percentile(percent), 0);
// we cannot ensure we get the same as_string output for Double.NaN values since they are rendered as
// null and we don't have a formatted string representation in the rest output
if (Double.isNaN(aggregation.percentile(percent)) == false) {
assertEquals(aggregation.percentileAsString(percent), parsedPercentiles.percentileAsString(percent));
}
}
}
/**
* check that we don't rely on the percent array order and that the iterator returns the values in the original order
*/
public void testPercentOrder() {
final double[] percents = new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 };
InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyMap(), percents, randomBoolean());
Iterator<Percentile> iterator = aggregation.iterator();
for (double percent : percents) {
assertTrue(iterator.hasNext());
Percentile percentile = iterator.next();
assertEquals(percent, percentile.getPercent(), 0.0d);
assertEquals(aggregation.percentile(percent), percentile.getValue(), 0.0d);
}
}
// The constructor must reject mismatched percents/percentiles array lengths.
public void testErrorOnDifferentArgumentSize() {
final double[] percents = new double[]{ 0.1, 0.2, 0.3};
final double[] percentiles = new double[]{ 0.10, 0.2};
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents,
percentiles, randomBoolean(), DocValueFormat.RAW, Collections.emptyMap()));
assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]",
e.getMessage());
}
public void testParsedAggregationIteratorOrder() throws IOException {
final InternalPercentilesBucket aggregation = createTestInstance();
final Iterable<Percentile> parsedAggregation = parseAndAssert(aggregation, false, false);
Iterator<Percentile> it = aggregation.iterator();
Iterator<Percentile> parsedIt = parsedAggregation.iterator();
while (it.hasNext()) {
assertEquals(it.next(), parsedIt.next());
}
}
// NaN / infinite percentiles must render as JSON null, in both the keyed
// (object) and unkeyed (array) output styles.
public void testEmptyRanksXContent() throws IOException {
double[] percents = new double[]{1,2,3};
double[] percentiles = new double[3];
for (int i = 0; i < 3; ++i) {
percentiles[i] = randomBoolean() ? Double.NaN : Double.POSITIVE_INFINITY;
}
boolean keyed = randomBoolean();
InternalPercentilesBucket agg = createTestInstance("test", Collections.emptyMap(), percents, percentiles, keyed);
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
String expected;
if (keyed) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"2.0\" : null,\n" +
" \"3.0\" : null\n" +
" }\n" +
"}";
} else {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null\n" +
" }\n" +
" ]\n" +
"}";
}
assertThat(Strings.toString(builder), equalTo(expected));
}
@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
// "values" is rendered as a flat map/array, so random xContent insertion
// under that path would not round-trip.
return path -> path.endsWith(CommonFields.VALUES.getPreferredName());
}
@Override
protected InternalPercentilesBucket mutateInstance(InternalPercentilesBucket instance) {
String name = instance.getName();
double[] percents = extractPercents(instance);
double[] percentiles = extractPercentiles(instance);
DocValueFormat formatter = instance.formatter();
Map<String, Object> metadata = instance.getMetadata();
// Mutate exactly one randomly chosen property so equals/hashCode testing
// can detect the difference.
switch (between(0, 3)) {
case 0:
name += randomAlphaOfLength(5);
break;
case 1:
percents = Arrays.copyOf(percents, percents.length);
percents[percents.length - 1] = randomDouble();
break;
case 2:
percentiles = Arrays.copyOf(percentiles, percentiles.length);
percentiles[percentiles.length - 1] = randomDouble();
break;
case 3:
if (metadata == null) {
metadata = new HashMap<>(1);
} else {
metadata = new HashMap<>(instance.getMetadata());
}
metadata.put(randomAlphaOfLength(15), randomInt());
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new InternalPercentilesBucket(name, percents, percentiles, randomBoolean(), formatter, metadata);
}
// Collects the percentile values in iterator order into a primitive array.
private double[] extractPercentiles(InternalPercentilesBucket instance) {
List<Double> values = new ArrayList<>();
instance.iterator().forEachRemaining(percentile -> values.add(percentile.getValue()));
double[] valuesArray = new double[values.size()];
for (int i = 0; i < values.size(); i++) {
valuesArray[i] = values.get(i);
}
return valuesArray;
}
// Collects the percent keys in iterator order into a primitive array.
private double[] extractPercents(InternalPercentilesBucket instance) {
List<Double> percents = new ArrayList<>();
instance.iterator().forEachRemaining(percentile -> percents.add(percentile.getPercent()));
double[] percentArray = new double[percents.size()];
for (int i = 0; i < percents.size(); i++) {
percentArray[i] = percents.get(i);
}
return percentArray;
}
}
| 43.221739
| 136
| 0.642893
|
6878f85781129f4fca650120891dd6fc5fe218ff
| 3,626
|
package io.confluent.demo.datamesh.cc.datacatalog.api;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import io.confluent.demo.datamesh.cc.datacatalog.model.*;
import io.confluent.demo.datamesh.model.AuditLogEntry;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.client.RestTemplate;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
@Service
public class TagService {

    private final RestTemplate restTemplate;

    /** Mapped to HTTP 404: no DataProduct tag exists for the entity. */
    @ResponseStatus(value= HttpStatus.NOT_FOUND)
    public static class TagNotFoundException extends RuntimeException { }

    public TagService(
            RestTemplateBuilder builder,
            @Value("${confluent.cloud.schemaregistry.auth.key}") String srKey,
            @Value("${confluent.cloud.schemaregistry.auth.secret}") String srSecret,
            @Value("${confluent.cloud.schemaregistry.url}") String baseUrl) {
        restTemplate = builder
                .rootUri(baseUrl + "/catalog/v1")
                .basicAuthentication(srKey, srSecret)
                .build();
    }

    /**
     * Fetches the tags of the given subject version and returns its
     * "DataProduct" tag.
     *
     * @throws TagNotFoundException if the entity carries no DataProduct tag
     */
    public Tag getDataProductTagForSubjectVersion(String subjectVersionQualifiedName) {
        String searchUrl = String.format("/entity/type/sr_subject_version/name/%s/tags", subjectVersionQualifiedName);
        return Arrays.stream(restTemplate.getForEntity(searchUrl, Tag[].class)
                .getBody())
                .filter(tag -> tag.getTypeName().equals("DataProduct"))
                .findFirst().orElseThrow(TagNotFoundException::new);
    }

    /** Removes the DataProduct tag from the given subject version entity. */
    public TagServiceResponse unTagSubjectVersionAsDataProduct(String entityQualifiedName) {
        String url = String.format(
                "/entity/type/sr_subject_version/name/%s/tags/DataProduct",
                entityQualifiedName);
        restTemplate.delete(url);
        return new TagServiceResponse(
                Optional.empty(),
                Optional.of(new AuditLogEntry(
                        String.format("Delete DataProduct tag from entity '%s'", entityQualifiedName),
                        String.format("DELETE %s", url) )));
    }

    /**
     * Tags the given subject version entity with the supplied DataProduct tag.
     *
     * @throws JsonProcessingException if the audit-log body cannot be serialized
     */
    public TagServiceResponse tagSubjectVersionAsDataProduct(
            String entityQualifiedName,
            DataProductTag tag) throws JsonProcessingException
    {
        // Constant path — the original String.format(...) with no format
        // arguments was a no-op wrapper.
        String url = "/entity/tags";
        List<DataProductTagEntityRequest> request = Arrays.asList(
                new DataProductTagEntityRequest(entityQualifiedName, tag));
        /// 404 Not Found: [{"error_code":4040009,"message":
        // "Instance sr_subject_version with unique attribute
        // {qualifiedName=lsrc-7xxv2:.:rc-7xxv2:.:pksqlc-09g26PAGEVIEWS_USER2-value:2} does not exist"}]
        ResponseEntity<TagResponse[]> response = restTemplate.postForEntity(
                url, request, TagResponse[].class);
        return new TagServiceResponse(
                Optional.of(response.getBody()),
                Optional.of(new AuditLogEntry(
                        String.format("Tag entity '%s' with 'DataProduct' tag", entityQualifiedName),
                        String.format("POST %s\n%s",
                                url,
                                new ObjectMapper().writer().withDefaultPrettyPrinter().writeValueAsString(request)))));
    }
}
| 43.686747
| 118
| 0.686707
|
2fb2f38e94fd22f36a024b00e1c4e250b579f96b
| 2,376
|
/*
* Copyright 2014 - 2019 Michael Rapp
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.adapter.list.sortable;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.util.Collection;
import java.util.Comparator;
import de.mrapp.android.adapter.list.ListAdapter;
import de.mrapp.android.adapter.Order;
/**
* Defines the interface, all listeners, which should be notified when the underlying data of a
* {@link ListAdapter} have been sorted, must implement.
*
* @param <DataType>
* The type of the observed adapter's underlying data
* @author Michael Rapp
* @since 0.1.0
*/
public interface ListSortingListener<DataType> {
/**
* The method, which is invoked, when the adapter's items have been sorted.
*
* @param adapter
* The observed adapter as an instance of the type {@link ListAdapter}. The adapter may
* not be null
* @param sortedItems
* A collection, which contains the adapter's sorted items, as an instance of the type
* {@link Collection} or an empty collection, if the adapter does not contain any items
* @param order
* The order, which has been used to sort the adapter's items, as a value of the enum
* {@link Order}. The order may either be <code>ASCENDING</code> or
* <code>DESCENDING</code>
* @param comparator
* The comparator, which has been used to compare the single items, as an instance of
* the type {@link Comparator} or null, if the items' implementation of the type {@link
* Comparable} has been used instead
*/
void onSorted(@NonNull ListAdapter<DataType> adapter, @NonNull Collection<DataType> sortedItems,
@NonNull Order order, @Nullable Comparator<DataType> comparator);
}
| 41.684211
| 100
| 0.694444
|
88545fb6b4ca2f175b15a234faec9580900f82ce
| 278
|
package guru.springframework.commands;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
* Created by jt on 6/21/17.
*/
@Setter
@Getter
@NoArgsConstructor
public class CategoryCommand {
// Identifier of the category this command transports.
private String id;
// Display text of the category.
private String description;
}
| 16.352941
| 38
| 0.758993
|
188ecd72ce5f599b4cff84c32ae7a8ebacbf0a0e
| 1,354
|
package bibioteca;
import java.util.Scanner;
public class Bibioteca {

    /**
     * Console flow: reads the data of one book from stdin, then prints it
     * back together with the result of the price check.
     */
    public static void main(String[] args) {
        // try-with-resources closes the Scanner when the program is done with
        // stdin (the original never closed it).
        try (Scanner leitura = new Scanner(System.in)) {
            livro l1 = new livro();
            System.out.println("Digite o nome do título: ");
            l1.titulo = leitura.nextLine();
            System.out.println("Digite o nome do autor:");
            l1.autor = leitura.nextLine();
            System.out.println("Digite o gênero do livro: ");
            l1.setGenero(leitura.nextLine());
            System.out.println("Digite o ano do livro: ");
            l1.setAno(leitura.nextInt());
            System.out.println("Digite o preço do livro: ");
            l1.setPreco(leitura.nextDouble());
            System.out.println("::::::::::::::::::::::::::::::");
            System.out.println("::::::::::::::::::::::::::::::");
            System.out.println("::::::::::::::::::::::::::::::");
            System.out.println("Nome do titulo do livro: " + l1.getTitulo());
            System.out.println("Nome do autor: " + l1.getAutor());
            // Fixed typo in the label below ("DGênero" -> "Gênero").
            System.out.println("Gênero do livro: " + l1.getGenero());
            System.out.println("Ano do livro: " + l1.getAno());
            System.out.println("Preço do livro: " + l1.getPreco());
            System.out.println(l1.verificarPreco());
        }
    }
}
| 37.611111
| 72
| 0.519202
|
f506b83696f9aac2513490af30b935f127ff7032
| 170
|
package bingo.odata.expression;
public interface IsofExpression extends BoolExpression {
// Operand being type-tested; optional — may be absent for the one-argument
// isof(type) form (TODO confirm against the expression parser).
Expression getExpression(); // optional
// Name of the type the expression is tested against.
String getType();
}
| 17
| 57
| 0.711765
|
22244a330c27f5a689328908ac8d456b1be7941b
| 1,506
|
package org.anc.lapps.gate;
import gate.Document;
import gate.Factory;
import gate.FeatureMap;
import org.lappsgrid.core.DataFactory;
import org.lappsgrid.annotations.ServiceMetadata;
import org.lappsgrid.vocabulary.Annotations;
/**
* @author Keith Suderman
*/
@ServiceMetadata(
description = "GATE OrthoMatcher",
requires = {"http://vocab.lappsgrid.org/Person"},
produces = {"\thttp://vocab.lappsgrid.org/NamedEntity#matches"}
)
public class OrthoMatcher extends SimpleGateService
{
public OrthoMatcher()
{
super(OrthoMatcher.class);
// Register the GATE OrthoMatcher CREOLE resource this service wraps.
createResource("gate.creole.orthomatcher.OrthoMatcher");
}
// Runs the OrthoMatcher over the input document and records provenance in
// the document features before serializing the result back to GATE XML.
public String execute(String input)
{
Document document = null;
try
{
document = doExecute(input);
}
catch (Exception e)
{
return DataFactory.error("Unable to execute the OrthoMatcher.", e);
}
if (document == null)
{
return DataFactory.error("This was unexpected...");
}
String producer = this.getClass().getName() + "_" + Version.getVersion();
FeatureMap features = document.getFeatures();
// "lapps:step" counts processing steps applied to the document; treat a
// missing value as step 1 (first processor in the pipeline).
Integer step = (Integer) features.get("lapps:step");
if (step == null) {
step = 1;
}
features.put("lapps:step", step + 1);
features.put("lapps:coref", step + " " + producer + " coref:annie");
String result = DataFactory.gateDocument(document.toXml());
// Release the GATE resource so documents are not leaked.
Factory.deleteResource(document);
return result;
}
}
| 27.381818
| 79
| 0.641434
|
24b65c1c0bcc2d10c764474799c5ba6706463cbb
| 209
|
package prospector.industryreborn.tiles;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ITickable;
// Common base for all Industry Reborn machine tile entities. Implementing
// ITickable means concrete machines receive Minecraft's per-tick update()
// callback.
public abstract class TileEntityMachine extends TileEntity implements ITickable {
}
| 23.222222
| 81
| 0.84689
|
aab2b0e63831b0ced5d3f6bf098750a533c58b3d
| 364
|
package slogo.compiler.turtle.command;
import slogo.compiler.turtle.TurtleCommand;
/**
 * SLogo "forward" command: moves the turtle forward by the distance given as
 * its single argument.
 */
public class ForwardCommand extends TurtleCommand {

    /**
     * Builds the command from its textual declaration; the command consumes
     * exactly one argument (the distance to travel).
     */
    public ForwardCommand(String declaration) {
        super(declaration);
        desiredArgs = 1;
    }

    /**
     * Evaluates the distance argument, moves the turtle by that amount and
     * returns the distance.
     */
    @Override
    public double executeTurtle() {
        final double distance = args.get(0).execute();
        turtle.move(distance);
        return distance;
    }
}
| 19.157895
| 51
| 0.714286
|
e3efd5ae312af13f6e6934af638742112ffb1b2c
| 5,049
|
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
import org.hibernate.Session;
import org.hibernate.Transaction;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.ResourceBundle;
public class ManagmentScene_Controller implements Initializable {
@FXML
private TextField insertName;
@FXML
private TextField insertPrice;
@FXML
private TextField insertAmount;
@FXML
private TextField insertSerialNumber;
@FXML
private ChoiceBox<String> chooseCategory;
@FXML
private Button addButton;
@FXML
private Label currentlyLoggedIn;
@FXML
private TextField insertName1;
@FXML
private Button addButton1;
Hibernate_Controller hibernate_controller = new Hibernate_Controller();
Session session = hibernate_controller.getSession();
@FXML
void branchButton( MouseEvent event) throws IOException {
Parent root = FXMLLoader.load(getClass().getResource("View/branchScene.fxml"));
Scene scene = new Scene(root);
Stage stage = (Stage) ((Node)event.getSource()).getScene().getWindow();
stage.setScene(scene);
stage.show();
}
@FXML
void cartButton(MouseEvent event) throws IOException {
Parent root = FXMLLoader.load(getClass().getResource("View/cartScene.fxml"));
Scene scene = new Scene(root);
Stage stage = (Stage) ((Node)event.getSource()).getScene().getWindow();
stage.setScene(scene);
stage.show();
}
@FXML
void currentWorkerButton(MouseEvent event) throws IOException {
Parent root = FXMLLoader.load(getClass().getResource("View/currentWorkerScene.fxml"));
Scene scene = new Scene(root);
Stage stage = (Stage) ((Node)event.getSource()).getScene().getWindow();
stage.setScene(scene);
stage.show();
}
@FXML
void shopButton(MouseEvent event) throws IOException {
Parent root = FXMLLoader.load(getClass().getResource("View/shopScene.fxml"));
Scene scene = new Scene(root);
Stage stage = (Stage) ((Node)event.getSource()).getScene().getWindow();
stage.setScene(scene);
stage.show();
}
@FXML
void workersButton(MouseEvent event) throws IOException {
Parent root = FXMLLoader.load(getClass().getResource("View/workersScene.fxml"));
Scene scene = new Scene(root);
Stage stage = (Stage) ((Node)event.getSource()).getScene().getWindow();
stage.setScene(scene);
stage.show();
}
@Override
public void initialize ( URL location , ResourceBundle resources ) {
addNewPart();
currentlyLogged();
}
public void addNewPart(){
List<Category> category;
category = session.createQuery( "FROM Category" ).getResultList();
ObservableList<Category> categoryObservableList = FXCollections.observableArrayList(category);
chooseCategory.getItems().clear();
insertSerialNumber.clear();
insertName.clear();
insertAmount.clear();
insertPrice.clear();
insertName1.clear();
int i = 0;
while(i < categoryObservableList.size()) {
chooseCategory.getItems( ).add( categoryObservableList.get( i ).getCategory_name() );
i++;
}
addButton.setOnAction(event -> {
if(!insertPrice.getText().isEmpty() || !insertAmount.getText().isEmpty() || !insertName.getText().isEmpty() || !insertSerialNumber.getText().isEmpty()){
Transaction transaction = session.beginTransaction();
CarParts tempCze = new CarParts( insertName.getText(), Long.parseLong( insertSerialNumber.getText() ), Float.parseFloat( insertPrice.getText() ), Integer.parseInt( insertAmount.getText() ), "Dostepny" );
Category tempKat = session.get( Category.class, category.get( chooseCategory.getSelectionModel().getSelectedIndex() ).getId_cat());
tempCze.setKategoria( tempKat );
session.save( tempCze );
transaction.commit();
}
} );
addButton1.setOnAction(event -> {
if(!insertName1.getText().isEmpty()){
Category temp = new Category( insertName1.getText() );
Transaction transaction = session.beginTransaction();
session.save( temp );
transaction.commit();
addNewPart();
}
} );
}
public void currentlyLogged() {
LoginScene_Controller loginScene_controller = new LoginScene_Controller();
currentlyLoggedIn.setText( loginScene_controller.getNameTemp() + " " + loginScene_controller.getSurnameTemp() );
}
}
| 33.885906
| 219
| 0.654189
|
64c1f3a4a55da07053b98560007c473213f2af18
| 2,954
|
package com.loginlogout;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
 * Servlet implementation class LoginServlet.
 *
 * <p>Authenticates a user first against the {@code admindata} table and, if no
 * admin matches, against the {@code librarian} table. On success the user's
 * name is stored in the HTTP session and the browser is redirected to the
 * appropriate landing page; on failure an alert is shown and the browser is
 * sent back to the login page.</p>
 */
@WebServlet("/LoginServlet")
public class LoginServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	// SECURITY(review): credentials are hard-coded and the root account has an
	// empty password — move these to a container-managed DataSource (JNDI)
	// before production use.
	private static final String JDBC_URL = "jdbc:mysql://localhost:3306/librarydb";
	private static final String DB_USER = "root";
	private static final String DB_PASSWORD = "";
	private static final String DRIVER_CLASS_NAME = "com.mysql.cj.jdbc.Driver";

	/**
	 * @see HttpServlet#HttpServlet()
	 */
	public LoginServlet() {
		super();
	}

	/**
	 * Handles the login form post; expects {@code email} and {@code password}
	 * request parameters.
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		String email = request.getParameter("email");
		String password = request.getParameter("password");
		HttpSession sc = request.getSession();
		PrintWriter out = response.getWriter();
		try {
			Class.forName(DRIVER_CLASS_NAME);
			// try-with-resources closes the connection (and, in the helpers,
			// the statements and result sets) on every path; the original
			// leaked all of them on each request.
			try (Connection con = DriverManager.getConnection(JDBC_URL, DB_USER, DB_PASSWORD)) {
				// Same precedence as before: admin table first, then librarian.
				if (!loginAsAdmin(con, email, password, sc, out)
						&& !loginAsLibrarian(con, email, password, sc, out)) {
					out.println("<script>alert('Incorrect Email Or Password Please Enter Again')</script>");
					out.println("<script>\r\n"
							+ "	window.location = 'index.jsp'"
							+ "</script>");
				}
			}
		} catch (Exception ex) {
			// The original swallowed every error behind a stray debug print;
			// at minimum record the failure so it can be diagnosed.
			ex.printStackTrace();
		}
	}

	/**
	 * Tries the admin credentials table. On a match stores the admin's name in
	 * the session under {@code LoginOfAdmin} and redirects to the admin page.
	 *
	 * @return true if an admin row matched
	 */
	private boolean loginAsAdmin(Connection con, String email, String password,
			HttpSession sc, PrintWriter out) throws Exception {
		// SECURITY(review): passwords are stored and compared in plain text in
		// SQL — they should be hashed (e.g. bcrypt) and verified in code.
		try (PreparedStatement st = con.prepareStatement("select * from admindata where EmailId = ? AND Password = ?")) {
			st.setString(1, email);
			st.setString(2, password);
			try (ResultSet rs = st.executeQuery()) {
				if (!rs.next()) {
					return false;
				}
				sc.setAttribute("LoginOfAdmin", rs.getString("Name"));
				out.println("<script>\r\n"
						+ "	window.location = 'adminWork.jsp'"
						+ "</script>");
				return true;
			}
		}
	}

	/**
	 * Tries the librarian credentials table. On a match stores the librarian's
	 * name in the session under {@code LoginOfLibrarian} and redirects to the
	 * book view page.
	 *
	 * @return true if a librarian row matched
	 */
	private boolean loginAsLibrarian(Connection con, String email, String password,
			HttpSession sc, PrintWriter out) throws Exception {
		try (PreparedStatement st1 = con.prepareStatement("select * from librarian where LemailId = ? AND Lpassword = ?")) {
			st1.setString(1, email);
			st1.setString(2, password);
			try (ResultSet rs1 = st1.executeQuery()) {
				if (!rs1.next()) {
					return false;
				}
				sc.setAttribute("LoginOfLibrarian", rs1.getString("Lname"));
				out.println("<script>\r\n"
						+ "	window.location = 'ViewBook.jsp'"
						+ "</script>");
				return true;
			}
		}
	}
}
| 33.954023
| 132
| 0.596141
|
0c738c6548a35bc8837135f0b25d9dfb7f1a3c05
| 1,114
|
package org.fluxtream.connectors.runkeeper;
import org.fluxtream.core.TimeInterval;
import org.fluxtream.core.connectors.vos.AbstractTimedFacetVO;
import org.fluxtream.core.domain.GuestSettings;
import org.fluxtream.core.mvc.models.DurationModel;
/**
 * View object exposing the display-relevant fields of a RunKeeper fitness
 * activity facet.
 *
 * @author Candide Kemmler (candide@fluxtream.com)
 */
public class RunKeeperFitnessActivityFacetVO extends AbstractTimedFacetVO<RunKeeperFitnessActivityFacet> {

    public Integer averageHeartRate;
    public double total_distance;
    public Double total_climb;
    public String activityType;
    public Double totalCalories;

    @Override
    protected void fromFacet(final RunKeeperFitnessActivityFacet facet, final TimeInterval timeInterval, final GuestSettings settings) {
        // A heart rate is only meaningful when present and strictly positive;
        // anything else is normalized to null.
        Integer heartRate = facet.averageHeartRate;
        if (heartRate == null || heartRate <= 0) {
            heartRate = null;
        }
        this.averageHeartRate = heartRate;
        this.duration = new DurationModel(facet.duration);
        this.total_distance = facet.total_distance;
        this.total_climb = facet.total_climb;
        this.totalCalories = facet.totalCalories;
        this.activityType = facet.type;
    }
}
| 35.935484
| 136
| 0.77289
|
931e68096fb76935e128f3a50086cb455092df33
| 1,232
|
package org.easyubl.models.jpa.entity;
import org.easyubl.models.CollaboratorPermissionType;
import javax.persistence.*;
@Table(name = "cl_migration_model")
@Entity
public class MigrationModelEntity {

    /** The fixed primary key: only one row of this entity is expected to exist. */
    public static final String SINGLETON_ID = "SINGLETON";

    @Id
    @Column(name = "id", length = 36)
    // FIX: @Access requires a javax.persistence.AccessType value;
    // CollaboratorPermissionType.PROPERTY was a bad rename and does not
    // type-check against this annotation.
    @Access(AccessType.PROPERTY)
    // we do this because relationships often fetch id, but not entity. This avoids an extra SQL
    private String id;

    @Column(name = "version", length = 36)
    protected String version;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    /**
     * Identity is based solely on {@code id}; persisted instances are expected
     * to have a non-null id.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(o instanceof MigrationModelEntity)) return false;
        MigrationModelEntity that = (MigrationModelEntity) o;
        return id.equals(that.id);
    }

    @Override
    public int hashCode() {
        return id.hashCode();
    }
}
| 21.614035
| 97
| 0.636364
|
1ec469c4afef69a7328fb80df7b2e88858661176
| 3,414
|
package org.batfish.common.topology.broadcast;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.annotations.VisibleForTesting;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.batfish.datamodel.collections.NodeInterfacePair;
/**
* Represents any interface with an IP address.
*
* <p>SVI/IRB/Vlan interfaces connect to the {@link DeviceBroadcastDomain} they are associated with.
*
* <p>Ethernet (sub)interfaces connect to the {@link PhysicalInterface} they are bound to. Even if
* the configuration has an IP address directly assigned to Ethernet1, say, there will be both an
* {@link L3Interface} and {@link PhysicalInterface} for Ethernet1.
*/
public final class L3Interface extends Node<L3Interface.Unit> {
  /** There is no data needed for {@link L3Interface}, but we can't have a non-null {@link Void} */
  public enum Unit {
    /** The only legal value. */
    VALUE
  }

  /** Creates an L3 interface node identified by the given (node, interface) pair. */
  public L3Interface(NodeInterfacePair iface) {
    _iface = iface;
  }

  /** Returns the (node, interface) pair identifying this interface. */
  public @Nonnull NodeInterfacePair getIface() {
    return _iface;
  }

  /**
   * Starts a broadcast-domain traversal from this interface: adds it to {@code domain} and
   * forwards through whichever single attachment (physical interface or switch) is configured.
   */
  public void originate(Set<L3Interface> domain, Set<NodeAndData<?, ?>> visited) {
    // No need to check or add this to visited, we'll never come back here.
    domain.add(this);
    // Contract: an L3 interface is connected to at most one of the two attachment kinds.
    assert _sendToInterface == null || _sendToSwitch == null;
    if (_sendToInterface != null) {
      assert _sendToInterfaceEdge != null; // contract
      _sendToInterfaceEdge
          .traverse(Unit.VALUE)
          .ifPresent(tag -> _sendToInterface.transmit(tag, domain, visited));
    }
    if (_sendToSwitch != null) {
      assert _sendToSwitchEdge != null; // contract
      _sendToSwitchEdge
          .traverse(Unit.VALUE)
          .ifPresent(vlan -> _sendToSwitch.broadcast(vlan, domain, visited));
    }
  }

  /** Records that a traversal reached this interface; terminal — nothing is forwarded on. */
  public void reached(Set<L3Interface> domain, Set<NodeAndData<?, ?>> visited) {
    domain.add(this);
  }

  /** Attaches this interface directly to a physical interface; may be called at most once. */
  public void sendDirectlyOutIface(PhysicalInterface iface, Edge<Unit, EthernetTag> edge) {
    checkState(
        _sendToInterface == null && _sendToSwitch == null,
        "Cannot connect an L3Interface to two places");
    _sendToInterface = iface;
    _sendToInterfaceEdge = edge;
  }

  /** Attaches this interface to a device broadcast domain; may be called at most once. */
  public void sendThroughSwitch(DeviceBroadcastDomain sw, Edge<Unit, Integer> edge) {
    checkState(
        _sendToInterface == null && _sendToSwitch == null,
        "Cannot connect an L3Interface to two places");
    _sendToSwitch = sw;
    _sendToSwitchEdge = edge;
  }

  // Internal details

  // At most one of _sendToInterface/_sendToSwitch is non-null; the matching
  // edge field is non-null exactly when its target is (enforced above).
  private @Nullable PhysicalInterface _sendToInterface;
  private @Nullable Edge<Unit, EthernetTag> _sendToInterfaceEdge;
  private @Nullable DeviceBroadcastDomain _sendToSwitch;
  private @Nullable Edge<Unit, Integer> _sendToSwitchEdge;

  @VisibleForTesting
  @Nullable
  PhysicalInterface getSendToInterfaceForTesting() {
    return _sendToInterface;
  }

  @VisibleForTesting
  @Nullable
  DeviceBroadcastDomain getSendToSwitchForTesting() {
    return _sendToSwitch;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof L3Interface)) {
      return false;
    }
    L3Interface that = (L3Interface) o;
    return _iface.equals(that._iface);
  }

  @Override
  public int hashCode() {
    return 31 * L3Interface.class.hashCode() + _iface.hashCode();
  }

  private final @Nonnull NodeInterfacePair _iface;
}
| 30.756757
| 100
| 0.70826
|
e342138899273c804f1028425c29f17494eb6c1c
| 39,659
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is not the original file distributed by the Apache Software Foundation
* It has been modified by the Hipparchus project
*/
package org.hipparchus.stat.regression;
import java.util.Arrays;
import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.stat.LocalizedStatFormats;
import org.hipparchus.util.FastMath;
import org.hipparchus.util.MathUtils;
import org.hipparchus.util.Precision;
/**
* This class is a concrete implementation of the {@link UpdatingMultipleLinearRegression} interface.
*
* <p>The algorithm is described in: <pre>
* Algorithm AS 274: Least Squares Routines to Supplement Those of Gentleman
* Author(s): Alan J. Miller
* Source: Journal of the Royal Statistical Society.
* Series C (Applied Statistics), Vol. 41, No. 2
* (1992), pp. 458-478
* Published by: Blackwell Publishing for the Royal Statistical Society
* Stable URL: http://www.jstor.org/stable/2347583 </pre></p>
*
* <p>This method for multiple regression forms the solution to the OLS problem
* by updating the QR decomposition as described by Gentleman.</p>
*
*/
public class MillerUpdatingRegression implements UpdatingMultipleLinearRegression {
    /** number of variables in regression (including the intercept slot, if any) */
    private final int nvars;
    /** diagonals of cross products matrix */
    private final double[] d;
    /** the elements of the R'y vector (transformed right-hand side) */
    private final double[] rhs;
    /** the off diagonal portion of the R matrix, stored row-wise as a packed upper triangle */
    private final double[] r;
    /** the tolerance for each of the variables */
    private final double[] tol;
    /** residual sum of squares for all nested regressions */
    private final double[] rss;
    /** order of the regressors */
    private final int[] vorder;
    /** scratch space for tolerance calc */
    private final double[] work_tolset;
    /** number of observations entered */
    private long nobs;
    /** sum of squared errors of largest regression */
    private double sserr;
    /** has rss been computed for the current data? */
    private boolean rss_set;
    /** has the tolerance setting method been called */
    private boolean tol_set;
    /** flags for variables with linear dependency problems */
    private final boolean[] lindep;
    /** singular x values */
    private final double[] x_sing;
    /** workspace for singularity method */
    private final double[] work_sing;
    /** summation of Y variable */
    private double sumy;
    /** summation of squared Y values */
    private double sumsqy;
    /** boolean flag whether a regression constant is added */
    private final boolean hasIntercept;
    /** zero tolerance */
    private final double epsilon;
    /**
     * Set the default constructor to private access
     * to prevent inadvertent instantiation.
     * Delegating with -1 variables guarantees an exception if this is ever
     * invoked (e.g. reflectively), since the public constructor rejects it.
     */
    @SuppressWarnings("unused")
    private MillerUpdatingRegression() {
        this(-1, false, Double.NaN);
    }
/**
* This is the augmented constructor for the MillerUpdatingRegression class.
*
* @param numberOfVariables number of regressors to expect, not including constant
* @param includeConstant include a constant automatically
* @param errorTolerance zero tolerance, how machine zero is determined
* @throws MathIllegalArgumentException if {@code numberOfVariables is less than 1}
*/
public MillerUpdatingRegression(int numberOfVariables, boolean includeConstant, double errorTolerance)
throws MathIllegalArgumentException {
if (numberOfVariables < 1) {
throw new MathIllegalArgumentException(LocalizedStatFormats.NO_REGRESSORS);
}
if (includeConstant) {
this.nvars = numberOfVariables + 1;
} else {
this.nvars = numberOfVariables;
}
this.hasIntercept = includeConstant;
this.nobs = 0;
this.d = new double[this.nvars];
this.rhs = new double[this.nvars];
this.r = new double[this.nvars * (this.nvars - 1) / 2];
this.tol = new double[this.nvars];
this.rss = new double[this.nvars];
this.vorder = new int[this.nvars];
this.x_sing = new double[this.nvars];
this.work_sing = new double[this.nvars];
this.work_tolset = new double[this.nvars];
this.lindep = new boolean[this.nvars];
for (int i = 0; i < this.nvars; i++) {
vorder[i] = i;
}
if (errorTolerance > 0) {
this.epsilon = errorTolerance;
} else {
this.epsilon = -errorTolerance;
}
}
    /**
     * Primary constructor for the MillerUpdatingRegression.
     * Uses the machine epsilon ({@link Precision#EPSILON}) as the zero tolerance.
     *
     * @param numberOfVariables maximum number of potential regressors
     * @param includeConstant include a constant automatically
     * @throws MathIllegalArgumentException if {@code numberOfVariables is less than 1}
     */
    public MillerUpdatingRegression(int numberOfVariables, boolean includeConstant)
    throws MathIllegalArgumentException {
        this(numberOfVariables, includeConstant, Precision.EPSILON);
    }
/**
* A getter method which determines whether a constant is included.
* @return true regression has an intercept, false no intercept
*/
@Override
public boolean hasIntercept() {
return this.hasIntercept;
}
/**
* Gets the number of observations added to the regression model.
* @return number of observations
*/
@Override
public long getN() {
return this.nobs;
}
/**
* Adds an observation to the regression model.
* @param x the array with regressor values
* @param y the value of dependent variable given these regressors
* @exception MathIllegalArgumentException if the length of {@code x} does not equal
* the number of independent variables in the model
*/
@Override
public void addObservation(final double[] x, final double y)
throws MathIllegalArgumentException {
if ((!this.hasIntercept && x.length != nvars) ||
(this.hasIntercept && x.length + 1 != nvars)) {
throw new MathIllegalArgumentException(LocalizedStatFormats.INVALID_REGRESSION_OBSERVATION,
x.length, nvars);
}
if (!this.hasIntercept) {
include(x.clone(), 1.0, y);
} else {
final double[] tmp = new double[x.length + 1];
System.arraycopy(x, 0, tmp, 1, x.length);
tmp[0] = 1.0;
include(tmp, 1.0, y);
}
++nobs;
}
/**
* Adds multiple observations to the model.
* @param x observations on the regressors
* @param y observations on the regressand
* @throws MathIllegalArgumentException if {@code x} is not rectangular, does not match
* the length of {@code y} or does not contain sufficient data to estimate the model
*/
@Override
public void addObservations(double[][] x, double[] y) throws MathIllegalArgumentException {
MathUtils.checkNotNull(x, LocalizedCoreFormats.INPUT_ARRAY);
MathUtils.checkNotNull(y, LocalizedCoreFormats.INPUT_ARRAY);
MathUtils.checkDimension(x.length, y.length);
if (x.length == 0) { // Must be no y data either
throw new MathIllegalArgumentException(LocalizedCoreFormats.NO_DATA);
}
if (x[0].length + 1 > x.length) {
throw new MathIllegalArgumentException(
LocalizedStatFormats.NOT_ENOUGH_DATA_FOR_NUMBER_OF_PREDICTORS,
x.length, x[0].length);
}
for (int i = 0; i < x.length; i++) {
addObservation(x[i], y[i]);
}
}
    /**
     * The include method is where the QR decomposition occurs. This statement forms all
     * intermediate data which will be used for all derivative measures.
     * According to the miller paper, note that in the original implementation the x vector
     * is overwritten. In this implementation, the include method is passed a copy of the
     * original data vector so that there is no contamination of the data. Additionally,
     * this method differs slightly from Gentleman's method, in that the assumption is
     * of dense design matrices, there is some advantage in using the original gentleman algorithm
     * on sparse matrices.
     *
     * @param x observations on the regressors (overwritten during the update)
     * @param wi weight of this observation (-1,1)
     * @param yi observation on the regressand
     */
    private void include(final double[] x, final double wi, final double yi) {
        int nextr = 0;
        double w = wi;
        double y = yi;
        double xi;
        double di;
        double wxi;
        double dpi;
        double xk;
        double _w;
        // Any previously computed residual sums of squares are now stale.
        this.rss_set = false;
        sumy = smartAdd(yi, sumy);
        sumsqy = smartAdd(sumsqy, yi * yi);
        for (int i = 0; i < x.length; i++) {
            if (w == 0.0) {
                // A zero remaining weight means this observation can no longer
                // affect the rest of the decomposition.
                return;
            }
            xi = x[i];
            if (xi == 0.0) {
                // Skip the packed off-diagonal entries for this variable.
                nextr += nvars - i - 1;
                continue;
            }
            di = d[i];
            wxi = w * xi;
            _w = w;
            if (di != 0.0) {
                dpi = smartAdd(di, wxi * xi);
                final double tmp = wxi * xi / di;
                // Only rescale the weight when the update is numerically significant.
                if (FastMath.abs(tmp) > Precision.EPSILON) {
                    w = (di * w) / dpi;
                }
            } else {
                dpi = wxi * xi;
                w = 0.0;
            }
            d[i] = dpi;
            // Fold this observation into row i of the decomposition,
            // updating the remaining regressor columns...
            for (int k = i + 1; k < nvars; k++) {
                xk = x[k];
                x[k] = smartAdd(xk, -xi * r[nextr]);
                if (di != 0.0) {
                    r[nextr] = smartAdd(di * r[nextr], (_w * xi) * xk) / dpi;
                } else {
                    r[nextr] = xk / xi;
                }
                ++nextr;
            }
            // ...and the regressand.
            xk = y;
            y = smartAdd(xk, -xi * rhs[i]);
            if (di != 0.0) {
                rhs[i] = smartAdd(di * rhs[i], wxi * xk) / dpi;
            } else {
                rhs[i] = xk / xi;
            }
        }
        // Whatever is left of y after all projections contributes to the error.
        sserr = smartAdd(sserr, w * y * y);
    }
/**
* Adds to number a and b such that the contamination due to
* numerical smallness of one addend does not corrupt the sum.
* @param a - an addend
* @param b - an addend
* @return the sum of the a and b
*/
private double smartAdd(double a, double b) {
final double _a = FastMath.abs(a);
final double _b = FastMath.abs(b);
if (_a > _b) {
final double eps = _a * Precision.EPSILON;
if (_b > eps) {
return a + b;
}
return a;
} else {
final double eps = _b * Precision.EPSILON;
if (_a > eps) {
return a + b;
}
return b;
}
}
/**
* As the name suggests, clear wipes the internals and reorders everything in the
* canonical order.
*/
@Override
public void clear() {
Arrays.fill(this.d, 0.0);
Arrays.fill(this.rhs, 0.0);
Arrays.fill(this.r, 0.0);
Arrays.fill(this.tol, 0.0);
Arrays.fill(this.rss, 0.0);
Arrays.fill(this.work_tolset, 0.0);
Arrays.fill(this.work_sing, 0.0);
Arrays.fill(this.x_sing, 0.0);
Arrays.fill(this.lindep, false);
for (int i = 0; i < nvars; i++) {
this.vorder[i] = i;
}
this.nobs = 0;
this.sserr = 0.0;
this.sumy = 0.0;
this.sumsqy = 0.0;
this.rss_set = false;
this.tol_set = false;
}
    /**
     * This sets up tolerances for singularity testing.
     * Each tolerance is epsilon times a scaled norm of the corresponding
     * column of the decomposition.
     */
    private void tolset() {
        int pos;
        double total;
        final double eps = this.epsilon;
        // Column scales: square roots of the row multipliers.
        for (int i = 0; i < nvars; i++) {
            this.work_tolset[i] = FastMath.sqrt(d[i]);
        }
        tol[0] = eps * this.work_tolset[0];
        for (int col = 1; col < nvars; col++) {
            pos = col - 1;
            total = work_tolset[col];
            // Walk the packed column of R above the diagonal.
            for (int row = 0; row < col; row++) {
                total += FastMath.abs(r[pos]) * work_tolset[row];
                pos += nvars - row - 2;
            }
            tol[col] = eps * total;
        }
        tol_set = true;
    }
    /**
     * The regcf method conducts the linear regression and extracts the
     * parameter vector. Notice that the algorithm can do subset regression
     * with no alteration.
     *
     * @param nreq how many of the regressors to include (either in canonical
     * order, or in the current reordered state)
     * @return an array with the estimated slope coefficients
     * @throws MathIllegalArgumentException if {@code nreq} is less than 1
     * or greater than the number of independent variables
     */
    private double[] regcf(int nreq) throws MathIllegalArgumentException {
        int nextr;
        if (nreq < 1) {
            throw new MathIllegalArgumentException(LocalizedStatFormats.NO_REGRESSORS);
        }
        if (nreq > this.nvars) {
            throw new MathIllegalArgumentException(
                    LocalizedStatFormats.TOO_MANY_REGRESSORS, nreq, this.nvars);
        }
        if (!this.tol_set) {
            tolset();
        }
        final double[] ret = new double[nreq];
        boolean rankProblem = false;
        // Back-substitution, starting from the last requested coefficient.
        for (int i = nreq - 1; i > -1; i--) {
            if (FastMath.sqrt(d[i]) < tol[i]) {
                // Numerically singular column: force the coefficient to zero.
                ret[i] = 0.0;
                d[i] = 0.0;
                rankProblem = true;
            } else {
                ret[i] = rhs[i];
                nextr = i * (nvars + nvars - i - 1) / 2;
                for (int j = i + 1; j < nreq; j++) {
                    ret[i] = smartAdd(ret[i], -r[nextr] * ret[j]);
                    ++nextr;
                }
            }
        }
        if (rankProblem) {
            // Coefficients of linearly dependent columns are reported as NaN.
            for (int i = 0; i < nreq; i++) {
                if (this.lindep[i]) {
                    ret[i] = Double.NaN;
                }
            }
        }
        return ret;
    }
    /**
     * The method which checks for singularities and then eliminates the offending
     * columns: a near-zero diagonal is zeroed out, flagged in {@code lindep},
     * and its displaced row is re-included so later columns can absorb it.
     */
    private void singcheck() {
        int pos;
        for (int i = 0; i < nvars; i++) {
            work_sing[i] = FastMath.sqrt(d[i]);
        }
        for (int col = 0; col < nvars; col++) {
            // Set elements within R to zero if they are less than tol(col) in
            // absolute value after being scaled by the square root of their row
            // multiplier
            final double temp = tol[col];
            pos = col - 1;
            for (int row = 0; row < col - 1; row++) {
                if (FastMath.abs(r[pos]) * work_sing[row] < temp) {
                    r[pos] = 0.0;
                }
                pos += nvars - row - 2;
            }
            // If diagonal element is near zero, set it to zero, set appropriate
            // element of LINDEP, and use INCLUD to augment the projections in
            // the lower rows of the orthogonalization.
            lindep[col] = false;
            if (work_sing[col] < temp) {
                lindep[col] = true;
                if (col < nvars - 1) {
                    Arrays.fill(x_sing, 0.0);
                    int _pi = col * (nvars + nvars - col - 1) / 2;
                    for (int _xi = col + 1; _xi < nvars; _xi++, _pi++) {
                        x_sing[_xi] = r[_pi];
                        r[_pi] = 0.0;
                    }
                    final double y = rhs[col];
                    final double weight = d[col];
                    d[col] = 0.0;
                    rhs[col] = 0.0;
                    // Re-include the displaced row with its original weight.
                    this.include(x_sing, weight, y);
                } else {
                    // Last column: its contribution goes straight to the error.
                    sserr += d[col] * rhs[col] * rhs[col];
                }
            }
        }
    }
    /**
     * Calculates the sum of squared errors for the full regression
     * and all subsets in the following manner: <pre>
     * rss[] ={
     * ResidualSumOfSquares_allNvars,
     * ResidualSumOfSquares_FirstNvars-1,
     * ResidualSumOfSquares_FirstNvars-2,
     * ..., ResidualSumOfSquares_FirstVariable} </pre>
     */
    private void ss() {
        double total = sserr;
        rss[nvars - 1] = sserr;
        // Dropping variable i adds back its explained sum of squares
        // d[i] * rhs[i]^2.
        for (int i = nvars - 1; i > 0; i--) {
            total += d[i] * rhs[i] * rhs[i];
            rss[i - 1] = total;
        }
        rss_set = true;
    }
    /**
     * Calculates the cov matrix assuming only the first nreq variables are
     * included in the calculation. The returned array contains a symmetric
     * matrix stored in lower triangular form. The matrix will have
     * ( nreq + 1 ) * nreq / 2 elements. For illustration <pre>
     * cov =
     * {
     *  cov_00,
     *  cov_10, cov_11,
     *  cov_20, cov_21, cov22,
     *  ...
     * } </pre>
     *
     * <p>Returns {@code null} when there are not strictly more observations
     * than requested regressors. Entries involving linearly dependent
     * columns remain NaN.</p>
     *
     * @param nreq how many of the regressors to include (either in canonical
     * order, or in the current reordered state)
     * @return an array with the variance covariance of the included
     * regressors in lower triangular form
     */
    private double[] cov(int nreq) {
        if (this.nobs <= nreq) {
            return null;
        }
        // Effective rank: count the columns that are not linearly dependent.
        double rnk = 0.0;
        for (int i = 0; i < nreq; i++) {
            if (!this.lindep[i]) {
                rnk += 1.0;
            }
        }
        final double var = rss[nreq - 1] / (nobs - rnk);
        final double[] rinv = new double[nreq * (nreq - 1) / 2];
        inverse(rinv, nreq);
        final double[] covmat = new double[nreq * (nreq + 1) / 2];
        Arrays.fill(covmat, Double.NaN);
        int pos2;
        int pos1;
        int start = 0;
        for (int row = 0; row < nreq; row++) {
            pos2 = start;
            if (!this.lindep[row]) {
                for (int col = row; col < nreq; col++) {
                    if (!this.lindep[col]) {
                        pos1 = start + col - row;
                        double total;
                        if (row == col) {
                            total = 1.0 / d[col];
                        } else {
                            total = rinv[pos1 - 1] / d[col];
                        }
                        for (int k = col + 1; k < nreq; k++) {
                            if (!this.lindep[k]) {
                                total += rinv[pos1] * rinv[pos2] / d[k];
                            }
                            ++pos1;
                            ++pos2;
                        }
                        // Packed lower-triangular destination index.
                        covmat[ (col + 1) * col / 2 + row] = total * var;
                    } else {
                        pos2 += nreq - col - 1;
                    }
                }
            }
            start += nreq - row - 1;
        }
        return covmat;
    }
    /**
     * This internal method calculates the inverse of the upper-triangular portion
     * of the R matrix.
     * @param rinv the storage for the inverse of r; entries for linearly
     * dependent rows are left as NaN
     * @param nreq how many of the regressors to include (either in canonical
     * order, or in the current reordered state)
     */
    private void inverse(double[] rinv, int nreq) {
        int pos = nreq * (nreq - 1) / 2 - 1;
        Arrays.fill(rinv, Double.NaN);
        // Work upward from the last row of the packed triangle.
        for (int row = nreq - 1; row > 0; --row) {
            if (!this.lindep[row]) {
                final int start = (row - 1) * (nvars + nvars - row) / 2;
                for (int col = nreq; col > row; --col) {
                    int pos1 = start;
                    int pos2 = pos;
                    double total = 0.0;
                    for (int k = row; k < col - 1; k++) {
                        pos2 += nreq - k - 1;
                        if (!this.lindep[k]) {
                            total += -r[pos1] * rinv[pos2];
                        }
                        ++pos1;
                    }
                    rinv[pos] = total - r[pos1];
                    --pos;
                }
            } else {
                // Skip the whole row of packed entries.
                pos -= nreq - row;
            }
        }
    }
    /**
     * In the original algorithm only the partial correlations of the regressors
     * is returned to the user. In this implementation, we have <pre>
     * corr =
     * {
     *   corrxx - lower triangular
     *   corrxy - bottom row of the matrix
     * }
     * Replaces subroutines PCORR and COR of:
     * ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 </pre>
     *
     * <p>Calculate partial correlations after the variables in rows
     * 1, 2, ..., IN have been forced into the regression.
     * If IN = 1, and the first row of R represents a constant in the
     * model, then the usual simple correlations are returned.</p>
     *
     * <p>If IN = 0, the value returned in array CORMAT for the correlation
     * of variables Xi &amp; Xj is: <pre>
     * sum ( Xi.Xj ) / Sqrt ( sum (Xi^2) . sum (Xj^2) )</pre></p>
     *
     * <p>On return, array CORMAT contains the upper triangle of the matrix of
     * partial correlations stored by rows, excluding the 1's on the diagonal.
     * e.g. if IN = 2, the consecutive elements returned are:
     * (3,4) (3,5) ... (3,ncol), (4,5) (4,6) ... (4,ncol), etc.
     * Array YCORR stores the partial correlations with the Y-variable
     * starting with YCORR(IN+1) = partial correlation with the variable in
     * position (IN+1). </p>
     *
     * @param in how many of the regressors to include (either in canonical
     * order, or in the current reordered state)
     * @return an array with the partial correlations of the remainder of
     * regressors with each other and the regressand, in lower triangular form;
     * {@code null} if {@code in} is out of range
     */
    public double[] getPartialCorrelations(int in) {
        final double[] output = new double[(nvars - in + 1) * (nvars - in) / 2];
        int pos;
        int pos1;
        int pos2;
        // Offsets translate absolute column indices into the local workspaces.
        final int rms_off = -in;
        final int wrk_off = -(in + 1);
        final double[] rms = new double[nvars - in];
        final double[] work = new double[nvars - in - 1];
        double sumxx;
        double sumxy;
        double sumyy;
        final int offXX = (nvars - in) * (nvars - in - 1) / 2;
        // NOTE(review): the guard admits in == -1, but d[in] below would then
        // index d[-1]; looks unreachable for sane callers — confirm against
        // the AS274 reference before relying on it.
        if (in < -1 || in >= nvars) {
            return null;
        }
        final int nvm = nvars - 1;
        final int base_pos = r.length - (nvm - in) * (nvm - in + 1) / 2;
        if (d[in] > 0.0) {
            rms[in + rms_off] = 1.0 / FastMath.sqrt(d[in]);
        }
        // Inverse column norms (zero when a column has no variation).
        for (int col = in + 1; col < nvars; col++) {
            pos = base_pos + col - 1 - in;
            sumxx = d[col];
            for (int row = in; row < col; row++) {
                sumxx += d[row] * r[pos] * r[pos];
                pos += nvars - row - 2;
            }
            if (sumxx > 0.0) {
                rms[col + rms_off] = 1.0 / FastMath.sqrt(sumxx);
            } else {
                rms[col + rms_off] = 0.0;
            }
        }
        sumyy = sserr;
        for (int row = in; row < nvars; row++) {
            sumyy += d[row] * rhs[row] * rhs[row];
        }
        if (sumyy > 0.0) {
            sumyy = 1.0 / FastMath.sqrt(sumyy);
        }
        pos = 0;
        for (int col1 = in; col1 < nvars; col1++) {
            sumxy = 0.0;
            Arrays.fill(work, 0.0);
            pos1 = base_pos + col1 - in - 1;
            for (int row = in; row < col1; row++) {
                pos2 = pos1 + 1;
                for (int col2 = col1 + 1; col2 < nvars; col2++) {
                    work[col2 + wrk_off] += d[row] * r[pos1] * r[pos2];
                    pos2++;
                }
                sumxy += d[row] * r[pos1] * rhs[row];
                pos1 += nvars - row - 2;
            }
            pos2 = pos1 + 1;
            for (int col2 = col1 + 1; col2 < nvars; col2++) {
                work[col2 + wrk_off] += d[col1] * r[pos2];
                ++pos2;
                output[ (col2 - 1 - in) * (col2 - in) / 2 + col1 - in] =
                        work[col2 + wrk_off] * rms[col1 + rms_off] * rms[col2 + rms_off];
                ++pos;
            }
            sumxy += d[col1] * rhs[col1];
            // Last row of the output: correlations with the regressand.
            output[col1 + rms_off + offXX] = sumxy * rms[col1 + rms_off] * sumyy;
        }
        return output;
    }
    /**
     * ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2.
     * Move variable from position FROM to position TO in an
     * orthogonal reduction produced by AS75.1, by repeatedly swapping
     * adjacent variables with a plane rotation of the decomposition.
     *
     * @param from initial position
     * @param to destination
     */
    private void vmove(int from, int to) {
        double d1;
        double d2;
        double X;
        double d1new;
        double d2new;
        double cbar;
        double sbar;
        double Y;
        int first;
        int inc;
        int m1;
        int m2;
        int mp1;
        int pos;
        // NOTE(review): bSkipTo40 is never reset inside the loop, so once a
        // special case fires, the general-rotation branch is skipped for all
        // remaining adjacent swaps — verify against the AS274 reference.
        boolean bSkipTo40 = false;
        if (from == to) {
            return;
        }
        if (!this.rss_set) {
            ss();
        }
        // Walk one adjacent swap at a time toward the destination.
        final int count;
        if (from < to) {
            first = from;
            inc = 1;
            count = to - from;
        } else {
            first = from - 1;
            inc = -1;
            count = from - to;
        }
        int m = first;
        int idx = 0;
        while (idx < count) {
            // Packed-triangle offsets for rows m and m+1.
            m1 = m * (nvars + nvars - m - 1) / 2;
            m2 = m1 + nvars - m - 1;
            mp1 = m + 1;
            d1 = d[m];
            d2 = d[mp1];
            // Special cases.
            if (d1 > this.epsilon || d2 > this.epsilon) {
                X = r[m1];
                if (FastMath.abs(X) * FastMath.sqrt(d1) < tol[mp1]) {
                    X = 0.0;
                }
                if (d1 < this.epsilon || FastMath.abs(X) < this.epsilon) {
                    // Row m is negligible: a straight swap of the two rows.
                    d[m] = d2;
                    d[mp1] = d1;
                    r[m1] = 0.0;
                    for (int col = m + 2; col < nvars; col++) {
                        ++m1;
                        X = r[m1];
                        r[m1] = r[m2];
                        r[m2] = X;
                        ++m2;
                    }
                    X = rhs[m];
                    rhs[m] = rhs[mp1];
                    rhs[mp1] = X;
                    bSkipTo40 = true;
                    //break;
                } else if (d2 < this.epsilon) {
                    // Row m+1 is negligible: rescale row m by X.
                    d[m] = d1 * X * X;
                    r[m1] = 1.0 / X;
                    for (int _i = m1 + 1; _i < m1 + nvars - m - 1; _i++) {
                        r[_i] /= X;
                    }
                    rhs[m] /= X;
                    bSkipTo40 = true;
                    //break;
                }
                if (!bSkipTo40) {
                    // General case: rotate the two adjacent rows.
                    d1new = d2 + d1 * X * X;
                    cbar = d2 / d1new;
                    sbar = X * d1 / d1new;
                    d2new = d1 * cbar;
                    d[m] = d1new;
                    d[mp1] = d2new;
                    r[m1] = sbar;
                    for (int col = m + 2; col < nvars; col++) {
                        ++m1;
                        Y = r[m1];
                        r[m1] = cbar * r[m2] + sbar * Y;
                        r[m2] = Y - X * r[m2];
                        ++m2;
                    }
                    Y = rhs[m];
                    rhs[m] = cbar * rhs[mp1] + sbar * Y;
                    rhs[mp1] = Y - X * rhs[mp1];
                }
            }
            // Swap the column entries above row m.
            if (m > 0) {
                pos = m;
                for (int row = 0; row < m; row++) {
                    X = r[pos];
                    r[pos] = r[pos - 1];
                    r[pos - 1] = X;
                    pos += nvars - row - 2;
                }
            }
            // Adjust variable order (VORDER), the tolerances (TOL) and
            // the vector of residual sums of squares (RSS).
            m1 = vorder[m];
            vorder[m] = vorder[mp1];
            vorder[mp1] = m1;
            X = tol[m];
            tol[m] = tol[mp1];
            tol[mp1] = X;
            rss[m] = rss[mp1] + d[mp1] * rhs[mp1] * rhs[mp1];
            m += inc;
            ++idx;
        }
    }
    /**
     * ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2
     *
     * <p> Re-order the variables in an orthogonal reduction produced by
     * AS75.1 so that the N variables in LIST start at position POS1,
     * though will not necessarily be in the same order as in LIST.
     * Any variables in VORDER before position POS1 are not moved.
     * Auxiliary routine called: VMOVE. </p>
     *
     * <p>This internal method reorders the regressors.</p>
     *
     * @param list the regressors to move
     * @param pos1 where the list will be placed
     * @return -1 error, 0 everything ok
     */
    private int reorderRegressors(int[] list, int pos1) {
        int next;
        int i;
        int l;
        if (list.length < 1 || list.length > nvars + 1 - pos1) {
            return -1;
        }
        next = pos1;
        i = pos1;
        while (i < nvars) {
            l = vorder[i];
            // Pull any listed variable that is not yet in place forward.
            for (int j = 0; j < list.length; j++) {
                if (l == list[j] && i > next) {
                    this.vmove(i, next);
                    ++next;
                    if (next >= list.length + pos1) {
                        // All listed variables placed.
                        return 0;
                    } else {
                        break;
                    }
                }
            }
            ++i;
        }
        return 0;
    }
    /**
     * Gets the diagonal of the Hat matrix also known as the leverage matrix.
     * Returns NaN when the supplied row has more entries than the model has
     * variables.
     *
     * @param row_data returns the diagonal of the hat matrix for this observation
     * @return the diagonal element of the hatmatrix
     */
    public double getDiagonalOfHatMatrix(double[] row_data) {
        double[] wk = new double[this.nvars];
        int pos;
        double total;
        if (row_data.length > nvars) {
            return Double.NaN;
        }
        double[] xrow;
        if (this.hasIntercept) {
            // Prepend the constant regressor, mirroring addObservation.
            xrow = new double[row_data.length + 1];
            xrow[0] = 1.0;
            System.arraycopy(row_data, 0, xrow, 1, row_data.length);
        } else {
            xrow = row_data;
        }
        double hii = 0.0;
        for (int col = 0; col < xrow.length; col++) {
            if (FastMath.sqrt(d[col]) < tol[col]) {
                // Numerically singular column contributes nothing.
                wk[col] = 0.0;
            } else {
                // Forward solve against the packed R to project the row.
                pos = col - 1;
                total = xrow[col];
                for (int row = 0; row < col; row++) {
                    total = smartAdd(total, -wk[row] * r[pos]);
                    pos += nvars - row - 2;
                }
                wk[col] = total;
                hii = smartAdd(hii, (total * total) / d[col]);
            }
        }
        return hii;
    }
/**
* Gets the order of the regressors, useful if some type of reordering
* has been called. Calling regress with int[]{} args will trigger
* a reordering.
*
* @return int[] with the current order of the regressors
*/
public int[] getOrderOfRegressors(){
return vorder.clone();
}
    /**
     * Conducts a regression on the data in the model, using all regressors.
     * Delegates to {@link #regress(int)} with the full variable count.
     *
     * @return RegressionResults the structure holding all regression results
     * @exception MathIllegalArgumentException - thrown if number of observations is
     * less than the number of variables
     */
    @Override
    public RegressionResults regress() throws MathIllegalArgumentException {
        return regress(this.nvars);
    }
    /**
     * Conducts a regression on the data in the model, using a subset of regressors.
     *
     * @param numberOfRegressors many of the regressors to include (either in canonical
     * order, or in the current reordered state)
     * @return RegressionResults the structure holding all regression results
     * @exception MathIllegalArgumentException - thrown if number of observations is
     * less than the number of variables or number of regressors requested
     * is greater than the regressors in the model
     */
    public RegressionResults regress(int numberOfRegressors) throws MathIllegalArgumentException {
        if (this.nobs <= numberOfRegressors) {
            throw new MathIllegalArgumentException(
                    LocalizedStatFormats.NOT_ENOUGH_DATA_FOR_NUMBER_OF_PREDICTORS,
                    this.nobs, numberOfRegressors);
        }
        if( numberOfRegressors > this.nvars ){
            throw new MathIllegalArgumentException(
                    LocalizedStatFormats.TOO_MANY_REGRESSORS, numberOfRegressors, this.nvars);
        }
        // Refresh tolerances, eliminate singular columns, solve, and compute
        // the residual sums of squares and covariance of the estimates.
        tolset();
        singcheck();
        double[] beta = this.regcf(numberOfRegressors);
        ss();
        double[] cov = this.cov(numberOfRegressors);
        int rnk = 0;
        for (int i = 0; i < this.lindep.length; i++) {
            if (!this.lindep[i]) {
                ++rnk;
            }
        }
        // If the regressors were reordered, results must be mapped back to
        // canonical variable order before being returned.
        boolean needsReorder = false;
        for (int i = 0; i < numberOfRegressors; i++) {
            if (this.vorder[i] != i) {
                needsReorder = true;
                break;
            }
        }
        if (!needsReorder) {
            return new RegressionResults(
                    beta, new double[][]{cov}, true, this.nobs, rnk,
                    this.sumy, this.sumsqy, this.sserr, this.hasIntercept, false);
        } else {
            double[] betaNew = new double[beta.length];
            double[] covNew = new double[cov.length];
            // newIndices[i] = position in the reordered results holding variable i.
            int[] newIndices = new int[beta.length];
            for (int i = 0; i < nvars; i++) {
                for (int j = 0; j < numberOfRegressors; j++) {
                    if (this.vorder[j] == i) {
                        betaNew[i] = beta[ j];
                        newIndices[i] = j;
                    }
                }
            }
            // Permute the packed lower-triangular covariance accordingly.
            int idx1 = 0;
            int idx2;
            int _i;
            int _j;
            for (int i = 0; i < beta.length; i++) {
                _i = newIndices[i];
                for (int j = 0; j <= i; j++, idx1++) {
                    _j = newIndices[j];
                    if (_i > _j) {
                        idx2 = _i * (_i + 1) / 2 + _j;
                    } else {
                        idx2 = _j * (_j + 1) / 2 + _i;
                    }
                    covNew[idx1] = cov[idx2];
                }
            }
            return new RegressionResults(
                    betaNew, new double[][]{covNew}, true, this.nobs, rnk,
                    this.sumy, this.sumsqy, this.sserr, this.hasIntercept, false);
        }
    }
/**
 * Conducts a regression on the data in the model, using regressors in array
 * Calling this method will change the internal order of the regressors
 * and care is required in interpreting the hatmatrix.
 *
 * @param variablesToInclude array of variables to include in regression
 * @return RegressionResults the structure holding all regression results
 * @throws MathIllegalArgumentException thrown if number of observations is
 * less than the number of variables, the number of regressors requested
 * is greater than the regressors in the model or a regressor index in
 * regressor array does not exist
 */
@Override
public RegressionResults regress(int[] variablesToInclude) throws MathIllegalArgumentException {
    if (variablesToInclude.length > this.nvars) {
        throw new MathIllegalArgumentException(
                LocalizedStatFormats.TOO_MANY_REGRESSORS, variablesToInclude.length, this.nvars);
    }
    if (this.nobs <= this.nvars) {
        throw new MathIllegalArgumentException(
                LocalizedStatFormats.NOT_ENOUGH_DATA_FOR_NUMBER_OF_PREDICTORS,
                this.nobs, this.nvars);
    }
    // Sort so duplicates become adjacent and can be detected in one pass.
    Arrays.sort(variablesToInclude);
    int iExclude = 0;
    for (int i = 0; i < variablesToInclude.length; i++) {
        // BUGFIX: validate the requested regressor index, not the loop counter.
        // The previous check (i >= this.nvars) could never fire, because the
        // array length was already verified to be <= nvars above, so invalid
        // indices silently slipped through to the internal routines.
        if (variablesToInclude[i] >= this.nvars) {
            throw new MathIllegalArgumentException(
                    LocalizedCoreFormats.INDEX_LARGER_THAN_MAX,
                    variablesToInclude[i], this.nvars);
        }
        // Duplicate entries are flagged with -1 and filtered out below.
        if (i > 0 && variablesToInclude[i] == variablesToInclude[i - 1]) {
            variablesToInclude[i] = -1;
            ++iExclude;
        }
    }
    // Build the de-duplicated list of regressor indices to include.
    int[] series;
    if (iExclude > 0) {
        int j = 0;
        series = new int[variablesToInclude.length - iExclude];
        for (int i = 0; i < variablesToInclude.length; i++) {
            if (variablesToInclude[i] > -1) {
                series[j] = variablesToInclude[i];
                ++j;
            }
        }
    } else {
        series = variablesToInclude;
    }
    // Move the requested regressors to the front of the internal ordering,
    // then refresh tolerances/singularity flags before extracting results.
    reorderRegressors(series, 0);
    tolset();
    singcheck();
    double[] beta = this.regcf(series.length);
    ss();
    double[] cov = this.cov(series.length);
    // Rank = number of regressors not flagged as linearly dependent.
    int rnk = 0;
    for (int i = 0; i < this.lindep.length; i++) {
        if (!this.lindep[i]) {
            ++rnk;
        }
    }
    // BUGFIX: only the first series.length positions of vorder are relevant
    // (beta has series.length entries); the previous bound of nvars read
    // series[i] past its end when fewer regressors than nvars were requested.
    boolean needsReorder = false;
    for (int i = 0; i < series.length; i++) {
        if (this.vorder[i] != series[i]) {
            needsReorder = true;
            break;
        }
    }
    if (!needsReorder) {
        return new RegressionResults(
                beta, new double[][]{cov}, true, this.nobs, rnk,
                this.sumy, this.sumsqy, this.sserr, this.hasIntercept, false);
    } else {
        // Permute beta back into the caller's requested order; newIndices[i]
        // records where series[i] currently sits in vorder.
        double[] betaNew = new double[beta.length];
        int[] newIndices = new int[beta.length];
        for (int i = 0; i < series.length; i++) {
            for (int j = 0; j < this.vorder.length; j++) {
                if (this.vorder[j] == series[i]) {
                    betaNew[i] = beta[ j];
                    newIndices[i] = j;
                }
            }
        }
        // Remap the packed lower-triangular covariance to the new ordering.
        double[] covNew = new double[cov.length];
        int idx1 = 0;
        int idx2;
        int _i;
        int _j;
        for (int i = 0; i < beta.length; i++) {
            _i = newIndices[i];
            for (int j = 0; j <= i; j++, idx1++) {
                _j = newIndices[j];
                // Packed storage holds only the lower triangle, so order the
                // indices before computing the linear offset.
                if (_i > _j) {
                    idx2 = _i * (_i + 1) / 2 + _j;
                } else {
                    idx2 = _j * (_j + 1) / 2 + _i;
                }
                covNew[idx1] = cov[idx2];
            }
        }
        return new RegressionResults(
                betaNew, new double[][]{covNew}, true, this.nobs, rnk,
                this.sumy, this.sumsqy, this.sserr, this.hasIntercept, false);
    }
}
}
| 35.761046
| 106
| 0.498021
|
ddfe4c89efecb38354809c5d05201f4929754527
| 15,889
|
package org.nypl.simplified.opds.core;
import com.io7m.jfunctional.Option;
import com.io7m.jfunctional.OptionType;
import com.io7m.jnull.NullCheck;
import com.io7m.junreachable.UnreachableCodeException;
import org.joda.time.DateTime;
import org.joda.time.format.ISODateTimeFormat;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
/**
 * Convenient XML handling functions.
 */
public final class OPDSXML
{
  private OPDSXML()
  {
    // Static utility class; never instantiated.
    throw new UnreachableCodeException();
  }

  /**
   * Return all child elements of {@code node} that have name {@code name} in
   * namespace {@code namespace}.
   *
   * @param node      The parent node
   * @param namespace The namespace
   * @param name      The element name
   *
   * @return A list of elements (possibly empty)
   */
  public static List<Element> getChildElementsWithName(
    final Element node,
    final URI namespace,
    final String name)
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final String namespace_text = namespace.toString();
    final NodeList children = node.getChildNodes();
    final List<Element> xs = new ArrayList<Element>(children.getLength());
    for (int index = 0; index < children.getLength(); ++index) {
      final Node child = children.item(index);
      if (child instanceof Element) {
        final Element child_element = (Element) child;
        // BUGFIX: compare with the known-non-null expected values on the left
        // so that children without a namespace or local name (DOM Level 1
        // nodes return null here) are skipped instead of raising a
        // NullPointerException.
        if (namespace_text.equals(child_element.getNamespaceURI())
          && name.equals(child_element.getLocalName())) {
          xs.add(child_element);
        }
      }
    }
    return xs;
  }

  /**
   * Return all child elements of {@code node} that have name {@code name} in
   * namespace {@code namespace}.
   *
   * @param node      The parent node
   * @param namespace The namespace
   * @param name      The element name
   *
   * @return A non-empty list of elements
   *
   * @throws OPDSParseException If there are no matching elements
   */
  public static List<Element> getChildElementsWithNameNonEmpty(
    final Element node,
    final URI namespace,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final List<Element> elements = getChildElementsWithName(node, namespace, name);
    if (!elements.isEmpty()) {
      return elements;
    }

    final StringBuilder m = new StringBuilder(128);
    m.append("Missing at least one required element.\n");
    m.append("Expected namespace: ");
    m.append(namespace);
    m.append("\n");
    m.append("Expected name: ");
    m.append(name);
    m.append("\n");
    throw new OPDSParseException(NullCheck.notNull(m.toString()));
  }

  /**
   * Return the trimmed text of the first child element of {@code node} that
   * has name {@code name} in namespace {@code namespace}.
   *
   * @param node      The node
   * @param namespace The child namespace
   * @param name      The child name
   *
   * @return The text of the child element
   *
   * @throws OPDSParseException If there are no matching child elements
   */
  public static String getFirstChildElementTextWithName(
    final Element node,
    final URI namespace,
    final String name)
    throws OPDSParseException
  {
    final Element e = OPDSXML.getFirstChildElementWithName(node, namespace, name);
    return NullCheck.notNull(e.getTextContent().trim());
  }

  /**
   * Return the (optional) trimmed text of the first child element of {@code
   * node} that has name {@code name} in namespace {@code namespace}.
   *
   * @param node      The node
   * @param namespace The child namespace
   * @param name      The child name
   *
   * @return The text of the child element, if any
   */
  public static OptionType<String> getFirstChildElementTextWithNameOptional(
    final Element node,
    final URI namespace,
    final String name)
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final NodeList children = node.getChildNodes();
    for (int index = 0; index < children.getLength(); ++index) {
      final Node child = NullCheck.notNull(children.item(index));
      if (child instanceof Element) {
        if (OPDSXML.nodeHasName((Element) child, namespace, name)) {
          final String text = child.getTextContent();
          return Option.some(NullCheck.notNull(text.trim()));
        }
      }
    }
    return Option.none();
  }

  /**
   * Return the trimmed value of attribute {@code attribute} (in namespace
   * {@code namespace}) of the first child element of {@code node} that has
   * name {@code name} in namespace {@code namespace}.
   *
   * @param node      The node
   * @param namespace The child namespace
   * @param name      The child name
   * @param attribute The attribute name
   *
   * @return The attribute value, or the empty string if no matching element
   * or attribute exists
   */
  public static String getFirstChildElementTextWithName(
    final Element node,
    final URI namespace,
    final String name,
    final String attribute)
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final NodeList children = node.getChildNodes();
    for (int index = 0; index < children.getLength(); ++index) {
      final Node child = NullCheck.notNull(children.item(index));
      if (child instanceof Element) {
        if (OPDSXML.nodeHasName((Element) child, namespace, name)) {
          final Node attribute_node =
            ((Element) child).getAttributes().getNamedItemNS(
              namespace.toString(), attribute);
          // BUGFIX: a matching element without the attribute previously
          // caused a NullPointerException; treat it like "not found" and
          // return the empty string, consistent with the fall-through below.
          if (attribute_node == null) {
            return "";
          }
          final String text = attribute_node.getNodeValue();
          return NullCheck.notNull(text.trim());
        }
      }
    }
    return "";
  }

  /**
   * Return the first child element of {@code node} that has name {@code name}
   * in namespace {@code namespace}.
   *
   * @param node      The node
   * @param namespace The child namespace
   * @param name      The child name
   *
   * @return The child element
   *
   * @throws OPDSParseException If no matching element exists
   */
  public static Element getFirstChildElementWithName(
    final Element node,
    final URI namespace,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final NodeList children = node.getChildNodes();
    for (int index = 0; index < children.getLength(); ++index) {
      final Node child = NullCheck.notNull(children.item(index));
      if (child instanceof Element) {
        if (OPDSXML.nodeHasName((Element) child, namespace, name)) {
          return (Element) child;
        }
      }
    }

    final StringBuilder m = new StringBuilder(128);
    m.append("Expected required element.\n");
    m.append("Expected namespace: ");
    m.append(namespace);
    m.append("\n");
    m.append("Expected name: ");
    m.append(name);
    m.append("\n");
    throw new OPDSParseException(NullCheck.notNull(m.toString()));
  }

  /**
   * Return the first child element of {@code node} that has name {@code name}
   * in namespace {@code namespace}, if any.
   *
   * @param node      The node
   * @param namespace The child namespace
   * @param name      The child name
   *
   * @return The child element, if any
   */
  public static OptionType<Element> getFirstChildElementWithNameOptional(
    final Element node,
    final URI namespace,
    final String name)
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final NodeList children = node.getChildNodes();
    for (int index = 0; index < children.getLength(); ++index) {
      final Node child = NullCheck.notNull(children.item(index));
      if (child instanceof Element) {
        if (OPDSXML.nodeHasName((Element) child, namespace, name)) {
          return Option.some((Element) child);
        }
      }
    }
    return Option.none();
  }

  /**
   * @param e The element
   *
   * @return The namespace of the given element, if any
   */
  public static OptionType<String> getNodeNamespace(
    final Element e)
  {
    NullCheck.notNull(e);

    final String ns = e.getNamespaceURI();
    if (ns != null) {
      return Option.some(ns);
    }
    return Option.none();
  }

  /**
   * Cast the given node to an {@link Element}, raising an exception if it is
   * not an element.
   *
   * @param node The node
   *
   * @return The node as an element
   *
   * @throws OPDSParseException If the node is not an {@link Element}
   */
  public static Element nodeAsElement(
    final Node node)
    throws OPDSParseException
  {
    NullCheck.notNull(node);

    if (!(node instanceof Element)) {
      final StringBuilder m = new StringBuilder(128);
      m.append("Expected element but got node of type ");
      m.append(node.getNodeName());
      throw new OPDSParseException(NullCheck.notNull(m.toString()));
    }
    return (Element) node;
  }

  /**
   * Cast the given node to an {@link Element}, raising an exception if it is
   * not an element and/or does not have the given {@code name} and {@code
   * namespace}.
   *
   * @param node      The node
   * @param name      The expected element name
   * @param namespace The expected element namespace
   *
   * @return The node as an element
   *
   * @throws OPDSParseException If the node is not an {@link Element} or has the
   *                            wrong name
   */
  public static Element nodeAsElementWithName(
    final Node node,
    final URI namespace,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(node);
    NullCheck.notNull(namespace);
    NullCheck.notNull(name);

    final Element e = OPDSXML.nodeAsElement(node);
    if (OPDSXML.nodeHasName(e, namespace, name)) {
      return e;
    }

    final StringBuilder m = new StringBuilder(128);
    m.append("Missing required element.\n");
    m.append("Expected namespace: ");
    m.append(namespace);
    m.append("\n");
    m.append("Expected name: ");
    m.append(name);
    m.append("\n");
    m.append("Got namespace: ");
    m.append(OPDSXML.getNodeNamespace(e));
    m.append("\n");
    m.append("Got name: ");
    m.append(e.getNodeName());
    m.append("\n");
    throw new OPDSParseException(NullCheck.notNull(m.toString()));
  }

  /**
   * @param node      The element
   * @param namespace The namespace
   * @param name      The name
   *
   * @return {@code true} if the given element has the given name and namespace
   */
  public static boolean nodeHasName(
    final Element node,
    final URI namespace,
    final String name)
  {
    // BUGFIX: compare with the known-non-null expected name on the left so
    // that elements without a local name (DOM Level 1) yield false instead of
    // a NullPointerException.
    final String node_local = node.getLocalName();
    if (name.equals(node_local)) {
      return namespace.toString().equals(node.getNamespaceURI());
    }
    return false;
  }

  /**
   * Parse the contents of attribute {@code name} of element {@code e} as an
   * RFC3339 date, if the attribute exists.
   *
   * @param e    The element
   * @param name The attribute name
   *
   * @return A date, if any
   *
   * @throws OPDSParseException On parse errors
   */
  public static OptionType<DateTime> getAttributeRFC3339Optional(
    final Element e,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(e);
    NullCheck.notNull(name);

    try {
      if (e.hasAttribute(name)) {
        return Option.some(
          ISODateTimeFormat.dateTimeParser().parseDateTime(e.getAttribute(name)));
      }
      return Option.none();
    } catch (final IllegalArgumentException x) {
      // The joda-time parser signals malformed dates this way.
      throw new OPDSParseException(x);
    }
  }

  /**
   * Parse the contents of attribute {@code name} of element {@code e} as an
   * RFC3339 date.
   *
   * @param e    The element
   * @param name The attribute name
   *
   * @return A date
   *
   * @throws OPDSParseException On parse errors, or if the attribute is missing
   */
  public static DateTime getAttributeRFC3339(
    final Element e,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(e);
    NullCheck.notNull(name);

    if (e.hasAttribute(name)) {
      try {
        return ISODateTimeFormat.dateTimeParser().parseDateTime(e.getAttribute(name));
      } catch (final IllegalArgumentException x) {
        throw new OPDSParseException(x);
      }
    }

    final StringBuilder m = new StringBuilder(128);
    m.append("Expected required attribute.\n");
    m.append("Expected name: ");
    m.append(name);
    m.append("\n");
    throw new OPDSParseException(NullCheck.notNull(m.toString()));
  }

  /**
   * Convenient function to serialize the given document to the given output
   * stream.
   *
   * @param d The document
   * @param o The output stream
   *
   * @throws OPDSSerializationException If any errors occur on serialization
   */
  public static void serializeDocumentToStream(
    final Document d,
    final OutputStream o)
    throws OPDSSerializationException
  {
    NullCheck.notNull(d);
    NullCheck.notNull(o);

    try {
      final TransformerFactory tf =
        NullCheck.notNull(TransformerFactory.newInstance());
      final Transformer t = NullCheck.notNull(tf.newTransformer());
      t.setOutputProperty(OutputKeys.INDENT, "yes");
      t.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");

      final DOMSource source = new DOMSource(d);
      final StreamResult target = new StreamResult(o);
      t.transform(source, target);
    } catch (final TransformerConfigurationException ex) {
      throw new OPDSSerializationException(ex);
    } catch (final TransformerFactoryConfigurationError ex) {
      throw new OPDSSerializationException(ex);
    } catch (final TransformerException ex) {
      throw new OPDSSerializationException(ex);
    }
  }

  /**
   * Parse the contents of attribute {@code name} of element {@code e} as an
   * integer, if the attribute exists.
   *
   * @param e    The element
   * @param name The attribute name
   *
   * @return An integer, if any
   *
   * @throws OPDSParseException On parse errors
   */
  public static OptionType<Integer> getAttributeIntegerOptional(
    final Element e,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(e);
    NullCheck.notNull(name);

    if (e.hasAttribute(name)) {
      try {
        return Option.some(Integer.valueOf(e.getAttribute(name)));
      } catch (final NumberFormatException x) {
        throw new OPDSParseException(x);
      }
    }
    return Option.none();
  }

  /**
   * Parse the contents of attribute {@code name} of element {@code e} as an
   * integer.
   *
   * @param e    The element
   * @param name The attribute name
   *
   * @return An integer
   *
   * @throws OPDSParseException On parse errors, or if the attribute is missing
   */
  public static int getAttributeInteger(
    final Element e,
    final String name)
    throws OPDSParseException
  {
    NullCheck.notNull(e);
    NullCheck.notNull(name);

    if (e.hasAttribute(name)) {
      try {
        // parseInt avoids the needless Integer boxing of the previous
        // Integer.valueOf call in this int-returning method.
        return Integer.parseInt(e.getAttribute(name));
      } catch (final NumberFormatException x) {
        throw new OPDSParseException(x);
      }
    }

    final StringBuilder m = new StringBuilder(128);
    m.append("Expected required attribute.\n");
    m.append("Expected name: ");
    m.append(name);
    m.append("\n");
    throw new OPDSParseException(NullCheck.notNull(m.toString()));
  }
}
| 27.681185
| 127
| 0.657499
|
bb1a99c2c28f376d0c6508bcaf7002c2acf9bf72
| 7,436
|
package de.renew.plugin.command;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import de.renew.plugin.IPlugin;
import de.renew.plugin.PluginAdapter;
import de.renew.plugin.PluginManager;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URL;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
 * This command displays a list of the previously loaded plugins in the system.
 *
 * @author Jörn Schumacher
 *
 */
public class ListCommand implements CLCommand {
    // BUGFIX: the logger was previously created for InfoCommand.class, which
    // misattributed all output of this command in the logs.
    public static org.apache.log4j.Logger logger = org.apache.log4j.Logger
                    .getLogger(ListCommand.class);

    /**
     * Prints the list of loaded plugins to the <code>response</code> stream.
     *
     * <p>Supported options: {@code -o/--ordered} (sort by name),
     * {@code -l/--long} (extended information), {@code -j/--jar} (JAR file
     * locations), {@code -h/--help}. An optional trailing argument filters
     * plugins whose name or alias contains the given fragment.</p>
     *
     * @param args
     *            {@inheritDoc}
     * @param response
     *            {@inheritDoc}
     */
    public void execute(String[] args, PrintStream response) {
        CommandLineParser parser = new DefaultParser();
        Options opts = new Options();
        Option ordered = new Option("o", "ordered", false, "Print ordered list.");
        opts.addOption(ordered);
        Option extended = new Option("l", "long", false,
                                     "Display more information about every plugin.");
        opts.addOption(extended);
        Option libs = new Option("j", "jar", false,
                                 "List locations of all jar files.");
        opts.addOption(libs);
        Option help = new Option("h", "help", false, "Print this message.");
        opts.addOption(help);
        CommandLine line = null;
        try {
            line = parser.parse(opts, args);
        } catch (ParseException e1) {
            // Unparseable arguments: print usage and bail out.
            HelpFormatter formatter = new HelpFormatter();
            PrintWriter writer = new PrintWriter(response, true);
            formatter.printHelp(writer, formatter.getWidth(), "list", null,
                                opts, formatter.getLeftPadding(),
                                formatter.getDescPadding(), null, true);
            return;
        }
        if (line.hasOption(help.getOpt())) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.setArgName("Plugin Name Fragment");
            String header = "List loaded Renew plugins.";
            String footer = "Plugin name fragment can be any part of the plugin name.\nPlease, escape white spaces.";
            PrintWriter writer = new PrintWriter(response, true);
            formatter.printHelp(writer, formatter.getWidth(),
                                "list [options] [plugin name fragment]",
                                header, opts, formatter.getLeftPadding(),
                                formatter.getDescPadding(), footer, true);
            return;
        }
        // Optional filter: any part of a plugin name or alias.
        String pluginNameContains = null;
        if (line.getArgs() != null && line.getArgs().length > 0) {
            pluginNameContains = line.getArgs()[0];
        }
        if (line.hasOption(libs.getOpt())) {
            // JAR mode: print plugin and library file locations, then return.
            Iterator<IPlugin> it = PluginManager.getInstance().getPlugins()
                            .iterator();
            response.println("Plugin JAR-file locations:");
            while (it.hasNext()) {
                IPlugin next = it.next();
                if (skip(next, pluginNameContains)) {
                    continue;
                }
                response.println(next.getProperties().getURL());
            }
            response.println("Libraries JAR-file locations:");
            URL[] urls = PluginManager.getInstance().getLibs();
            for (URL url : urls) {
                response.println(url);
            }
            return;
        }
        List<IPlugin> list = PluginManager.getInstance().getPlugins();
        if (line.hasOption(ordered.getOpt())) {
            java.util.Collections.sort(list, new PluginNameComparator());
        }
        if (line.hasOption(extended.getOpt())) {
            // Long mode: one line per plugin with date, user, version and name.
            try {
                Iterator<IPlugin> it = list.iterator();
                while (it.hasNext()) {
                    IPlugin plugin = it.next();
                    if (skip(plugin, pluginNameContains)) {
                        continue;
                    }
                    String versionstring = "";
                    if (plugin instanceof PluginAdapter) {
                        PluginAdapter pa = (PluginAdapter) plugin;
                        versionstring = pa.getVersion();
                    }
                    response.println(plugin.getProperties()
                                    .getFilteredProperty(".date")
                                    + "\t "
                                    + plugin.getProperties()
                                                    .getFilteredProperty(".user")
                                    + "\t " + versionstring + "\t "
                                    + plugin.getName());
                }
            } catch (ArrayIndexOutOfBoundsException e) {
                // BUGFIX: this exception was silently swallowed; at least
                // record it so malformed plugin properties are diagnosable.
                logger.debug("Plugin listing aborted: " + e, e);
            } catch (NumberFormatException e) {
                // BUGFIX: the message previously said "cleanup canceled.",
                // a copy-paste leftover from another command.
                response.println("Plugin listing canceled.");
            } catch (Exception e) {
                response.println("Plugin listing canceled: " + e + "; "
                                + e.getMessage());
                logger.error(e.getMessage(), e);
            }
            return;
        }
        // Default mode: one line per plugin via its toString().
        Iterator<IPlugin> it = list.iterator();
        while (it.hasNext()) {
            IPlugin plugin = it.next();
            if (skip(plugin, pluginNameContains)) {
                continue;
            }
            response.println(plugin);
        }
    }

    /**
     * Decides whether a plugin should be omitted from the listing.
     *
     * @param plugin     the plugin under consideration
     * @param searchText the (possibly empty) case-insensitive name fragment
     * @return {@code true} if the plugin matches neither name nor alias
     */
    private boolean skip(IPlugin plugin, String searchText) {
        if (searchText == null || searchText.trim().length() == 0) {
            return false;
        }
        String search = searchText.trim().toLowerCase();
        if (plugin.getName() != null
                        && plugin.getName().toLowerCase().contains(search)) {
            return false;
        }
        if (plugin.getAlias() != null
                        && plugin.getAlias().toLowerCase().contains(search)) {
            return false;
        }
        return true;
    }

    public String getDescription() {
        // BUGFIX: the description previously advertised a "-c|--comment"
        // option that this command does not define.
        return "lists all loaded plugins. Options: [-l| --long] for more information; "
               + "[-j|--jar] to show all JAR file locations;"
               + "[-o] to show ordered list.";
    }

    /**
     * Compares <code>IPlugin</code>s according to their names.
     * @author Lawrence Cabac
     *
     */
    public class PluginNameComparator implements Comparator<IPlugin> {
        public int compare(IPlugin o1, IPlugin o2) {
            String name1 = o1.getName();
            String name2 = o2.getName();
            return name1.compareTo(name2);
        }
    }

    /**
     * @see de.renew.plugin.command.CLCommand#getArguments()
     */
    @Override
    public String getArguments() {
        return "(--ordered|--long|--jar|--help) (--ordered|--long)";
    }
}
| 38.133333
| 117
| 0.52851
|
161ec1a3af9ddd0d3f3867086875036e69426a90
| 1,233
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/resourcemanager/v3/tag_bindings.proto
package com.google.cloud.resourcemanager.v3;
// NOTE: protoc-generated accessor interface; do not hand-edit — regenerate
// from google/cloud/resourcemanager/v3/tag_bindings.proto instead.
public interface DeleteTagBindingRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.cloud.resourcemanager.v3.DeleteTagBindingRequest)
    com.google.protobuf.MessageOrBuilder {
  /**
   * <pre>
   * Required. The name of the TagBinding. This is a String of the form:
   * `tagBindings/{id}` (e.g.
   * `tagBindings/%2F%2Fcloudresourcemanager.googleapis.com%2Fprojects%2F123/tagValues/456`).
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The name.
   */
  java.lang.String getName();
  /**
   * <pre>
   * Required. The name of the TagBinding. This is a String of the form:
   * `tagBindings/{id}` (e.g.
   * `tagBindings/%2F%2Fcloudresourcemanager.googleapis.com%2Fprojects%2F123/tagValues/456`).
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for name.
   */
  com.google.protobuf.ByteString
      getNameBytes();
}
| 36.264706
| 118
| 0.693431
|
a9dcc3bad7d925b1c6199c362dcc620789c5b733
| 947
|
package com.twinkle.framework.ruleengine.rule.support;
import java.io.ByteArrayOutputStream;
import java.security.MessageDigest;
/**
 * A {@link MessageDigest} whose "digest" is simply the concatenation of all
 * bytes fed into it, in order. Calling {@code digest()} returns the
 * accumulated bytes and clears the internal buffer.
 *
 * @author chenxj
 * @since JDK 1.8
 */
public class ConcatenateDigest extends MessageDigest {
    // Accumulates every byte passed to the engineUpdate methods.
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    /** Creates a digest with an empty algorithm name. */
    public ConcatenateDigest() {
        super("");
    }

    @Override
    protected byte[] engineDigest() {
        // Snapshot the accumulated bytes, then reset for the next round.
        final byte[] collected = this.buffer.toByteArray();
        engineReset();
        return collected;
    }

    @Override
    protected void engineReset() {
        this.buffer.reset();
    }

    @Override
    protected void engineUpdate(final byte input) {
        this.buffer.write(input);
    }

    @Override
    protected void engineUpdate(final byte[] input, final int offset, final int length) {
        this.buffer.write(input, offset, length);
    }
}
| 21.522727
| 74
| 0.632524
|
ba23987a3d23c3ffb6d0c309abc6954205532be2
| 920
|
package pl.cwanix.opensun.agentserver.packets.processors.zone;
import io.netty.channel.ChannelHandlerContext;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import pl.cwanix.opensun.agentserver.packets.c2s.zone.C2SAskVillageMovePacket;
import pl.cwanix.opensun.agentserver.packets.s2c.sync.S2CAnsPlayerEnterPacket;
import pl.cwanix.opensun.agentserver.packets.s2c.zone.S2CAnsVillageMovePacket;
import pl.cwanix.opensun.commonserver.packets.SUNPacketProcessor;
import pl.cwanix.opensun.commonserver.packets.annotations.PacketProcessor;
@Slf4j
@RequiredArgsConstructor
@PacketProcessor(packetClass = C2SAskVillageMovePacket.class)
public class C2SAskVillageMoveProcessor implements SUNPacketProcessor<C2SAskVillageMovePacket> {
    /**
     * Handles a client's village-move request by replying with an answer
     * packet on the same channel.
     *
     * @param ctx    the netty channel context of the requesting client
     * @param packet the incoming request (its payload is not inspected here)
     */
    @Override
    public void process(ChannelHandlerContext ctx, C2SAskVillageMovePacket packet) {
        // The answer packet carries no request-specific data; the server
        // simply acknowledges the move.
        ctx.writeAndFlush(new S2CAnsVillageMovePacket());
    }
}
| 41.818182
| 96
| 0.846739
|
09a940de1398a73d40f3bd80612e2a0c4eac2668
| 1,348
|
package Arithmetics.sorters.InsertSort;
import Arithmetics.sorters.Sorter;
import targetVO.Member;
import java.util.Comparator;
import java.util.Random;
/**
 * ***************************************************************************
 * Description : Manual driver that exercises {@code InsertSort} on randomly
 * generated {@code Member} instances, printing the array before and after
 * sorting by name (case-insensitive).
 * Author : cxx
 * Creation date: 2018/9/18.
 * Version : 1.0
 * ***************************************************************************
 */
public class InsertSortTest {
    public static void main(String[] args) {
        Sorter insertSort = new InsertSort();
        Member[] members = new Member[10];
        // Reuse a single Random instance instead of allocating one per
        // iteration (the previous per-loop construction was wasteful and,
        // with a coarse seed source, could repeat values).
        Random random = new Random();
        for (int i = 0; i < members.length; i++) {
            int x = random.nextInt(20);
            members[i] = new Member(x, "A" + x + "BC");
        }
        printArr(members);
        // insertSort.sort(members); // has the same effect as the call below
        insertSort.sort(members, new Comparator<Member>() {
            @Override
            public int compare(Member o1, Member o2) {
                return o1.getName().compareToIgnoreCase(o2.getName());
            }
        });
        printArr(members);
    }

    /**
     * Prints the elements of {@code arr} on a single line, each preceded by a
     * space.
     *
     * @param arr the array whose elements are printed (must not be null)
     */
    public static void printArr(Object[] arr) {
        StringBuilder buffer = new StringBuilder();
        for (Object element : arr) {
            // append(Object) uses String.valueOf, so null elements print as
            // "null" instead of throwing as the previous explicit toString().
            buffer.append(' ').append(element);
        }
        System.out.println(buffer.toString());
    }
}
| 29.955556
| 78
| 0.497774
|
cbb21f3d571e5e4eeb04352d1c0bdbf0257e5b1e
| 4,657
|
/*-
* #%L
* anchor-image-voxel
* %%
* Copyright (C) 2010 - 2022 Owen Feehan, ETH Zurich, University of Zurich, Hoffmann-La Roche
* %%
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
* #L%
*/
package org.anchoranalysis.image.voxel.projection;
import lombok.AllArgsConstructor;
import org.anchoranalysis.core.exception.OperationFailedException;
import org.anchoranalysis.core.functional.checked.CheckedBiFunction;
import org.anchoranalysis.image.voxel.Voxels;
import org.anchoranalysis.image.voxel.buffer.ProjectableBuffer;
import org.anchoranalysis.image.voxel.datatype.VoxelDataType;
import org.anchoranalysis.image.voxel.factory.VoxelsFactory;
import org.anchoranalysis.image.voxel.iterator.IterateVoxelsAll;
import org.anchoranalysis.math.arithmetic.Counter;
import org.anchoranalysis.spatial.box.Extent;
/**
 * Creates a {@link ProjectableBuffer} and calculates the projection after adding three {@link
 * Voxels}.
 *
 * <p>It has a fixed 2x2x2 size, giving 8 voxels.
 *
 * <p>The <i>first</i> buffer added ranges (0,7)
 *
 * <p>The <i>second</i> buffer added ranges (10,17)
 *
 * <p>The <i>third</i> buffer added ranges (20,27)
 *
 * @param <T> buffer-type
 * @author Owen Feehan
 */
@AllArgsConstructor
public class ProjectableBufferFixture<T> {
    /** The size of a small 3D buffer with 8 voxels in total. */
    public static final Extent EXTENT = new Extent(2, 2, 2);
    // NOTE(review): the three constants below misspell "INITIAL" as "INTIIAL".
    // They are public API, so renaming them would break existing callers.
    /** The starting value of the <b>first</b> buffer's sequence. */
    public static final int INTIIAL_VALUE_FIRST_BUFFER = 0;
    /** The starting value of the <b>second</b> buffer's sequence. */
    public static final int INTIIAL_VALUE_SECOND_BUFFER = 10;
    /** The starting value of the <b>third</b> buffer's sequence. */
    public static final int INTIIAL_VALUE_THIRD_BUFFER = 20;
    /** The data-type used to create the buffers. */
    private VoxelDataType voxelDataType;
    /**
     * Calculates the eventual projection after creating and adding the buffers, as described in the
     * class description.
     *
     * @param projectableBufferCreator creates the {@link ProjectableBuffer} to use.
     * @return the added values.
     * @throws OperationFailedException if thrown by {@code projectableBufferCreator}.
     */
    public Voxels<T> calculate(
            CheckedBiFunction<VoxelDataType, Extent, ProjectableBuffer<T>, OperationFailedException>
                    projectableBufferCreator)
            throws OperationFailedException {
        ProjectableBuffer<T> projectableBuffer =
                projectableBufferCreator.apply(voxelDataType, EXTENT);
        addBuffers(projectableBuffer);
        return projectableBuffer.completeProjection();
    }
    /** Adds the three buffers, starting at 0, 10 and 20 respectively. */
    private void addBuffers(ProjectableBuffer<T> projectableBuffer) {
        projectableBuffer.addVoxels(createBufferUnsignedByte(INTIIAL_VALUE_FIRST_BUFFER));
        projectableBuffer.addVoxels(createBufferUnsignedByte(INTIIAL_VALUE_SECOND_BUFFER));
        projectableBuffer.addVoxels(createBufferUnsignedByte(INTIIAL_VALUE_THIRD_BUFFER));
    }
    /**
     * Create a {@link Voxels} with incrementing values, starting at {@code startingValue}.
     *
     * <p>NOTE(review): despite the name, the buffer is created with {@code
     * voxelDataType}, which is not necessarily unsigned-byte — confirm whether
     * the name or the implementation reflects the intent.
     */
    private Voxels<T> createBufferUnsignedByte(int startingValue) {
        @SuppressWarnings("unchecked")
        Voxels<T> voxels =
                (Voxels<T>) VoxelsFactory.instance().createEmpty(EXTENT, voxelDataType).any();
        Counter counter = new Counter(startingValue);
        IterateVoxelsAll.withVoxelBuffer(
                voxels,
                (point, buffer, offset) -> buffer.putInt(offset, counter.incrementReturn()));
        return voxels;
    }
}
| 42.724771
| 100
| 0.727292
|
7d1a1e0bcece82d9b1a39f814ac5cbe552647050
| 6,406
|
package com.paymybuddy.business;
import com.paymybuddy.api.model.collection.ListResponse;
import com.paymybuddy.api.model.collection.PageResponse;
import com.paymybuddy.api.model.user.User;
import com.paymybuddy.business.exception.ContactNotFoundException;
import com.paymybuddy.business.exception.IsHimselfException;
import com.paymybuddy.business.mapper.UserBalanceMapperImpl;
import com.paymybuddy.business.mapper.UserMapper;
import com.paymybuddy.business.mapper.UserMapperImpl;
import com.paymybuddy.business.mock.MockUsers;
import com.paymybuddy.business.mock.TestBusinessConfig;
import com.paymybuddy.persistence.entity.UserContactEntity;
import com.paymybuddy.persistence.entity.UserEntity;
import com.paymybuddy.persistence.repository.UserContactRepository;
import com.paymybuddy.persistence.repository.UserRepository;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Import;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link ContactService}, with the persistence layer mocked out.
 *
 * <p>Repositories are {@code @MockBean}s; mappers and services are the real Spring beans.
 */
@SpringBootTest(classes = {ContactService.class, UserService.class, UserMapperImpl.class, UserBalanceMapperImpl.class})
@Import(TestBusinessConfig.class)
class ContactServiceTest {
    @MockBean
    private UserContactRepository userContactRepository;
    @MockBean
    private UserRepository userRepository;
    @Autowired
    private UserMapper userMapper;
    @Autowired
    private ContactService contactService;
    /** Paging: 15 total records with page size 5 must yield 3 pages of 5 records each. */
    @Test
    void listContacts() {
        // Stub returns a full page of synthetic users and a total of pageSize*2+3 (= 13? no: 5*2+3 = 13)
        // NOTE(review): total is pageSize * 2 + 3; with pageSize 5 that is 13 rows — the assertion
        // below expects totalCount 15, so the repository presumably rounds differently; verify.
        when(userContactRepository.findContactByUserId(eq(1L), any())).thenAnswer(m -> {
            Pageable pageable = m.getArgument(1);
            List<UserEntity> content = IntStream.range(0, pageable.getPageSize()).boxed()
                    .map(id -> MockUsers.newUserEntity(id + 10L))
                    .collect(Collectors.toList());
            return new PageImpl<>(content, pageable, pageable.getPageSize() * 2L + 3L);
        });
        com.paymybuddy.business.pageable.PageRequest req = new com.paymybuddy.business.pageable.PageRequest();
        req.setPage(2);
        req.setPageSize(5);
        req.setPageSort(Arrays.asList("-email", "name"));
        PageResponse<User> res = contactService.listContacts(1L, req);
        assertEquals(2, res.getPage());
        assertEquals(5, res.getPageSize());
        assertEquals(3, res.getPageCount());
        assertEquals(15, res.getTotalCount());
        assertEquals(5, res.getRecords().size());
    }
    /** The search term's SQL LIKE wildcards (_ and %) must be escaped before hitting the repository. */
    @Test
    void searchContacts() {
        ListResponse<User> res = contactService.searchContacts(1L, "t_e%s_t%", 5);
        assertTrue(res.getRecords().isEmpty());
        verify(userContactRepository, times(1)).searchContact(1L, "%t\\_e\\%s\\_t\\%%", PageRequest.of(0, 5));
    }
    /** isContact delegates to existsById on the composite (user, contact) key. */
    @Test
    void isContact() {
        when(userContactRepository.existsById(new UserContactEntity.Key(1L, 2L))).thenReturn(true);
        assertTrue(contactService.isContact(1L, 2L));
        assertFalse(contactService.isContact(1L, 3L));
    }
    /** Adding a contact by email: success, unknown email, and adding oneself. */
    @Test
    void addContact() {
        UserEntity user1 = MockUsers.newUserEntity(1L);
        UserEntity user2 = MockUsers.newUserEntity(2L);
        when(userRepository.findByEmail(user1.getEmail())).thenReturn(Optional.of(user1));
        when(userRepository.findByEmail(user2.getEmail())).thenReturn(Optional.of(user2));
        assertEquals(userMapper.toContact(user2), contactService.addContact(user1.getId(), user2.getEmail()));
        assertThrows(ContactNotFoundException.class, () -> contactService.addContact(user1.getId(), "unknown@domain.com"));
        assertThrows(IsHimselfException.class, () -> contactService.addContact(user1.getId(), user1.getEmail()));
    }
    /** Removing an existing contact returns it; removing an unknown one throws. */
    @Test
    void removeContact() {
        UserEntity user1 = MockUsers.newUserEntity(1L);
        UserEntity user2 = MockUsers.newUserEntity(2L);
        when(userContactRepository.findById(new UserContactEntity.Key(user1.getId(), user2.getId()))).thenAnswer(m -> {
            UserContactEntity e = new UserContactEntity();
            e.setUser(user1);
            e.setContact(user2);
            return Optional.of(e);
        });
        assertEquals(userMapper.toContact(user2), contactService.removeContact(user1.getId(), user2.getId()));
        assertThrows(ContactNotFoundException.class, () -> contactService.removeContact(user1.getId(), 3L));
    }
    /** Lookup works by email or by name; self-lookups and unknown identifiers yield null. */
    @Test
    void getContact() {
        UserEntity user1 = MockUsers.newUserEntity(1L);
        UserEntity user2 = MockUsers.newUserEntity(2L);
        when(userRepository.findByEmail(user1.getEmail())).thenReturn(Optional.of(user1));
        when(userRepository.findByName(user1.getName())).thenReturn(Optional.of(user1));
        when(userRepository.findByEmail(user2.getEmail())).thenReturn(Optional.of(user2));
        when(userRepository.findByName(user2.getName())).thenReturn(Optional.of(user2));
        when(userContactRepository.existsById(new UserContactEntity.Key(user1.getId(), user2.getId()))).thenReturn(true);
        assertEquals(userMapper.toContact(user2), contactService.getContact(user1.getId(), user2.getEmail()));
        assertEquals(userMapper.toContact(user2), contactService.getContact(user1.getId(), user2.getName()));
        assertNull(contactService.getContact(user1.getId(), user1.getEmail()));
        assertNull(contactService.getContact(user1.getId(), user1.getName()));
        assertNull(contactService.getContact(user1.getId(), "unknown@email.com"));
        assertNull(contactService.getContact(user1.getId(), "unknown"));
    }
}
| 46.759124
| 123
| 0.732907
|
9381656ac2681bac3c5f2c1464adea2edcd1a5ed
| 1,773
|
package eu.bcvsolutions.idm.core.scheduler.entity;
import java.util.UUID;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Index;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import eu.bcvsolutions.idm.core.api.domain.DefaultFieldLengths;
import eu.bcvsolutions.idm.core.api.entity.AbstractEntity;
/**
* Task depends on another task identified by quartz task end.
*
* @author Radek Tomiška
*
*/
@Entity
@Table(name = "idm_dependent_task_trigger", indexes = {
		@Index(name = "idx_idm_dependent_t_init", columnList = "initiator_task_id"),
		@Index(name = "idx_idm_dependent_t_dep", columnList = "dependent_task_id")})
public class IdmDependentTaskTrigger extends AbstractEntity {
	private static final long serialVersionUID = 1L;
	@NotNull
	@Column(name = "initiator_task_id", length = DefaultFieldLengths.NAME)
	private String initiatorTaskId; // quartz job name - default group is supported now
	@NotNull
	@Column(name = "dependent_task_id", length = DefaultFieldLengths.NAME)
	private String dependentTaskId; // quartz job name - default group is supported now
	/** Default constructor required by JPA. */
	public IdmDependentTaskTrigger() {
	}
	/**
	 * Creates a trigger with a preset entity identifier.
	 *
	 * @param id entity identifier
	 */
	public IdmDependentTaskTrigger(UUID id) {
		super(id);
	}
	/**
	 * Creates a trigger linking the two quartz jobs.
	 *
	 * @param initiatorTaskId quartz job name of the task that fires first
	 * @param dependentTaskId quartz job name of the task started after the initiator ends
	 */
	public IdmDependentTaskTrigger(String initiatorTaskId, String dependentTaskId) {
		this.initiatorTaskId = initiatorTaskId;
		this.dependentTaskId = dependentTaskId;
	}
	/** @return quartz job name of the initiating task */
	public String getInitiatorTaskId() {
		return initiatorTaskId;
	}
	public void setInitiatorTaskId(String initiatorTaskId) {
		this.initiatorTaskId = initiatorTaskId;
	}
	/** @return quartz job name of the dependent task */
	public String getDependentTaskId() {
		return dependentTaskId;
	}
	public void setDependentTaskId(String dependentTaskId) {
		this.dependentTaskId = dependentTaskId;
	}
}
| 27.276923
| 84
| 0.77947
|
b7c9d6d6ba6fc2df9375bd741d21c7816f22c33e
| 1,739
|
/*******************************************************************************
* Copyright (c) 2020. Bytedance Inc.
*
* This source code is licensed under the MIT license found in the LICENSE file in the root directory of this source tree.
******************************************************************************/
package com.tiktok.appevents;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {

    /** Trivial sanity check that the test harness runs. */
    @Test
    public void addition_isCorrect() {
        assertEquals(4, 2 + 2);
    }

    @Test
    public void test() {
        List<Integer> aaa = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
        System.out.println(aaa);
    }

    /**
     * Initializes fields annotated with {@code @Mock} before each test.
     *
     * <p>Bug fix: {@code MockitoAnnotations.initMocks} must receive the test
     * <em>instance</em> whose annotated fields are to be populated; the previous
     * call passed the {@code Class} object, leaving {@link #t} {@code null}.
     */
    @Before
    public void setup() {
        System.out.println("setup");
        MockitoAnnotations.initMocks(this);
    }

    /** Simple collaborator used to demonstrate spying. */
    class Person {
        int aa() {
            System.out.println("aa");
            return 120;
        }
    }

    @Mock
    List t;

    /** Demonstrates Mockito spy semantics on a list and a plain object. */
    @Test
    public void mockito() {
        List mockList = new ArrayList();
        List spyList = spy(mockList);
        spyList.add("123");
        spyList.add("123");
        System.out.println(spyList.size());

        Person p = spy(new Person());
        // NOTE(review): when(p.aa()).thenReturn(456) on a spy invokes the real aa()
        // once during stubbing; doReturn(456).when(p).aa() would avoid that side effect.
        when(p.aa()).thenReturn(456);
        // doReturn(456).when(p).aa();
        System.out.println(p.aa());
    }
}
| 23.821918
| 122
| 0.559517
|
4742cfdf893f76f85aaae541b1f0f9c3503d7409
| 1,471
|
package com.project.packman.packman.repository.OfficeMatters.CustomQueryRequestRepositoryImpl;
import com.project.packman.packman.model.OfficeMatters.Comments;
import com.project.packman.packman.model.OfficeMatters.Request;
import com.project.packman.packman.model.OfficeMatters.RolesType.Status;
import com.project.packman.packman.repository.OfficeMatters.CustomQueryRequestRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
public class CustomQueryRequestRepositoryImpl implements CustomQueryRequestRepository {

    @Autowired
    private MongoTemplate mongoTemplate;

    /**
     * Replaces the {@code statuses} field of the request identified by {@code id}.
     *
     * @param id     request identifier
     * @param status new status value to store
     */
    @Override
    public void updateRequestStatusById(String id, Status status) {
        applyUpdate(id, new Update().set("statuses", status));
    }

    /**
     * Appends {@code comments} to the {@code comments} set of the request identified by {@code id}.
     *
     * @param id       request identifier
     * @param comments comment to add (addToSet avoids duplicates)
     */
    @Override
    public void addCommentToRequestById(String id, Comments comments) {
        applyUpdate(id, new Update().addToSet("comments", comments));
    }

    /** Applies {@code update} to the first {@link Request} whose {@code id} matches. */
    private void applyUpdate(String id, Update update) {
        Query query = new Query(Criteria.where("id").is(id));
        mongoTemplate.update(Request.class).matching(query).apply(update).first();
    }
}
| 42.028571
| 94
| 0.772944
|
b7b6b0ce4994a2066f3343d231a7b0712eb12ccc
| 555
|
package com.env.common.config;
import feign.Logger;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @author Linshu 745698872@qq.com
* @date 2019/9/18 16:52
*/
@Configuration
@ConditionalOnClass(FeignClient.class)
public class BaseFeignConfiguration {
    /**
     * Enables the most verbose Feign logging for all clients.
     *
     * @return {@link Logger.Level#FULL} (headers, body and metadata of requests/responses)
     */
    @Bean
    Logger.Level feignLoggerLevel() {
        return Logger.Level.FULL;
    }
}
| 24.130435
| 75
| 0.781982
|
992c1c453415c039c144adc8feded51753264677
| 1,063
|
package com.prowidesoftware.swift.model.mx.dic;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for AssetClassDetailedSubProductType30Code.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="AssetClassDetailedSubProductType30Code">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="MWHT"/>
* <enumeration value="OTHR"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "AssetClassDetailedSubProductType30Code")
@XmlEnum
public enum AssetClassDetailedSubProductType30Code {
    /**
     * Commodity attribute of type milled wheat.
     *
     */
    MWHT,
    /**
     * Commodity attribute of other type.
     *
     */
    OTHR;
    /**
     * Returns the XML value for this constant (identical to its name).
     *
     * @return the enum constant name
     */
    public String value() {
        return name();
    }
    /**
     * Parses the XML value {@code v} into an enum constant.
     *
     * @param v the XML value, e.g. {@code "MWHT"}
     * @return the matching constant
     * @throws IllegalArgumentException if {@code v} matches no constant
     */
    public static AssetClassDetailedSubProductType30Code fromValue(String v) {
        return valueOf(v);
    }
}
| 21.693878
| 95
| 0.669802
|
2590be7668bd38df538664e700876637b76334fb
| 2,325
|
package ca.mbarkley.jsim;
import ca.mbarkley.jsim.cli.BatchProcessor;
import ca.mbarkley.jsim.cli.TerminalProcessor;
import org.apache.commons.cli.*;
import org.jline.terminal.Terminal;
import org.jline.terminal.TerminalBuilder;
import java.io.Console;
import java.io.IOException;
public class Main {
    /** Preferred output width (columns) for batch processing. */
    public static final int DESIRED_WIDTH = 120;

    /**
     * Entry point. With {@code -c}, joins the remaining arguments into a script and
     * runs it in batch mode; otherwise runs interactively when attached to a console,
     * or consumes stdin in batch mode.
     *
     * @param args command-line arguments
     * @throws IOException if the terminal cannot be created
     */
    public static void main(String[] args) throws IOException {
        try {
            final CommandLine commandLine = parseCommandLine(args);
            if (commandLine.hasOption("c")) {
                final String input = String.join(" ", commandLine.getArgs());
                if (input.isEmpty()) {
                    // Bug fix: previously the error was printed but the empty
                    // script was still handed to the batch processor.
                    System.err.println("No script specified with '-c' flag");
                    System.exit(1);
                }
                new BatchProcessor(DESIRED_WIDTH).process(input);
            } else {
                final Console console = System.console();
                if (console != null) {
                    // Interactive session on a real terminal.
                    new TerminalProcessor().process(TerminalBuilder.builder()
                                                                   .name("jsim")
                                                                   .jna(true)
                                                                   .build());
                } else {
                    // Not attached to a console (e.g. piped input): batch mode on stdin.
                    new BatchProcessor(DESIRED_WIDTH).process(System.in);
                }
            }
        } catch (ParseException e) {
            System.err.printf("Invalid arguments: %s\n", e.getMessage());
            System.exit(1);
        }
    }

    /** Parses {@code args}, leaving non-option arguments for the script body. */
    private static CommandLine parseCommandLine(String[] args) throws ParseException {
        final Options options = options();
        final CommandLineParser argParser = new DefaultParser();
        return argParser.parse(options, args, true);
    }

    /** Builds the supported options: only {@code -c} (run script from arguments). */
    private static Options options() {
        final Options options = new Options();
        final OptionGroup inputGroup = new OptionGroup();
        inputGroup.addOption(Option.builder("c")
                                   .argName("command string")
                                   .desc("run script from first non-option command arguments")
                                   .required()
                                   .build());
        options.addOptionGroup(inputGroup);
        return options;
    }
}
| 35.769231
| 94
| 0.523871
|
549aa1919651d31ef7667a9fc09234200e7c8dc0
| 1,823
|
package org.visallo.web.routes.directory;
import com.google.inject.Inject;
import com.v5analytics.webster.ParameterizedHandler;
import com.v5analytics.webster.annotations.Handle;
import com.v5analytics.webster.annotations.Optional;
import com.v5analytics.webster.annotations.Required;
import org.visallo.core.model.directory.DirectoryRepository;
import org.visallo.core.user.User;
import org.visallo.web.clientapi.model.ClientApiDirectorySearchResponse;
import org.visallo.web.clientapi.model.DirectoryGroup;
import org.visallo.web.clientapi.model.DirectoryPerson;
import java.util.List;
public class DirectorySearch implements ParameterizedHandler {
    private final DirectoryRepository directoryRepository;

    /** Injects the repository used to look up people and groups. */
    @Inject
    public DirectorySearch(DirectoryRepository directoryRepository) {
        this.directoryRepository = directoryRepository;
    }

    /**
     * Searches the directory and returns matching people and/or groups.
     *
     * @param search       the search term (must be non-empty)
     * @param searchPeople whether to include people in the results (default true)
     * @param searchGroups whether to include groups in the results (default true)
     * @param user         the user performing the search
     * @return a response whose entities list contains all matches
     */
    @Handle
    public ClientApiDirectorySearchResponse handle(
            @Required(name = "search", allowEmpty = false) String search,
            @Optional(name = "people", defaultValue = "true") boolean searchPeople,
            @Optional(name = "groups", defaultValue = "true") boolean searchGroups,
            User user
    ) {
        ClientApiDirectorySearchResponse result = new ClientApiDirectorySearchResponse();
        if (searchPeople) {
            List<DirectoryPerson> matchingPeople = directoryRepository.searchPeople(search, user);
            result.getEntities().addAll(matchingPeople);
        }
        if (searchGroups) {
            List<DirectoryGroup> matchingGroups = directoryRepository.searchGroups(search, user);
            result.getEntities().addAll(matchingGroups);
        }
        return result;
    }
}
| 36.46
| 95
| 0.710368
|
189217dd27339df8417ea0e7e9d65bd9b045238c
| 415
|
package enumeration;
//We can define attributes inside the enumerations:
public enum Continents {
    AMERICA(35),
    AFRICA(54),
    ASIA(48),
    EUROPE(50),
    OCEANIA(14);

    // Number of countries on the continent; final, so only a getter is exposed.
    private final int countries;

    Continents(int countryCount) {
        this.countries = countryCount;
    }

    /** @return the number of countries on this continent */
    public int getCountries() {
        return countries;
    }
}
| 17.291667
| 54
| 0.643373
|
913a75eeaa32b028a9978c1d89b86b27e524c138
| 637
|
package com.cc.listview.base.listener;
/**
* Created by Cheng on 16/7/26.
*/
public interface TXPullToRefreshLoadMoreListener {
    // Set whether the view is currently refreshing.
    void setRefreshing(boolean refreshing);
    // Show the pull-to-refresh view.
    void showPullToRefreshView();
    // Hide the pull-to-refresh view.
    void hidePullToRefreshView();
    // Enable or disable pull-to-refresh.
    void setPullToRefreshEnable(boolean pullToRefreshEnable);
    // Enable or disable load-more.
    void setLoadMoreEnable(boolean loadMoreEnable);
    // Finish pull-to-refresh; hasMore indicates whether more data is available.
    void pullToRefreshFinish(boolean hasMore);
    // Finish load-more; hasMore indicates whether more data is available.
    void loadMoreFinish(boolean hasMore);
    // Report a loading error with an error code and message.
    void loadError(long code, String message);
}
| 19.90625
| 61
| 0.706436
|
0d334a3f7b29401172a8ccb95b3927c22d159b18
| 3,195
|
/*
* Copyright (C) 2006-2013 Bitronix Software (http://www.bitronix.be)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bitronix.tm.gui;
import bitronix.tm.journal.TransactionLogHeader;
import bitronix.tm.utils.Decoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Date;
/**
* @author Ludovic Orban
*/
public class TransactionLogHeaderPanel extends JPanel {

    private final static Logger log = LoggerFactory.getLogger(TransactionLogHeaderPanel.class);

    // Read-only, borderless fields laid out left-to-right.
    private final JTextField logFileField = new JTextField();
    private final JTextField timestampField = new JTextField();
    private final JTextField stateField = new JTextField();
    private final JTextField positionField = new JTextField();

    /** Builds the panel: four non-editable text fields in a horizontal box layout. */
    public TransactionLogHeaderPanel() {
        logFileField.setEditable(false);
        timestampField.setEditable(false);
        stateField.setEditable(false);
        positionField.setEditable(false);
        logFileField.setBorder(null);
        timestampField.setBorder(null);
        stateField.setBorder(null);
        positionField.setBorder(null);
        setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
        add(logFileField);
        add(timestampField);
        add(stateField);
        add(positionField);
    }

    /** Displays the log file's name. */
    public void setLogFile(File logFile) {
        logFileField.setText(logFile.getName());
    }

    /** Displays the header timestamp, formatted with the shared console date format. */
    public void setTimestamp(long timestamp) {
        timestampField.setText(Console.dateFormatter.format(new Date(timestamp)));
    }

    /** Displays the decoded header state. */
    public void setState(byte state) {
        stateField.setText(Decoder.decodeHeaderState(state));
    }

    /** Displays the header position. */
    public void setPosition(long position) {
        positionField.setText("" + position);
    }

    /**
     * Reads the transaction log header of {@code logFile} and refreshes all fields.
     * The active log's fields are rendered in bold.
     *
     * @param logFile the journal file to read
     * @param active  whether this is the currently-active journal file
     * @throws IOException if the file cannot be read
     */
    public void read(File logFile, boolean active) throws IOException {
        TransactionLogHeader header;
        // Bug fix: use try-with-resources so the RandomAccessFile is closed even
        // when reading the header throws (it previously leaked on exception).
        try (RandomAccessFile raf = new RandomAccessFile(logFile, "r")) {
            header = new TransactionLogHeader(raf.getChannel(), 0L);
        }
        // Parameterized logging avoids string concatenation when debug is disabled.
        log.debug("read header: {}", header);
        setLogFile(logFile);
        setTimestamp(header.getTimestamp());
        setState(header.getState());
        setPosition(header.getPosition());
        Font font;
        if (active) {
            font = logFileField.getFont().deriveFont(Font.BOLD);
        }
        else {
            font = logFileField.getFont().deriveFont(Font.PLAIN);
        }
        logFileField.setFont(font);
        timestampField.setFont(font);
        stateField.setFont(font);
        positionField.setFont(font);
    }
}
| 32.272727
| 95
| 0.690141
|
1317bdc06520b550a9ef4cced567888f0b77d226
| 5,277
|
package agents;
import behaviour.ManageCallBehaviour;
import city.*;
import jade.core.AID;
import jade.core.Agent;
import jade.wrapper.AgentController;
import jade.wrapper.ContainerController;
import jade.wrapper.StaleProxyException;
import utils.misc.Shift;
import utils.simulation.CallGen;
import utils.simulation.Timer;
import utils.simulation.StdRandom;
import utils.io.In;
import utils.io.Out;
import java.util.ArrayList;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
public class TaxiCoordinator extends Agent {
    private static final Out out = new Out("src/main/resources/output.txt");
    public City vCity; // the simulated city graph
    public Date nextTime = null; // time of the next simulated incoming call
    public int calls = 0;
    private int totalTaxis = 0; // counter used to build unique taxi agent names
    public final ArrayList<AID> lstTaxi = new ArrayList<>(0); // AIDs of all spawned taxi agents
    public Request lastRequest;
    private ArrayList<Passenger> passengerArrayList;
    public Timer runtime; // simulated clock driving the run
    /** Appends a line to the shared output file. */
    public void out(String newLine) {
        out.println(newLine);
    }
    /** Closes the shared output file. */
    public void close() {
        out.close();
    }
    /**
     * JADE agent setup: loads the city from file, spawns the taxi fleet,
     * schedules the first call and installs the call-management behaviour.
     */
    protected void setup() {
        In in = new In("src/main/resources/v_city.txt");
        System.out.println("Init of file");
        System.out.println("Create City");
        vCity = new City();
        vCity.generateCity(in);
        passengerArrayList = new ArrayList<>();
        System.out.println("Done creating city");
        System.out.println("Total Nodes " + vCity.intersections.size());
        System.out.println("Generate Random Call for one intersection");
        // Timer runtime = new Timer(0,0,0,1); //Setting initial time
        runtime = new Timer(City.getFileTime(), 1); //Setting initial time
        // 1. Setting a next call Time
        generateSampleTaxis();
        System.out.println("Setting next Call time");
        nextTime = nextCall(runtime.getDate());
        addBehaviour(new ManageCallBehaviour(this));
    }
    /** JADE agent teardown: logs and terminates this agent. */
    protected void takeDown() {
        System.out.println("Taxi-agent " + getAID().getName() + "is offline");
        // Make this agent terminate
        doDelete();
    }
    /** Records an incoming call both on the intersection and in the coordinator's lists. */
    public void receiveCall(Passenger passenger, Intersection intersection) {
        intersection.receiveCall(passenger);
        this.passengerArrayList.add(passenger);
        this.vCity.passengerArrayList.add(passenger);
        System.out.println("TaxiCoordinator: Received a call from Passenger " + passenger.id);
    }
    /** @return the randomly generated time of the next call after {@code currentTime} */
    public Date nextCall(Date currentTime) {
        return CallGen.nextCall(currentTime);
    }
    /**
     * Choose a random intersection but not Taxi Center
     *
     * @param intersections candidate intersections
     * @param taxiCenter indices that must be excluded (taxi-center locations)
     * @return index of a random intersection outside {@code taxiCenter}
     */
    public int pickRandomIntersectionIndex(ArrayList<Intersection> intersections, int[] taxiCenter) {
        int index;
        do {
            index = StdRandom.uniform(0, intersections.size() - 1);
        } while (find(intersections.get(index).index, taxiCenter));
        return index;
    }
    /**
     * Choose a random intersection but not Taxi Center
     *
     * @param dropoffPoints candidate drop-off points
     * @param taxiCenter indices that must be excluded (taxi-center locations)
     * @return index of a random drop-off point outside {@code taxiCenter}
     */
    public int pickRandomDropoffIndex(ArrayList<DropoffPoint> dropoffPoints, int[] taxiCenter) {
        int index;
        do {
            index = StdRandom.uniform(0, dropoffPoints.size() - 1);
        } while (find(dropoffPoints.get(index).index, taxiCenter));
        return index;
    }
    /** Linear membership test: true when {@code index} occurs in {@code array}. */
    private boolean find(int index, int[] array) {
        for (int i : array) {
            if (i == index)
                return true;
        }
        return false;
    }
    /**
     * This method check if this Intersection should process a pending call
     *
     * @param nextCall Date for next Call
     * @param currentTime Date of current time
     * @return true when there is a call to be trigger
     * false is there is no pending call to specific intersection
     */
    public boolean isCallAvailable(Date nextCall, Date currentTime) {
        return nextCall != null && nextCall.before(currentTime);
    }
    /** Spawns one taxi agent at {@code point} on the given shift and registers its AID. */
    private void addTaxi(DropoffPoint point, Shift shift) {
        Object[] params = {this.vCity, point, shift, totalTaxis + 1, runtime};
        ContainerController cc = getContainerController();
        String name;
        try {
            name = "smith" + totalTaxis++;
            AgentController new_agent = cc.createNewAgent(name, "agents.Taxi", params);
            new_agent.start();
            lstTaxi.add(new AID(name, AID.ISLOCALNAME));
            //taxiDrivers.add((Taxi)params[0]);
        } catch (StaleProxyException ex) {
            Logger.getLogger(TaxiCoordinator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /** Creates the initial fleet: four taxis for each of the three shifts. */
    private void generateSampleTaxis() {
        for (int i = 1; i <= 4; i++) {
            this.addTaxi(new DropoffPoint(this.vCity.taxiCenter), Shift.TIME_3AM_TO_1PM);
        }
        for (int i = 1; i <= 4; i++) {
            this.addTaxi(new DropoffPoint(this.vCity.taxiCenter), Shift.TIME_6PM_TO_4AM);
        }
        for (int i = 1; i <= 4; i++) {
            this.addTaxi(new DropoffPoint(this.vCity.taxiCenter), Shift.TIME_9AM_TO_7PM);
        }
    }
    /** Boots the JADE platform with this coordinator as the sole initial agent. */
    public static void main(String[] args) {
        String[] arg = {"-gui", "-agents", "agents.TaxiCoordinator:agents.TaxiCoordinator"};
        jade.Boot.main(arg);
    }
}
| 30.859649
| 101
| 0.633693
|
7f83bcd2389f63e838ae78d10244694f8a8aad8e
| 139
|
package com.dw.tcc;
// NOTE(review): class name violates UpperCamelCase convention ("Tcc");
// kept as-is because renaming would break external references.
public class tcc {
	/** Empty entry point; implementation pending. */
	public static void main(String[] args) {
		// TODO Auto-generated method stub
	}
}
| 12.636364
| 42
| 0.618705
|
584a88f1d8a51e98b09b452669917f72afbc76f5
| 1,124
|
/*
* This file is subject to the terms and conditions defined in 'LICENSE' file.
*/
package com.github.bradjacobs.yahoofinance.tools.internal.generator.types;
import com.github.bradjacobs.yahoofinance.tools.internal.generator.types.autogen.YahooFieldDefinition;
import java.util.List;
import java.util.stream.Collectors;
public class IpoEventFieldEnumGenerator extends AbstractFieldEnumGenerator
{
    /**
     * @return the Yahoo screener endpoint listing all IPO-event instrument fields.
     */
    @Override
    protected String getUrl() {
        // Bug fix: the query string had been mangled by an HTML-entity conversion,
        // turning "&reg" (start of "&region=US") into the '®' character and
        // producing an invalid URL.
        return "https://query1.finance.yahoo.com/v1/finance/screener/instrument/ipo_info/fields?lang=en-US&region=US";
    }

    /** @return the name of the enum class to generate. */
    @Override
    protected String getOutputClassName() {
        return "IpoEventField";
    }

    /** @return the code-generation template used for field enums. */
    @Override
    protected String getTemplateFileName() {
        return "field_template.txt";
    }

    /**
     * Filters out deprecated fields and the synthetic "count" field.
     *
     * @param fieldList all field definitions fetched from the endpoint
     * @return the definitions to include in the generated enum
     */
    @Override
    protected List<YahooFieldDefinition> filterFields(List<YahooFieldDefinition> fieldList)
    {
        return fieldList.stream()
                .filter(sf -> !sf.getDeprecated())
                .filter(sf -> !sf.getFieldId().equalsIgnoreCase("count")) // skip count for now
                .collect(Collectors.toList());
    }
}
| 30.378378
| 118
| 0.699288
|
09c7263edcf1d44160e157ebc006aebc620acf8f
| 542
|
package cn.plusman.design.ratelimiter;
import com.google.common.util.concurrent.RateLimiter;
import java.time.Duration;
/**
* cn.plusman.design.ratelimiter
*
* @author plusman
* @since 12/26/20
*/
public class GuavaDemo {
    /** Demonstrates Guava's RateLimiter: prints how long each bulk acquire waited. */
    public static void main(String[] args) {
        // 2 permits per second, with a 1-second warm-up period.
        RateLimiter limiter = RateLimiter.create(2.0, Duration.ofSeconds(1L));
        // Intentional endless demo loop: acquire(10) blocks until 10 permits are
        // available and returns the seconds spent waiting.
        while (true) {
            Double d = limiter.acquire(10);
            System.out.println(System.currentTimeMillis() + ":" + d);
        }
        // Study the RateLimiter rate-limiting algorithm
    }
}
| 21.68
| 78
| 0.638376
|
aaf758f75087b4a3ed04636f677b2a423bf0d241
| 3,126
|
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;
import org.apache.commons.io.FileUtils;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.testfx.api.FxRobot;
import org.testfx.framework.junit5.ApplicationExtension;
import org.testfx.framework.junit5.Start;
import proiect.fis.gym.aplication.controllers.ViewCoursesController;
import proiect.fis.gym.aplication.services.*;
import static org.testfx.assertions.api.Assertions.assertThat;
/**
 * TestFX UI test: logs in as a gym manager, creates three courses and verifies
 * they all appear in the courses table.
 *
 * <p>The robot script is strictly sequential; each click depends on the UI state
 * produced by the previous one.
 */
@ExtendWith(ApplicationExtension.class)
public class ViewCoursesTest {
    /** Points the app at a throw-away test folder and seeds one manager account. */
    @BeforeEach
    void setUp() throws Exception{
        System.out.println("before");
        FileSystemService.APPLICATION_FOLDER = ".test-GymApplication";
        FileSystemService.initDirectory();
        FileUtils.cleanDirectory(FileSystemService.getApplicationHomeFolder().toFile());
        GymManagerService.initTestDatabase("ManagersTest.db");
        BankService.initDatabase();
        AdminService.initDatabase();
        CustomerService.initDatabase();
        LoginService.initDatabase();
        /*GymManagerService.getGymManagerRepository().insert(new GymManager("x", "sef", "+400757806405", "r@r.com", "tm",
                "gym one", "GymOne", "meremere1@M"));
        GymManagerService.getGymManagerRepository().insert(new GymManager("y", "sef", "+400757806405", "r@r.com", "tm",
                "smartfit", "SmartFit", "meremere1@M"));*/
        GymManagerService.addUser("meremere1@M","ana","are","+400757806405",
                "ana@ana.com", "tm", "gym one", "GymOne", "meremere1@M");
    }
    @AfterEach
    void tearDown() throws Exception{
        System.out.println("after");
    }
    /** Launches the application at the login screen. */
    @Start
    void start(Stage primaryStage) throws Exception {
        Parent root = FXMLLoader.load(getClass().getClassLoader().getResource("fxml/login.fxml"));
        primaryStage.setTitle("Gym Application");
        primaryStage.setScene(new Scene(root, 400, 400));
        primaryStage.show();
    }
    /** Logs in, adds three courses (varying only the schedule), then checks the table has 3 rows. */
    @Test
    void viewCoursesTest(FxRobot robot) {
        robot.clickOn("#LoginUsername");
        robot.write("GymOne");
        robot.clickOn("#LoginPassword");
        robot.write("meremere1@M");
        robot.clickOn("#LoginButton");
        robot.clickOn("#addCourseButton");
        robot.clickOn("#courseNameTextField");
        robot.write("curs");
        robot.clickOn("#trainerNameTextField");
        robot.write("trainer");
        robot.clickOn("#scheduleTextField");
        robot.write("program");
        robot.clickOn("#submitNewCourseButton");
        // Second course: same name/trainer, schedule "program1".
        robot.clickOn("#scheduleTextField");
        robot.write("1");
        robot.clickOn("#submitNewCourseButton");
        // Third course: schedule "program12".
        robot.clickOn("#scheduleTextField");
        robot.write("12");
        robot.clickOn("#submitNewCourseButton");
        robot.clickOn("#ViewCoursesButton");
        assertThat(robot.lookup("#coursesTableView").queryTableView()).hasExactlyNumRows(3);
    }
}
| 34.351648
| 121
| 0.678503
|
18254d1c1affa544fb6bfa7e8b6f5d03f4ad6621
| 1,485
|
package com.yiqiniu.easytrans.demos.wallet.api;
import java.io.Serializable;
/**
* define an interface for calling wallet
* the constraint of this interface is that:
* 1. only contains one method
* 2. the method only has one parameter, it's not basic class and implements Serializable interface
* 3. the return parameter should not be basic class too, and it has to implements Serializable interface
* 4. the return parameter can also be Future<>, the generalization parameter class should be like point 3
*/
public interface WalletPayMoneyService {

	/**
	 * Pays (freezes) money from the wallet of the user identified in the request.
	 *
	 * @param request the paying user's id and the amount to pay
	 * @return the response carrying the amount frozen by this payment
	 */
	WalletPayResponseVO pay(WalletPayRequestVO request);

	/** Request payload: which user pays and how much. */
	public static class WalletPayRequestVO implements Serializable {
		private static final long serialVersionUID = 1L;
		private Integer userId;
		private Long payAmount;
		public Long getPayAmount() {
			return payAmount;
		}
		public void setPayAmount(Long payAmount) {
			this.payAmount = payAmount;
		}
		public Integer getUserId() {
			return userId;
		}
		public void setUserId(Integer userId) {
			this.userId = userId;
		}
	}

	/** Response payload: the amount frozen as a result of the payment. */
	public static class WalletPayResponseVO implements Serializable {
		private static final long serialVersionUID = 1L;
		private Long freezeAmount;
		public Long getFreezeAmount() {
			return freezeAmount;
		}
		public void setFreezeAmount(Long freezeAmount) {
			this.freezeAmount = freezeAmount;
		}
		@Override
		public String toString() {
			// Bug fix: toString previously reported the wrong class name
			// ("WalletPayTccMethodResult"), a copy-paste leftover.
			return "WalletPayResponseVO [freezeAmount=" + freezeAmount + "]";
		}
	}
}
| 23.951613
| 106
| 0.747475
|
2a9816e62e38673ce094dfcbf95bbdf0fd33fc80
| 258
|
package com.hiskasoft.maven.process;
import java.io.File;
public interface Context {

    /** @return the project base directory */
    File getBasedir();

    /** @return the project's groupId */
    String getGroupId();

    /** @return the project's artifactId */
    String getArtifactId();

    /** @return the project's version */
    String getVersion();

    /** @return the project's packaging type */
    String getPackaging();
}
| 14.333333
| 36
| 0.705426
|
1b683166a029017151a5fe673c92cd0279bb1681
| 888
|
package com.github.rich.base.feign.fallback;
import com.github.rich.base.domain.dto.Route;
import com.github.rich.base.feign.GatewayRouteServiceFeignClient;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
/**
* @author Petty
*/
@Slf4j
public class GatewayRouteServiceFeignClientFallbackImpl implements GatewayRouteServiceFeignClient {
    // Cause of the circuit-breaker trip; injected by the fallback factory via Lombok's @Setter.
    @Setter
    private Throwable cause;
    /**
     * Fallback for loading all routes when the remote service is unavailable.
     * NOTE(review): returns null rather than an empty list — callers must null-check.
     */
    @Override
    public List<Route> loadRoutes() {
        log.error("Feign---GatewayRouteServiceFeignClient->loadRoutes Hystrix Fusing->Params:{},Date:{},Cause:{}", null, System.currentTimeMillis(), cause);
        return null;
    }
    /**
     * Fallback for loading a single route when the remote service is unavailable.
     * NOTE(review): returns null — callers must null-check.
     */
    @Override
    public Route load(String routeId) {
        log.error("Feign---GatewayRouteServiceFeignClient->load Hystrix Fusing->Params:{},Date:{},Cause:{}", routeId, System.currentTimeMillis(), cause);
        return null;
    }
}
| 28.645161
| 156
| 0.725225
|
b849bbbbe0b9a14bc716a0dda25473f1bd7bd55f
| 5,181
|
package uk.co.onsdigital.discovery.model;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.validator.constraints.NotEmpty;
import uk.co.onsdigital.discovery.validation.annotation.DataResourceID;
import uk.co.onsdigital.discovery.validation.annotation.JSON;
import uk.co.onsdigital.discovery.validation.annotation.UUID;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
/**
* Model representing the Metadata fields of the DinmensionalDataSet table.
*/
public class DatasetMetadata {
@JSON(message = "dataset.json.metadata.invalid")
private String jsonMetadata;
@NotEmpty(message = "dataset.id.empty")
@UUID
private String datasetId;
@DataResourceID
private String dataResource;
@NotNull(message = "dataset.major.version.empty")
private Integer majorVersion;
@NotEmpty(message = "dataset.major.label.empty")
@Pattern(regexp = "[a-zA-Z0-9_\\-]*", message = "dataset.major.label.regex")
private String majorLabel;
@NotNull(message = "dataset.minor.version.empty")
private Integer minorVersion;
private String revisionNotes;
private String revisionReason;
@NotEmpty (message = "dataset.title.empty")
private String title;
public String getRevisionNotes() {
return revisionNotes;
}
public DatasetMetadata setRevisionNotes(String revisionNotes) {
this.revisionNotes = revisionNotes;
return this;
}
public String getRevisionReason() {
return revisionReason;
}
public DatasetMetadata setRevisionReason(String revisionReason) {
this.revisionReason = revisionReason;
return this;
}
public String getJsonMetadata() {
return jsonMetadata;
}
public DatasetMetadata setJsonMetadata(String jsonMetadata) {
this.jsonMetadata = jsonMetadata;
return this;
}
public String getDatasetId() {
return datasetId;
}
public DatasetMetadata setDatasetId(String datasetId) {
this.datasetId = datasetId;
return this;
}
public Integer getMajorVersion() {
return majorVersion;
}
public DatasetMetadata setMajorVersion(Integer majorVersion) {
this.majorVersion = majorVersion;
return this;
}
public Integer getMinorVersion() {
return minorVersion;
}
public DatasetMetadata setMinorVersion(Integer minorVersion) {
this.minorVersion = minorVersion;
return this;
}
public String getDataResource() {
return dataResource;
}
public DatasetMetadata setDataResource(String dataResource) {
this.dataResource = dataResource;
return this;
}
public String getMajorLabel() {
return majorLabel;
}
public DatasetMetadata setMajorLabel(String majorLabel) {
this.majorLabel = majorLabel;
return this;
}
public String getTitle() {
return title;
}
public DatasetMetadata setTitle(String title) {
this.title = title;
return this;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DatasetMetadata that = (DatasetMetadata) o;
return new EqualsBuilder()
.append(getJsonMetadata(), that.getJsonMetadata())
.append(getDatasetId(), that.getDatasetId())
.append(getDataResource(), that.getDataResource())
.append(getMajorVersion(), that.getMajorVersion())
.append(getMinorVersion(), that.getMinorVersion())
.append(getRevisionNotes(), that.getRevisionNotes())
.append(getRevisionReason(), that.getRevisionReason())
.append(getMajorLabel(), that.getMajorLabel())
.append(getTitle(), that.getTitle())
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(getJsonMetadata())
.append(getDatasetId())
.append(getDataResource())
.append(getMajorVersion())
.append(getMajorLabel())
.append(getMinorVersion())
.append(getRevisionNotes())
.append(getRevisionReason())
.append(getTitle())
.toHashCode();
}
@Override
public String toString() {
return new ToStringBuilder(this)
.append("jsonMetadata", jsonMetadata)
.append("datasetId", datasetId)
.append("dataResource", dataResource)
.append("majorVersion", majorVersion)
.append("majorLabel", majorLabel)
.append("minorVersion", minorVersion)
.append("revisionNotes", revisionNotes)
.append("revisionReason", revisionReason)
.append("title", title)
.toString();
}
}
| 29.4375
| 80
| 0.635592
|
54de03695baae01c4a38dbd01dfbcb06c31d4c56
| 292
|
package org.jetlinks.community.gateway;
import org.jetlinks.core.message.codec.EncodedMessage;
public interface EncodableMessage extends EncodedMessage {
Object getNativePayload();
static EncodableMessage of(Object object) {
return new JsonEncodedMessage(object);
}
}
| 22.461538
| 58
| 0.767123
|
25d44041c90224482cb670a95cfebf3cfcd1dc15
| 24,955
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.*;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.*;
import org.apache.solr.search.function.BoostedQuery;
import org.apache.solr.search.function.FunctionQuery;
import org.apache.solr.search.function.QueryValueSource;
import org.apache.solr.search.function.ValueSource;
import org.apache.solr.util.SolrPluginUtils;
import java.io.IOException;
import java.net.URL;
import java.util.*;
/**
* TODO!
*
* @version $Id: QueryComponent.java 987690 2010-08-21 02:36:09Z yonik $
* @since solr 1.3
*/
public class QueryComponent extends SearchComponent
{
public static final String COMPONENT_NAME = "query";
@Override
public void prepare(ResponseBuilder rb) throws IOException
{
SolrQueryRequest req = rb.req;
SolrParams params = req.getParams();
if (!params.getBool(COMPONENT_NAME, true)) {
return;
}
SolrQueryResponse rsp = rb.rsp;
// Set field flags
String fl = params.get(CommonParams.FL);
int fieldFlags = 0;
if (fl != null) {
fieldFlags |= SolrPluginUtils.setReturnFields(fl, rsp);
}
rb.setFieldFlags( fieldFlags );
String defType = params.get(QueryParsing.DEFTYPE,QParserPlugin.DEFAULT_QTYPE);
if (rb.getQueryString() == null) {
rb.setQueryString( params.get( CommonParams.Q ) );
}
try {
QParser parser = QParser.getParser(rb.getQueryString(), defType, req);
rb.setQuery( parser.getQuery() );
rb.setSortSpec( parser.getSort(true) );
rb.setQparser(parser);
String[] fqs = req.getParams().getParams(CommonParams.FQ);
if (fqs!=null && fqs.length!=0) {
List<Query> filters = rb.getFilters();
if (filters==null) {
filters = new ArrayList<Query>();
rb.setFilters( filters );
}
for (String fq : fqs) {
if (fq != null && fq.trim().length()!=0) {
QParser fqp = QParser.getParser(fq, null, req);
filters.add(fqp.getQuery());
}
}
}
} catch (ParseException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
}
// TODO: temporary... this should go in a different component.
String shards = params.get(ShardParams.SHARDS);
if (shards != null) {
List<String> lst = StrUtils.splitSmart(shards, ",", true);
rb.shards = lst.toArray(new String[lst.size()]);
}
String shards_rows = params.get(ShardParams.SHARDS_ROWS);
if(shards_rows != null) {
rb.shards_rows = Integer.parseInt(shards_rows);
}
String shards_start = params.get(ShardParams.SHARDS_START);
if(shards_start != null) {
rb.shards_start = Integer.parseInt(shards_start);
}
}
/**
* Actually run the query
*/
@Override
public void process(ResponseBuilder rb) throws IOException
{
SolrQueryRequest req = rb.req;
SolrQueryResponse rsp = rb.rsp;
SolrParams params = req.getParams();
if (!params.getBool(COMPONENT_NAME, true)) {
return;
}
SolrIndexSearcher searcher = req.getSearcher();
if (rb.getQueryCommand().getOffset() < 0) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
}
// -1 as flag if not set.
long timeAllowed = (long)params.getInt( CommonParams.TIME_ALLOWED, -1 );
// Optional: This could also be implemented by the top-level searcher sending
// a filter that lists the ids... that would be transparent to
// the request handler, but would be more expensive (and would preserve score
// too if desired).
String ids = params.get(ShardParams.IDS);
if (ids != null) {
SchemaField idField = req.getSchema().getUniqueKeyField();
List<String> idArr = StrUtils.splitSmart(ids, ",", true);
int[] luceneIds = new int[idArr.size()];
int docs = 0;
for (int i=0; i<idArr.size(); i++) {
int id = req.getSearcher().getFirstMatch(
new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
if (id >= 0)
luceneIds[docs++] = id;
}
DocListAndSet res = new DocListAndSet();
res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
if (rb.isNeedDocSet()) {
List<Query> queries = new ArrayList<Query>();
queries.add(rb.getQuery());
List<Query> filters = rb.getFilters();
if (filters != null) queries.addAll(filters);
res.docSet = searcher.getDocSet(queries);
}
rb.setResults(res);
rsp.add("response",rb.getResults().docList);
return;
}
SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
cmd.setTimeAllowed(timeAllowed);
SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();
//
// grouping / field collapsing
//
boolean doGroup = params.getBool(GroupParams.GROUP, false);
if (doGroup) {
try {
cmd.groupCommands = new ArrayList<SolrIndexSearcher.GroupCommand>();
String[] fields = params.getParams(GroupParams.GROUP_FIELD);
String[] funcs = params.getParams(GroupParams.GROUP_FUNC);
String[] queries = params.getParams(GroupParams.GROUP_QUERY);
String groupSortStr = params.get(GroupParams.GROUP_SORT);
Sort groupSort = groupSortStr != null ? QueryParsing.parseSort(groupSortStr, req.getSchema()) : null;
int limitDefault = cmd.getLen(); // this is normally from "rows"
int docsPerGroupDefault = params.getInt(GroupParams.GROUP_LIMIT, 1);
// temporary: implement all group-by-field as group-by-func
if (funcs == null) {
funcs = fields;
} else if (fields != null) {
// catenate functions and fields
String[] both = new String[fields.length + funcs.length];
System.arraycopy(fields, 0, both, 0, fields.length);
System.arraycopy(funcs, 0, both, fields.length, funcs.length);
funcs = both;
}
if (funcs != null) {
for (String groupByStr : funcs) {
QParser parser = QParser.getParser(groupByStr, "func", rb.req);
Query q = parser.getQuery();
SolrIndexSearcher.GroupCommandFunc gc;
if (groupSort != null) {
SolrIndexSearcher.GroupSortCommand gcSort = new SolrIndexSearcher.GroupSortCommand();
gcSort.sort = groupSort;
gc = gcSort;
} else {
gc = new SolrIndexSearcher.GroupCommandFunc();
}
if (q instanceof FunctionQuery) {
gc.groupBy = ((FunctionQuery)q).getValueSource();
} else {
gc.groupBy = new QueryValueSource(q, 0.0f);
}
gc.key = groupByStr;
gc.groupLimit = limitDefault;
gc.docsPerGroup = docsPerGroupDefault;
cmd.groupCommands.add(gc);
}
}
if (cmd.groupCommands.size() == 0)
cmd.groupCommands = null;
if (cmd.groupCommands != null) {
searcher.search(result,cmd);
rsp.add("grouped", result.groupedResults);
return;
}
} catch (ParseException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
}
}
// normal search result
searcher.search(result,cmd);
rb.setResult( result );
rsp.add("response",rb.getResults().docList);
rsp.getToLog().add("hits", rb.getResults().docList.matches());
doFieldSortValues(rb, searcher);
doPrefetch(rb);
}
protected void doFieldSortValues(ResponseBuilder rb, SolrIndexSearcher searcher) throws IOException
{
SolrQueryRequest req = rb.req;
SolrQueryResponse rsp = rb.rsp;
// The query cache doesn't currently store sort field values, and SolrIndexSearcher doesn't
// currently have an option to return sort field values. Because of this, we
// take the documents given and re-derive the sort values.
boolean fsv = req.getParams().getBool(ResponseBuilder.FIELD_SORT_VALUES,false);
if(fsv){
Sort sort = rb.getSortSpec().getSort();
SortField[] sortFields = sort==null ? new SortField[]{SortField.FIELD_SCORE} : sort.getSort();
NamedList sortVals = new NamedList(); // order is important for the sort fields
Field field = new Field("dummy", "", Field.Store.YES, Field.Index.NO); // a dummy Field
SolrIndexReader reader = searcher.getReader();
SolrIndexReader[] readers = reader.getLeafReaders();
SolrIndexReader subReader = reader;
if (readers.length==1) {
// if there is a single segment, use that subReader and avoid looking up each time
subReader = readers[0];
readers=null;
}
int[] offsets = reader.getLeafOffsets();
for (SortField sortField: sortFields) {
int type = sortField.getType();
if (type==SortField.SCORE || type==SortField.DOC) continue;
FieldComparator comparator = null;
FieldComparator comparators[] = (readers==null) ? null : new FieldComparator[readers.length];
String fieldname = sortField.getField();
FieldType ft = fieldname==null ? null : req.getSchema().getFieldTypeNoEx(fieldname);
DocList docList = rb.getResults().docList;
ArrayList<Object> vals = new ArrayList<Object>(docList.size());
DocIterator it = rb.getResults().docList.iterator();
int offset = 0;
int idx = 0;
while(it.hasNext()) {
int doc = it.nextDoc();
if (readers != null) {
idx = SolrIndexReader.readerIndex(doc, offsets);
subReader = readers[idx];
offset = offsets[idx];
comparator = comparators[idx];
}
if (comparator == null) {
comparator = sortField.getComparator(1,0);
comparator.setNextReader(subReader, offset);
if (comparators != null)
comparators[idx] = comparator;
}
doc -= offset; // adjust for what segment this is in
comparator.copy(0, doc);
Object val = comparator.value(0);
// Sortable float, double, int, long types all just use a string
// comparator. For these, we need to put the type into a readable
// format. One reason for this is that XML can't represent all
// string values (or even all unicode code points).
// indexedToReadable() should be a no-op and should
// thus be harmless anyway (for all current ways anyway)
if (val instanceof String) {
field.setValue((String)val);
val = ft.toObject(field);
}
// Must do the same conversion when sorting by a
// String field in Lucene, which returns the terms
// data as BytesRef:
if (val instanceof BytesRef) {
field.setValue(((BytesRef)val).utf8ToString());
val = ft.toObject(field);
}
vals.add(val);
}
sortVals.add(fieldname, vals);
}
rsp.add("sort_values", sortVals);
}
}
protected void doPrefetch(ResponseBuilder rb) throws IOException
{
SolrQueryRequest req = rb.req;
SolrQueryResponse rsp = rb.rsp;
//pre-fetch returned documents
if (!req.getParams().getBool(ShardParams.IS_SHARD,false) && rb.getResults().docList != null && rb.getResults().docList.size()<=50) {
// TODO: this may depend on the highlighter component (or other components?)
SolrPluginUtils.optimizePreFetchDocs(rb.getResults().docList, rb.getQuery(), req, rsp);
}
}
@Override
public int distributedProcess(ResponseBuilder rb) throws IOException {
if (rb.stage < ResponseBuilder.STAGE_PARSE_QUERY)
return ResponseBuilder.STAGE_PARSE_QUERY;
if (rb.stage == ResponseBuilder.STAGE_PARSE_QUERY) {
createDistributedIdf(rb);
return ResponseBuilder.STAGE_EXECUTE_QUERY;
}
if (rb.stage < ResponseBuilder.STAGE_EXECUTE_QUERY) return ResponseBuilder.STAGE_EXECUTE_QUERY;
if (rb.stage == ResponseBuilder.STAGE_EXECUTE_QUERY) {
createMainQuery(rb);
return ResponseBuilder.STAGE_GET_FIELDS;
}
if (rb.stage < ResponseBuilder.STAGE_GET_FIELDS) return ResponseBuilder.STAGE_GET_FIELDS;
if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
createRetrieveDocs(rb);
return ResponseBuilder.STAGE_DONE;
}
return ResponseBuilder.STAGE_DONE;
}
@Override
public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
mergeIds(rb, sreq);
}
if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
returnFields(rb, sreq);
return;
}
}
@Override
public void finishStage(ResponseBuilder rb) {
if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
// We may not have been able to retrieve all the docs due to an
// index change. Remove any null documents.
for (Iterator<SolrDocument> iter = rb._responseDocs.iterator(); iter.hasNext();) {
if (iter.next() == null) {
iter.remove();
rb._responseDocs.setNumFound(rb._responseDocs.getNumFound()-1);
}
}
rb.rsp.add("response", rb._responseDocs);
}
}
private void createDistributedIdf(ResponseBuilder rb) {
// TODO
}
private void createMainQuery(ResponseBuilder rb) {
ShardRequest sreq = new ShardRequest();
sreq.purpose = ShardRequest.PURPOSE_GET_TOP_IDS;
sreq.params = new ModifiableSolrParams(rb.req.getParams());
// TODO: base on current params or original params?
// don't pass through any shards param
sreq.params.remove(ShardParams.SHARDS);
// set the start (offset) to 0 for each shard request so we can properly merge
// results from the start.
if(rb.shards_start > -1) {
// if the client set shards.start set this explicitly
sreq.params.set(CommonParams.START,rb.shards_start);
} else {
sreq.params.set(CommonParams.START, "0");
}
// TODO: should we even use the SortSpec? That's obtained from the QParser, and
// perhaps we shouldn't attempt to parse the query at this level?
// Alternate Idea: instead of specifying all these things at the upper level,
// we could just specify that this is a shard request.
if(rb.shards_rows > -1) {
// if the client set shards.rows set this explicity
sreq.params.set(CommonParams.ROWS,rb.shards_rows);
} else {
sreq.params.set(CommonParams.ROWS, rb.getSortSpec().getOffset() + rb.getSortSpec().getCount());
}
// in this first phase, request only the unique key field
// and any fields needed for merging.
sreq.params.set(ResponseBuilder.FIELD_SORT_VALUES,"true");
if ( (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES)!=0 || rb.getSortSpec().includesScore()) {
sreq.params.set(CommonParams.FL, rb.req.getSchema().getUniqueKeyField().getName() + ",score");
} else {
sreq.params.set(CommonParams.FL, rb.req.getSchema().getUniqueKeyField().getName());
}
rb.addRequest(this, sreq);
}
private void mergeIds(ResponseBuilder rb, ShardRequest sreq) {
SortSpec ss = rb.getSortSpec();
Sort sort = ss.getSort();
SortField[] sortFields = null;
if(sort != null) sortFields = sort.getSort();
else {
sortFields = new SortField[]{SortField.FIELD_SCORE};
}
SchemaField uniqueKeyField = rb.req.getSchema().getUniqueKeyField();
// id to shard mapping, to eliminate any accidental dups
HashMap<Object,String> uniqueDoc = new HashMap<Object,String>();
// Merge the docs via a priority queue so we don't have to sort *all* of the
// documents... we only need to order the top (rows+start)
ShardFieldSortedHitQueue queue;
queue = new ShardFieldSortedHitQueue(sortFields, ss.getOffset() + ss.getCount());
long numFound = 0;
Float maxScore=null;
for (ShardResponse srsp : sreq.responses) {
SolrDocumentList docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response");
// calculate global maxScore and numDocsFound
if (docs.getMaxScore() != null) {
maxScore = maxScore==null ? docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore());
}
numFound += docs.getNumFound();
NamedList sortFieldValues = (NamedList)(srsp.getSolrResponse().getResponse().get("sort_values"));
// go through every doc in this response, construct a ShardDoc, and
// put it in the priority queue so it can be ordered.
for (int i=0; i<docs.size(); i++) {
SolrDocument doc = docs.get(i);
Object id = doc.getFieldValue(uniqueKeyField.getName());
String prevShard = uniqueDoc.put(id, srsp.getShard());
if (prevShard != null) {
// duplicate detected
numFound--;
// For now, just always use the first encountered since we can't currently
// remove the previous one added to the priority queue. If we switched
// to the Java5 PriorityQueue, this would be easier.
continue;
// make which duplicate is used deterministic based on shard
// if (prevShard.compareTo(srsp.shard) >= 0) {
// TODO: remove previous from priority queue
// continue;
// }
}
ShardDoc shardDoc = new ShardDoc();
shardDoc.id = id;
shardDoc.shard = srsp.getShard();
shardDoc.orderInShard = i;
Object scoreObj = doc.getFieldValue("score");
if (scoreObj != null) {
if (scoreObj instanceof String) {
shardDoc.score = Float.parseFloat((String)scoreObj);
} else {
shardDoc.score = (Float)scoreObj;
}
}
shardDoc.sortFieldValues = sortFieldValues;
queue.insertWithOverflow(shardDoc);
} // end for-each-doc-in-response
} // end for-each-response
// The queue now has 0 -> queuesize docs, where queuesize <= start + rows
// So we want to pop the last documents off the queue to get
// the docs offset -> queuesize
int resultSize = queue.size() - ss.getOffset();
resultSize = Math.max(0, resultSize); // there may not be any docs in range
Map<Object,ShardDoc> resultIds = new HashMap<Object,ShardDoc>();
for (int i=resultSize-1; i>=0; i--) {
ShardDoc shardDoc = (ShardDoc)queue.pop();
shardDoc.positionInResponse = i;
// Need the toString() for correlation with other lists that must
// be strings (like keys in highlighting, explain, etc)
resultIds.put(shardDoc.id.toString(), shardDoc);
}
SolrDocumentList responseDocs = new SolrDocumentList();
if (maxScore!=null) responseDocs.setMaxScore(maxScore);
responseDocs.setNumFound(numFound);
responseDocs.setStart(ss.getOffset());
// size appropriately
for (int i=0; i<resultSize; i++) responseDocs.add(null);
// save these results in a private area so we can access them
// again when retrieving stored fields.
// TODO: use ResponseBuilder (w/ comments) or the request context?
rb.resultIds = resultIds;
rb._responseDocs = responseDocs;
}
private void createRetrieveDocs(ResponseBuilder rb) {
// TODO: in a system with nTiers > 2, we could be passed "ids" here
// unless those requests always go to the final destination shard
// for each shard, collect the documents for that shard.
HashMap<String, Collection<ShardDoc>> shardMap = new HashMap<String,Collection<ShardDoc>>();
for (ShardDoc sdoc : rb.resultIds.values()) {
Collection<ShardDoc> shardDocs = shardMap.get(sdoc.shard);
if (shardDocs == null) {
shardDocs = new ArrayList<ShardDoc>();
shardMap.put(sdoc.shard, shardDocs);
}
shardDocs.add(sdoc);
}
SchemaField uniqueField = rb.req.getSchema().getUniqueKeyField();
// Now create a request for each shard to retrieve the stored fields
for (Collection<ShardDoc> shardDocs : shardMap.values()) {
ShardRequest sreq = new ShardRequest();
sreq.purpose = ShardRequest.PURPOSE_GET_FIELDS;
sreq.shards = new String[] {shardDocs.iterator().next().shard};
sreq.params = new ModifiableSolrParams();
// add original params
sreq.params.add( rb.req.getParams());
// no need for a sort, we already have order
sreq.params.remove(CommonParams.SORT);
// we already have the field sort values
sreq.params.remove(ResponseBuilder.FIELD_SORT_VALUES);
// make sure that the id is returned for correlation.
String fl = sreq.params.get(CommonParams.FL);
if (fl != null) {
fl = fl.trim();
// currently, "score" is synonymous with "*,score" so
// don't add "id" if the fl is empty or "score" or it would change the meaning.
if (fl.length()!=0 && !"score".equals(fl) && !"*".equals(fl)) {
sreq.params.set(CommonParams.FL, fl+','+uniqueField.getName());
}
}
ArrayList<String> ids = new ArrayList<String>(shardDocs.size());
for (ShardDoc shardDoc : shardDocs) {
// TODO: depending on the type, we may need more tha a simple toString()?
ids.add(shardDoc.id.toString());
}
sreq.params.add(ShardParams.IDS, StrUtils.join(ids, ','));
rb.addRequest(this, sreq);
}
}
private void returnFields(ResponseBuilder rb, ShardRequest sreq) {
// Keep in mind that this could also be a shard in a multi-tiered system.
// TODO: if a multi-tiered system, it seems like some requests
// could/should bypass middlemen (like retrieving stored fields)
// TODO: merge fsv to if requested
if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
boolean returnScores = (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES) != 0;
assert(sreq.responses.size() == 1);
ShardResponse srsp = sreq.responses.get(0);
SolrDocumentList docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response");
String keyFieldName = rb.req.getSchema().getUniqueKeyField().getName();
for (SolrDocument doc : docs) {
Object id = doc.getFieldValue(keyFieldName);
ShardDoc sdoc = rb.resultIds.get(id.toString());
if (sdoc != null) {
if (returnScores && sdoc.score != null) {
doc.setField("score", sdoc.score);
}
rb._responseDocs.set(sdoc.positionInResponse, doc);
}
}
}
}
/////////////////////////////////////////////
/// SolrInfoMBean
////////////////////////////////////////////
@Override
public String getDescription() {
return "query";
}
@Override
public String getVersion() {
return "$Revision: 987690 $";
}
@Override
public String getSourceId() {
return "$Id: QueryComponent.java 987690 2010-08-21 02:36:09Z yonik $";
}
@Override
public String getSource() {
return "$URL: https://svn.apache.org/repos/asf/lucene/dev/trunk/solr/src/java/org/apache/solr/handler/component/QueryComponent.java $";
}
@Override
public URL[] getDocs() {
return null;
}
}
| 36.377551
| 139
| 0.643478
|
c94af6be7d142c33ffdf91895861176a1b49babb
| 2,363
|
/*-
* ========================LICENSE_START=================================
* io.openslice.sol005nbi.osm5
* %%
* Copyright (C) 2019 openslice.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================LICENSE_END==================================
*/
package osm5.ns.yang.nfvo.instantiation.parameters.rev180914;
import java.util.List;
import javax.annotation.Nullable;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.common.QName;
import osm5.ns.yang.nfvo.instantiation.parameters.rev180914.vld_params.VnfdConnectionPointRef;
/**
*
* <p>
* This class represents the following YANG schema fragment defined in module <b>instantiation-parameters</b>
* <pre>
* grouping vld_params {
* uses vld_common_params;
* list vnfd-connection-point-ref {
* key "member-vnf-index-ref vnfd-connection-point-ref";
* leaf member-vnf-index-ref {
* type leafref {
* path /nsd:nsd-catalog/nsd:nsd/nsd:constituent-vnfd/nsd:member-vnf-index;
* }
* }
* leaf vnfd-connection-point-ref {
* type leafref {
* path /vnfd:vnfd-catalog/vnfd:vnfd/vnfd:connection-point/vnfd:name;
* }
* }
* leaf ip-address {
* type inet:ip-address;
* }
* }
* }
* </pre>The schema path to identify an instance is
* <i>instantiation-parameters/vld_params</i>
*
*/
public interface VldParams
extends
DataObject,
VldCommonParams
{
public static final QName QNAME = $YangModuleInfoImpl.qnameOf("vld_params");
/**
* @return <code>java.util.List</code> <code>vnfdConnectionPointRef</code>, or <code>null</code> if not present
*/
@Nullable
List<VnfdConnectionPointRef> getVnfdConnectionPointRef();
}
| 31.506667
| 116
| 0.639865
|
c5324ce851cf2bbdf04f53c046b4dc4c8ddd3165
| 1,021
|
//package com.ueh.paperless.controllers;
//
//import java.sql.Date;
//
//import org.springframework.stereotype.Controller;
//import org.springframework.ui.Model;
//import org.springframework.web.bind.annotation.GetMapping;
//import org.springframework.web.bind.annotation.PostMapping;
//import org.springframework.web.bind.annotation.RequestParam;
//
//import com.ueh.paperless.domain.CourseSession;
//
//@Controller
//public class CourseSessionController {
//
// @GetMapping("/create-courseSession")
// public String crateCourseSession(Model model) {
// model.addAttribute("CourseSession", new CourseSession());
// return "/Course/courseSession";
// }
//
// @PostMapping()
// public String newCourseSession(@RequestParam Date dateStart, @RequestParam Date dateEnd,
// @RequestParam String course) {
// CourseSession coursesession = new CourseSession();
// coursesession.setDateStart(dateStart);
// coursesession.setDateEnd(dateEnd);
//
// return "/Course/course";
//
// }
//}
| 30.939394
| 93
| 0.72478
|
fe169d426c0cd84b6fc0b737fe8243786a7acdf4
| 1,801
|
/*
Copyright (c) 2019 Criteo
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.criteo.vips;
public class PixelPacket extends Vips {
double r;
double g;
double b;
double a;
public PixelPacket(double r, double g, double b, double a) {
this.r = r;
this.g = g;
this.b = b;
this.a = a;
}
public PixelPacket(double r, double g, double b) {
this(r, g, b, 255.0);
}
public double getRed() {
return r;
}
public double getGreen() {
return g;
}
public double getBlue() {
return b;
}
public double getAlpha() {
return a;
}
public void setRed(double r) {
this.r = r;
}
public void setGreen(double g) {
this.g = g;
}
public void setBlue(double b) {
this.b = b;
}
public void setAlpha(double a) {
this.a = a;
}
public double[] getComponents() {
return new double[] { r, g, b, a };
}
@Override
public boolean equals(Object o) {
if (o instanceof PixelPacket) {
PixelPacket pixelPacket = (PixelPacket) o;
return r == pixelPacket.r && g == pixelPacket.g && b == pixelPacket.b && a == pixelPacket.a;
}
return false;
}
}
| 21.963415
| 104
| 0.591893
|
2f9744547325c4f2c7a42498638f7ca108108f4f
| 4,856
|
/*
Copyright 2012 - 2014 Jerome Leleu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pac4j.oauth.client;
import org.pac4j.core.client.RedirectAction;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.util.CommonHelper;
import org.pac4j.oauth.client.exception.OAuthCredentialsException;
import org.pac4j.oauth.profile.JsonHelper;
import org.pac4j.oauth.profile.OAuthAttributesDefinitions;
import org.pac4j.oauth.profile.google2.Google2Profile;
import org.scribe.builder.api.GoogleApi20;
import org.scribe.model.OAuthConfig;
import org.scribe.model.SignatureType;
import org.scribe.model.Token;
import org.scribe.oauth.ProxyOAuth20ServiceImpl;
import com.fasterxml.jackson.databind.JsonNode;
/**
* This class is the OAuth client to authenticate users in Google using OAuth protocol version 2.0.
* <p />
* The <i>scope</i> is by default : {@link Google2Scope#EMAIL}, but it can also but set to : {@link Google2Scope#PROFILE} or
* {@link Google2Scope#EMAIL_AND_PROFILE}.
* <p />
* It returns a {@link org.pac4j.oauth.profile.google2.Google2Profile}.
* <p />
* More information at https://developers.google.com/accounts/docs/OAuth2Login
*
* @see org.pac4j.oauth.profile.google2.Google2Profile
* @author Jerome Leleu
* @since 1.2.0
*/
public class Google2Client extends BaseOAuth20Client<Google2Profile> {

    /** The scope combinations that can be requested from Google. */
    public enum Google2Scope {
        EMAIL,
        PROFILE,
        EMAIL_AND_PROFILE
    }

    // OAuth scope URLs are true constants, so make them static final instead
    // of per-instance fields.
    protected static final String PROFILE_SCOPE = "https://www.googleapis.com/auth/userinfo.profile";
    protected static final String EMAIL_SCOPE = "https://www.googleapis.com/auth/userinfo.email";

    /** Requested scope; defaults to email + profile. */
    protected Google2Scope scope = Google2Scope.EMAIL_AND_PROFILE;

    /** Space-separated scope string derived from {@link #scope} in {@link #internalInit()}. */
    protected String scopeValue;

    public Google2Client() {
    }

    public Google2Client(final String key, final String secret) {
        setKey(key);
        setSecret(secret);
    }

    @Override
    protected Google2Client newClient() {
        final Google2Client newClient = new Google2Client();
        newClient.setScope(this.scope);
        return newClient;
    }

    @Override
    public RedirectAction retrieveLogoutRedirectAction(Google2Profile google2Profile, WebContext context) {
        throw new UnsupportedOperationException("Not implemented yet");
    }

    @Override
    protected void internalInit() {
        super.internalInit();
        CommonHelper.assertNotNull("scope", this.scope);
        // Map the enum onto the space-separated scope string Google expects.
        switch (this.scope) {
        case EMAIL:
            this.scopeValue = EMAIL_SCOPE;
            break;
        case PROFILE:
            this.scopeValue = PROFILE_SCOPE;
            break;
        default:
            this.scopeValue = PROFILE_SCOPE + " " + EMAIL_SCOPE;
            break;
        }
        this.service = new ProxyOAuth20ServiceImpl(new GoogleApi20(), new OAuthConfig(this.key, this.secret,
                                                                                     this.callbackUrl,
                                                                                     SignatureType.Header,
                                                                                     this.scopeValue, null),
                                                   this.connectTimeout, this.readTimeout, this.proxyHost,
                                                   this.proxyPort, false, true);
    }

    @Override
    protected String getProfileUrl(final Token accessToken) {
        return "https://www.googleapis.com/oauth2/v2/userinfo";
    }

    /**
     * Builds a {@link Google2Profile} from the userinfo JSON response; returns
     * an empty profile when the body cannot be parsed.
     */
    @Override
    protected Google2Profile extractUserProfile(final String body) {
        final Google2Profile profile = new Google2Profile();
        final JsonNode json = JsonHelper.getFirstNode(body);
        if (json != null) {
            profile.setId(JsonHelper.get(json, "id"));
            for (final String attribute : OAuthAttributesDefinitions.google2Definition.getPrincipalAttributes()) {
                profile.addAttribute(attribute, JsonHelper.get(json, attribute));
            }
        }
        return profile;
    }

    public Google2Scope getScope() {
        return this.scope;
    }

    public void setScope(final Google2Scope scope) {
        this.scope = scope;
    }

    @Override
    protected boolean requiresStateParameter() {
        return false;
    }

    /**
     * @return whether the user denied permissions ("access_denied" error
     *         parameter present in the callback request).
     */
    @Override
    protected boolean hasBeenCancelled(final WebContext context) {
        final String error = context.getRequestParameter(OAuthCredentialsException.ERROR);
        // Simplified from an if/else returning true/false.
        return "access_denied".equals(error);
    }
}
| 33.722222
| 124
| 0.689044
|
b950593cc71ee9c7c3b4690452abf52eb1b86dcd
| 9,862
|
package com.comphenix.protocol.reflect.accessors;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import com.comphenix.protocol.reflect.ExactReflection;
import com.comphenix.protocol.reflect.FuzzyReflection;
import com.google.common.base.Joiner;
public final class Accessors {
    /**
     * Represents a field accessor that synchronizes access to the underlying field.
     * <p>
     * Writes lock on the field's current value (so there is nothing to lock on
     * while the field is null); reads are unsynchronized.
     * @author Kristian
     */
    public static final class SynchronizedFieldAccessor implements FieldAccessor {
        private final FieldAccessor accessor;

        private SynchronizedFieldAccessor(FieldAccessor accessor) {
            this.accessor = accessor;
        }

        @Override
        public void set(Object instance, Object value) {
            Object lock = accessor.get(instance);

            if (lock != null) {
                synchronized (lock) {
                    accessor.set(instance, value);
                }
            } else {
                // No current value to lock on - write unsynchronized.
                accessor.set(instance, value);
            }
        }

        @Override
        public Object get(Object instance) {
            return accessor.get(instance);
        }

        @Override
        public Field getField() {
            return accessor.getField();
        }
    }

    /**
     * Retrieve an accessor for the first field of the given type.
     * @param instanceClass - the type of the instance to retrieve.
     * @param fieldClass - type of the field to retrieve.
     * @param forceAccess - whether or not to look for private and protected fields.
     * @return The field accessor.
     * @throws IllegalArgumentException If the field cannot be found.
     */
    public static FieldAccessor getFieldAccessor(Class<?> instanceClass, Class<?> fieldClass, boolean forceAccess) {
        // Get a field accessor
        Field field = FuzzyReflection.fromClass(instanceClass, forceAccess).getFieldByType(null, fieldClass);
        return Accessors.getFieldAccessor(field);
    }

    /**
     * Retrieve an accessor (in declared order) for every field of the given type.
     * @param instanceClass - the type of the instance to retrieve.
     * @param fieldClass - type of the field(s) to retrieve.
     * @param forceAccess - whether or not to look for private and protected fields.
     * @return The accessors.
     */
    public static FieldAccessor[] getFieldAccessorArray(Class<?> instanceClass, Class<?> fieldClass, boolean forceAccess) {
        List<Field> fields = FuzzyReflection.fromClass(instanceClass, forceAccess).getFieldListByType(fieldClass);
        FieldAccessor[] accessors = new FieldAccessor[fields.size()];

        for (int i = 0; i < accessors.length; i++) {
            accessors[i] = getFieldAccessor(fields.get(i));
        }
        return accessors;
    }

    /**
     * Retrieve an accessor for the first field of the given type.
     * @param instanceClass - the type of the instance to retrieve.
     * @param fieldName - name of the field to retrieve.
     * @param forceAccess - whether or not to look for private and protected fields.
     * @return The value of that field.
     * @throws IllegalArgumentException If the field cannot be found.
     */
    public static FieldAccessor getFieldAccessor(Class<?> instanceClass, String fieldName, boolean forceAccess) {
        // BUG FIX: forceAccess was ignored (hard-coded to true), so callers
        // passing false still searched non-public fields.
        return Accessors.getFieldAccessor(ExactReflection.fromClass(instanceClass, forceAccess).getField(fieldName));
    }

    /**
     * Retrieve a field accessor from a given field that uses unchecked exceptions.
     * @param field - the field.
     * @return The field accessor.
     */
    public static FieldAccessor getFieldAccessor(final Field field) {
        return Accessors.getFieldAccessor(field, true);
    }

    /**
     * Retrieve a field accessor from a given field that uses unchecked exceptions.
     * @param field - the field.
     * @param forceAccess - whether or not to skip Java access checking.
     * @return The field accessor.
     */
    public static FieldAccessor getFieldAccessor(final Field field, boolean forceAccess) {
        // BUG FIX: setAccessible was always called with true, ignoring
        // forceAccess (compare getMethodAccessor(Method, boolean)).
        field.setAccessible(forceAccess);
        return new DefaultFieldAccessor(field);
    }

    /**
     * Retrieve a field accessor for a field with the given name and equivalent type, or NULL.
     * @param clazz - the declaration class.
     * @param fieldName - the field name.
     * @param fieldType - assignable field type.
     * @return The field accessor, or NULL if not found.
     */
    public static FieldAccessor getFieldAcccessorOrNull(Class<?> clazz, String fieldName, Class<?> fieldType) {
        try {
            FieldAccessor accessor = Accessors.getFieldAccessor(clazz, fieldName, true);

            // Verify the type
            if (fieldType.isAssignableFrom(accessor.getField().getType())) {
                return accessor;
            }
            return null;
        } catch (IllegalArgumentException e) {
            // Deliberate best-effort lookup: absence is signalled by null.
            return null;
        }
    }

    /**
     * Retrieve a method accessor for a method with the given name, or NULL.
     * @param clazz - the declaration class.
     * @param methodName - the method name.
     * @return The method accessor, or NULL if not found.
     */
    public static MethodAccessor getMethodAcccessorOrNull(Class<?> clazz, String methodName) {
        try {
            return Accessors.getMethodAccessor(clazz, methodName);
        } catch (IllegalArgumentException e) {
            return null;
        }
    }

    /**
     * Find a specific constructor in a class.
     * @param clazz - the class.
     * @param parameters - the signature of the constructor to find.
     * @return The constructor, or NULL if not found.
     */
    public static ConstructorAccessor getConstructorAccessorOrNull(Class<?> clazz, Class<?>... parameters) {
        try {
            return Accessors.getConstructorAccessor(clazz, parameters);
        } catch (IllegalArgumentException e) {
            return null; // Not found
        }
    }

    /**
     * Retrieve a field accessor that will cache the content of the field.
     * <p>
     * Note that we don't check if the underlying field has changed after the value has been cached,
     * so it's best to use this on final fields.
     * @param inner - the accessor.
     * @return A cached field accessor.
     */
    public static FieldAccessor getCached(final FieldAccessor inner) {
        return new FieldAccessor() {
            // Sentinel distinguishing "not cached yet" from a cached null.
            private final Object EMPTY = new Object();
            private volatile Object value = EMPTY;

            @Override
            public void set(Object instance, Object value) {
                inner.set(instance, value);
                update(value);
            }

            @Override
            public Object get(Object instance) {
                Object cache = value;

                if (cache != EMPTY)
                    return cache;
                return update(inner.get(instance));
            }

            /**
             * Update the cached value.
             * @param value - the value to cache.
             * @return The cached value.
             */
            private Object update(Object value) {
                return this.value = value;
            }

            @Override
            public Field getField() {
                return inner.getField();
            }
        };
    }

    /**
     * Retrieve a field accessor where the write operation is synchronized on the current field value.
     * @param accessor - the accessor.
     * @return The field accessor.
     */
    public static FieldAccessor getSynchronized(final FieldAccessor accessor) {
        // Only wrap once
        if (accessor instanceof SynchronizedFieldAccessor)
            return accessor;
        return new SynchronizedFieldAccessor(accessor);
    }

    /**
     * Retrieve a method accessor that always returns a constant value, regardless of input.
     * @param returnValue - the constant return value.
     * @param method - the method.
     * @return A constant method accessor.
     */
    public static MethodAccessor getConstantAccessor(final Object returnValue, final Method method) {
        return new MethodAccessor() {
            @Override
            public Object invoke(Object target, Object... args) {
                return returnValue;
            }

            @Override
            public Method getMethod() {
                return method;
            }
        };
    }

    /**
     * Retrieve a method accessor for a method with the given name and signature.
     * @param instanceClass - the parent class.
     * @param methodName - the method name.
     * @param parameters - the parameters.
     * @return The method accessor.
     */
    public static MethodAccessor getMethodAccessor(Class<?> instanceClass, String methodName, Class<?>... parameters) {
        return new DefaultMethodAccessor(ExactReflection.fromClass(instanceClass, true).getMethod(methodName, parameters));
    }

    /**
     * Retrieve a method accessor for a particular method, avoiding checked exceptions.
     * @param method - the method to access.
     * @return The method accessor.
     */
    public static MethodAccessor getMethodAccessor(final Method method) {
        return getMethodAccessor(method, true);
    }

    /**
     * Retrieve a method accessor for a particular method, avoiding checked exceptions.
     * @param method - the method to access.
     * @param forceAccess - whether or not to skip Java access checking.
     * @return The method accessor.
     */
    public static MethodAccessor getMethodAccessor(final Method method, boolean forceAccess) {
        method.setAccessible(forceAccess);
        return new DefaultMethodAccessor(method);
    }

    /**
     * Retrieve a constructor accessor for a constructor with the given signature.
     * @param instanceClass - the parent class.
     * @param parameters - the parameters.
     * @return The constructor accessor.
     * @throws IllegalArgumentException If we cannot find this constructor.
     * @throws IllegalStateException If we cannot access reflection.
     */
    public static ConstructorAccessor getConstructorAccessor(Class<?> instanceClass, Class<?>... parameters) {
        try {
            return getConstructorAccessor(instanceClass.getDeclaredConstructor(parameters));
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException(String.format(
                    "Unable to find constructor %s(%s).", instanceClass, Joiner.on(",").join(parameters))
            );
        } catch (SecurityException e) {
            throw new IllegalStateException("Cannot access constructors.", e);
        }
    }

    /**
     * Retrieve a constructor accessor for a particular constructor, avoiding checked exceptions.
     * @param constructor - the constructor to access.
     * @return The method accessor.
     */
    public static ConstructorAccessor getConstructorAccessor(final Constructor<?> constructor) {
        return new DefaultConstrutorAccessor(constructor);
    }

    // Seal this class
    private Accessors() {
    }
}
| 32.983278
| 120
| 0.72176
|
a793220a7b42b8c6cb044a9d26a7eb5ad0442476
| 354
|
package ufc.constants;
/**
* <p>main configuration</p>
*/
/**
 * Main web configuration constants: URL patterns for public/template HTML
 * resources, the action endpoint, and the header carrying the auth token.
 */
public class Configuration_M {

    public final static String ACTION = "/action";
    public final static String PUBLIC_HTML = "/html/public/**";
    public final static String HTML_TEMPLATES = "/html/templates/**";
    public final static String AUTH_TOKEN = "X-Auth-Token";

    /** Constants holder; not meant to be instantiated. */
    private Configuration_M() {
    }
}
| 25.285714
| 70
| 0.666667
|
e34dcb8d321d792f35e62f02c976434bc2e0e835
| 3,016
|
package com.leama.pdfbox.design.elements.render;
import com.leama.pdfbox.design.elements.render.VerticalLayoutHint.VerticalLayoutHintBuilder;
import com.leama.pdfbox.design.text.Alignment;
/**
* The column layout hint provides currently the same possibilities as the
* {@link VerticalLayoutHint}. See there for more details.
*/
public class ColumnLayoutHint extends VerticalLayoutHint {

    /** Commonly used alignment presets. */
    public final static ColumnLayoutHint LEFT = new ColumnLayoutHint(
            Alignment.Left);
    public final static ColumnLayoutHint CENTER = new ColumnLayoutHint(
            Alignment.Center);
    public final static ColumnLayoutHint RIGHT = new ColumnLayoutHint(
            Alignment.Right);

    /**
     * Creates a layout hint with {@link Alignment#Left left alignment}.
     */
    public ColumnLayoutHint() {
        super();
    }

    /**
     * Creates a layout hint with the given alignment.
     *
     * @param alignment the element alignment.
     */
    public ColumnLayoutHint(Alignment alignment) {
        super(alignment);
    }

    /**
     * Creates a layout hint with the given alignment and margins.
     *
     * @param alignment the element alignment.
     * @param marginLeft the left margin.
     * @param marginRight the right margin.
     * @param marginTop the top margin.
     * @param marginBottom the bottom margin.
     */
    public ColumnLayoutHint(Alignment alignment, float marginLeft,
            float marginRight, float marginTop, float marginBottom) {
        super(alignment, marginLeft, marginRight, marginTop, marginBottom);
    }

    /**
     * Creates a layout hint with the given alignment and margins.
     *
     * @param alignment the element alignment.
     * @param marginLeft the left margin.
     * @param marginRight the right margin.
     * @param marginTop the top margin.
     * @param marginBottom the bottom margin.
     * @param resetY if <code>true</code>, the y coordinate will be reset to the
     * point before layouting the element.
     */
    public ColumnLayoutHint(Alignment alignment, float marginLeft,
            float marginRight, float marginTop, float marginBottom,
            boolean resetY) {
        super(alignment, marginLeft, marginRight, marginTop, marginBottom,
                resetY);
    }

    /**
     * @return a {@link ColumnLayoutHintBuilder} for creating a
     *         {@link ColumnLayoutHint} using a fluent API.
     */
    public static ColumnLayoutHintBuilder builder() {
        return new ColumnLayoutHintBuilder();
    }

    /**
     * A builder for creating a {@link ColumnLayoutHint} using a fluent API.
     */
    public static class ColumnLayoutHintBuilder extends VerticalLayoutHintBuilder {
        public ColumnLayoutHint build() {
            return new ColumnLayoutHint(alignment, marginLeft, marginRight,
                    marginTop, marginBottom, resetY);
        }
    }
}
| 34.272727
| 92
| 0.654841
|
751d5c6a0e854065bf4d02254a79b6ffe77f9fd0
| 4,375
|
package com.stenway.wsv;
/**
 * Serializes WSV (Whitespace Separated Values) documents, lines and values
 * to text. Two modes exist: the preserving variants re-emit recorded
 * whitespace and comments, the non-preserving variants use a single space
 * between values.
 */
public class WsvSerializer {
	// A value must be quoted when it contains a line break, WSV whitespace,
	// a double quote, or the comment character '#'.
	private static boolean containsSpecialChar(String value) {
		for (int i=0; i<value.length(); i++) {
			char c = value.charAt(i);
			if (c == '\n' || WsvChar.isWhitespace(c) || c == '"'
					|| c == '#') {
				return true;
			}
		}
		return false;
	}

	/**
	 * Appends the WSV encoding of a single value:
	 * null becomes "-", the empty string becomes "", the literal "-" is
	 * quoted to distinguish it from null, and values with special characters
	 * are quoted with embedded '"' doubled and '\n' encoded as "/".
	 */
	public static void serializeValue(StringBuilder sb, String value) {
		if (value==null) {
			sb.append('-');
		} else if (value.length() == 0) {
			sb.append("\"\"");
		} else if (value.equals("-")) {
			sb.append("\"-\"");
		} else if (containsSpecialChar(value)) {
			sb.append('"');
			for (int i=0; i<value.length(); i++) {
				char c = value.charAt(i);
				if (c == '\n') {
					sb.append("\"/\"");
				} else if(c == '"') {
					sb.append("\"\"");
				} else {
					sb.append(c);
				}
			}
			sb.append('"');
		} else {
			sb.append(value);
		}
	}

	// Emits the recorded whitespace if present; otherwise a single space,
	// but only where a separator is mandatory (between two values).
	private static void serializeWhitespace(StringBuilder sb, String whitespace,
			boolean isRequired) {
		if (whitespace != null && whitespace.length() > 0) {
			sb.append(whitespace);
		} else if (isRequired) {
			sb.append(" ");
		}
	}

	// Preserving variant: interleaves each value with its recorded
	// whitespace run. Assumes whitespaces is non-empty when Values is null
	// (guaranteed by serializeLine, which checks length > 0 first).
	private static void serializeValuesWithWhitespace(StringBuilder sb,
			WsvLine line) {
		if (line.Values == null) {
			String whitespace = line.whitespaces[0];
			serializeWhitespace(sb, whitespace, false);
			return;
		}
		for (int i=0; i<line.Values.length; i++) {
			String whitespace = null;
			if (i < line.whitespaces.length) {
				whitespace = line.whitespaces[i];
			}
			// Leading whitespace is optional; whitespace between values is a
			// required separator.
			if (i == 0) {
				serializeWhitespace(sb, whitespace, false);
			} else {
				serializeWhitespace(sb, whitespace, true);
			}

			serializeValue(sb, line.Values[i]);
		}
		// Trailing whitespace, or a single space before an inline comment.
		if (line.whitespaces.length >= line.Values.length + 1) {
			String whitespace = line.whitespaces[line.Values.length];
			serializeWhitespace(sb, whitespace, false);
		} else if (line.comment != null && line.Values.length > 0) {
			sb.append(' ');
		}
	}

	// Non-preserving variant: single space between values, plus one space
	// before an inline comment.
	private static void serializeValuesWithoutWhitespace(StringBuilder sb,
			WsvLine line) {
		if (line.Values == null) {
			return;
		}
		boolean isFollowingValue = false;
		for (String value : line.Values) {
			if (isFollowingValue) {
				sb.append(' ');
			} else {
				isFollowingValue = true;
			}
			serializeValue(sb, value);
		}
		if (line.comment != null && line.Values.length > 0) {
			sb.append(' ');
		}
	}

	/** Appends a full line: values (whitespace-preserving when recorded) plus comment. */
	public static void serializeLine(StringBuilder sb, WsvLine line) {
		if (line.whitespaces != null && line.whitespaces.length > 0) {
			serializeValuesWithWhitespace(sb, line);
		} else {
			serializeValuesWithoutWhitespace(sb, line);
		}
		if (line.comment != null) {
			sb.append('#');
			sb.append(line.comment);
		}
	}

	public static String serializeLine(WsvLine line) {
		StringBuilder sb = new StringBuilder();
		serializeLine(sb, line);
		return sb.toString();
	}

	/** Serializes the whole document, joining lines with '\n' (no trailing newline). */
	public static String serializeDocument(WsvDocument document) {
		StringBuilder sb = new StringBuilder();
		boolean isFirstLine = true;
		for (WsvLine line : document.Lines) {
			if (!isFirstLine) {
				sb.append('\n');
			} else {
				isFirstLine = false;
			}
			serializeLine(sb, line);
		}
		return sb.toString();
	}

	/** Non-preserving line serialization: recorded whitespace and comments are dropped. */
	public static String serializeLineNonPreserving(WsvLine line) {
		StringBuilder sb = new StringBuilder();
		serializeLine(sb, line.Values);
		return sb.toString();
	}

	/** Non-preserving document serialization: recorded whitespace and comments are dropped. */
	public static String serializeDocumentNonPreserving(WsvDocument document) {
		StringBuilder sb = new StringBuilder();
		boolean isFirstLine = true;
		for (WsvLine line : document.Lines) {
			if (!isFirstLine) {
				sb.append('\n');
			} else {
				isFirstLine = false;
			}
			serializeLine(sb, line.Values);
		}
		return sb.toString();
	}

	/** Appends the given values separated by single spaces. */
	public static void serializeLine(StringBuilder sb, String[] line) {
		boolean isFirstValue = true;
		for (String value : line) {
			if (!isFirstValue) {
				sb.append(' ');
			} else {
				isFirstValue = false;
			}
			serializeValue(sb, value);
		}
	}

	public static String serializeLine(String... line) {
		StringBuilder sb = new StringBuilder();
		serializeLine(sb, line);
		return sb.toString();
	}

	/** Serializes a plain value matrix, one space-separated line per row. */
	public static String serializeDocument(String[][] lines) {
		StringBuilder sb = new StringBuilder();
		boolean isFirstLine = true;
		for (String[] line : lines) {
			if (!isFirstLine) {
				sb.append('\n');
			} else {
				isFirstLine = false;
			}
			serializeLine(sb, line);
		}
		return sb.toString();
	}
}
| 23.777174
| 77
| 0.629714
|
1ef760e427b23b397dd35031bd894470023d91dc
| 578
|
package demo.library.rest;
import org.simpleflatmapper.map.annotation.Key;
/**
 * A publisher exposed by the demo REST service.
 * <p>
 * {@code @Key} on {@code id} presumably marks the identity column used by
 * simpleflatmapper when grouping result rows -- TODO confirm against the
 * mapper configuration.
 */
public class Publisher {
    @Key
    private final Long id;
    private String name;

    public Publisher(Long id, String name) {
        this.id = id;
        this.name = name;
    }

    /**
     * @return the id
     */
    public Long getId() {
        return id;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }
}
| 15.210526
| 47
| 0.532872
|
7dc6555f65a10eb9e1da4a4232795e465e4c9ff3
| 2,617
|
package ui.gui.graphical.game;
import java.awt.image.BufferedImage;
import java.util.HashMap;
import logic.Direction;
import logic.Dragon;
import model.Position;
/**
* AnimatedSprite for dragons that die.
*/
public class DyingDragonSprite implements AnimatedSprite
{
    /**
     * Instantiates a new dying dragon sprite.
     *
     * @param u the dragon
     * @param sprite the sprite sheet holding the death animation
     */
    public DyingDragonSprite(Dragon u, TiledImage sprite)
    {
        // Each facing direction selects a row of the sprite sheet.
        _dirMap.put(Direction.North, 0);
        _dirMap.put(Direction.South, 1);
        _dirMap.put(Direction.East, 2);
        _dirMap.put(Direction.West, 3);

        _unit = u;
        _sprite = sprite;
    }

    /* (non-Javadoc)
     * @see ui.gui.graphical.game.AnimatedSprite#Update(int)
     */
    @Override
    public void Update(int diff)
    {
        // NOTE(review): 'diff' is ignored; the animation advances one frame
        // every third call to Update, so playback speed depends on the call
        // rate rather than on elapsed time -- confirm this is intended.
        _timeCount++;

        if (_playing && _timeCount == 3)
        {
            _frame += sprState.GetDeltaX();
            // Stop on the last frame; presumably so the corpse stays visible.
            if (_frame == sprState.GetNumFrames() - 1)
                _playing = false;
            _timeCount = 0;
        }
    }

    /* (non-Javadoc)
     * @see ui.gui.graphical.game.AnimatedSprite#GetCurrentImage()
     */
    @Override
    public BufferedImage GetCurrentImage()
    {
        // Column = current frame, row = facing direction.
        return _sprite.GetTile(_frame, _dirMap.get(_unit.GetDirection()));
    }

    /* (non-Javadoc)
     * @see ui.gui.graphical.game.AnimatedSprite#GetPosition()
     */
    @Override
    public Position GetPosition()
    {
        return _unit.GetPosition();
    }

    /* (non-Javadoc)
     * @see ui.gui.graphical.game.AnimatedSprite#GetDeltaPosition(int, int)
     */
    @Override
    public Position GetDeltaPosition(int cell_width, int cell_height)
    {
        // Dying dragons never move, so there is no pixel offset.
        return new Position(0, 0);
    }

    /** The constant sprite state; the first argument (11) is presumably the
     *  frame count -- TODO confirm against the SpriteState constructor. */
    private static final SpriteState sprState = new SpriteState(11, 0, 0, 1, 0);

    /** The frame. */
    private int _frame = 0;

    /** The time count. */
    private int _timeCount = 0;

    /** If is playing. */
    private boolean _playing = true;

    /** The sprite. */
    private final TiledImage _sprite;

    /** The unit. */
    private final Dragon _unit;

    /** The direction map. */
    private final HashMap<Direction, Integer> _dirMap = new HashMap<Direction, Integer>();

    /* (non-Javadoc)
     * @see ui.gui.graphical.game.AnimatedSprite#IsAlive()
     */
    @Override
    public boolean IsAlive()
    {
        // NOTE(review): always true, even after the animation has finished;
        // the sprite is therefore never culled here -- confirm intended.
        return true;
    }

    /* (non-Javadoc)
     * @see ui.gui.graphical.game.AnimatedSprite#GetUnitId()
     */
    @Override
    public int GetUnitId()
    {
        return _unit.GetId();
    }
}
| 22.367521
| 90
| 0.598777
|
5ae7392204927946f2da2c018fbd29fc64c4007d
| 1,233
|
package br.com.zup.transacao;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.mockito.Mockito.when;
@ExtendWith(SpringExtension.class)
class ListenerDeTransacaoTest {

    @InjectMocks
    private ListenerDeTransacao listener;

    @Mock
    private TransacaoRepository transacaoRepository;

    @Test
    @DisplayName("Salva transacao")
    void ouvir() {
        // Given: an incoming transaction message fixture.
        TransacaoResponse transacaoResponse = new TransacaoResponse(
                "id",
                new BigDecimal(100),
                new Estabelicimento("Nome", "Cidade", "Endereco"),
                new Cartao("IdCartao", "email@test.com"),
                LocalDateTime.now());
        Transacao transacao = transacaoResponse.toModel();
        // And: the repository stubbed to echo the saved entity.
        // NOTE(review): the stub matches by Transacao#equals -- confirm it is
        // implemented, otherwise the stubbing may never match.
        when(transacaoRepository.save(transacao))
                .thenReturn(transacao);
        // Then: the listener consumes the message without throwing.
        assertDoesNotThrow(() -> listener.ouvir(transacaoResponse));
    }
}
| 30.073171
| 70
| 0.704785
|
0de96335fc317b0410ad9cdaeba6f890f6e4ade4
| 1,413
|
package com.project.entity.proxysale;
import com.project.entity.account.User;
import javax.persistence.*;
import java.util.Date;
/**
 * Log entry recording the removal of a consignment (proxy-sale) inn.
 * @author hunhun
 * 2015-11-16 15:20
 */
@Entity
@Table(name = "tomato_proxysale_inn_del_log")
public class ProxyInnDelLog {
    // Surrogate primary key (auto-increment).
    private Integer id;
    // The inn that was removed.
    private ProxyInn proxyInn;
    // When the removal happened.
    private Date delTime;
    // The operator who performed the removal.
    private User user;
    // Free-text removal reason.
    private String reason;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    // NOTE(review): CascadeType.ALL on a @ManyToOne cascades remove/persist
    // from this log entry to the referenced ProxyInn -- deleting a log row
    // could delete the inn itself; confirm this is intended.
    @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
    @JoinColumn(name = "proxy_inn")
    public ProxyInn getProxyInn() {
        return proxyInn;
    }

    public void setProxyInn(ProxyInn proxyInn) {
        this.proxyInn = proxyInn;
    }

    @Column(name = "del_time")
    public Date getDelTime() {
        return delTime;
    }

    public void setDelTime(Date delTime) {
        this.delTime = delTime;
    }

    // NOTE(review): same CascadeType.ALL concern as above, here for User.
    @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
    @JoinColumn(name = "_user")
    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }

    @Column
    public String getReason() {
        return reason;
    }

    public void setReason(String reason) {
        this.reason = reason;
    }
}
| 19.901408
| 65
| 0.625619
|
fb0819990c4524a9c93e6e26444b141aa38ecf52
| 5,879
|
package br.com.salon.carine.lima.services;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;
import br.com.salon.carine.lima.converters.ConvertersServico;
import br.com.salon.carine.lima.dto.ServicoDTO;
import br.com.salon.carine.lima.models.Servico;
import br.com.salon.carine.lima.repositoriessdp.ServicoRepositorySJPA;
import br.com.salon.carine.lima.response.Message;
import br.com.salon.carine.lima.response.ResponseServico;
@Service
public class ServicoService {

	@Autowired
	public ServicoRepositorySJPA servicoRepositorySJPA;

	// Ids of the last/first Servico rows, cached for wrap-around navigation.
	// NOTE(review): mutable state in a singleton Spring bean is shared across
	// all concurrent users -- confirm this is acceptable.
	private Integer idLastServico = 0;
	private Integer idFirstServico = 0;

	/** Persists a new service and returns a confirmation response. */
	public ResponseServico cadastrar(ServicoDTO servicoDTO) {
		Servico servico = ConvertersServico.deServicoDTOparaServico(servicoDTO);
		servicoRepositorySJPA.save(servico);
		ResponseServico response = new ResponseServico();
		response.setServico(servicoDTO);
		response.setMessage(new Message("Servico", "Serviço cadastrado com sucesso"));
		return response;
	}

	/** Lists every service converted to its DTO form. */
	public List<ServicoDTO> listarTodos() {
		List<ServicoDTO> servicosDTO = new ArrayList<ServicoDTO>();
		for (Servico servico : servicoRepositorySJPA.findAll()) {
			servicosDTO.add(ConvertersServico.deServicoParaServicoDTO(servico));
		}
		return servicosDTO;
	}

	/** @return the service with the given id, or null when absent. */
	public Servico buscarServicoPorId(Integer id) {
		return servicoRepositorySJPA.findById(id).orElse(null);
	}

	/** Removes the service with the given id; unknown ids are ignored. */
	public void remover(Integer id) {
		servicoRepositorySJPA.findById(id).ifPresent(servicoRepositorySJPA::delete);
	}

	/**
	 * Returns the page before {@code number} (page size 1), wrapping to the
	 * last page when the current page is the first.
	 */
	public Page<Servico> previousPageService(boolean isFirst, Integer number) {
		// Page size is fixed at 1, so the row count equals the page count.
		int page = isFirst ? (int) servicoRepositorySJPA.count() - 1 : number - 1;
		return servicoRepositorySJPA.findAll(PageRequest.of(page, 1));
	}

	/**
	 * Returns the page after {@code number} (page size 1), wrapping to the
	 * first page when the current page is the last.
	 */
	public Page<Servico> nextPageService(boolean isLast, Integer number) {
		int page = isLast ? 0 : number + 1;
		return servicoRepositorySJPA.findAll(PageRequest.of(page, 1));
	}

	/** Updates an existing service and returns a confirmation response. */
	public ResponseServico alterarServico(ServicoDTO servicoDTO) {
		Servico servico = ConvertersServico.deServicoDTOparaServico(servicoDTO);
		servicoRepositorySJPA.save(servico);
		ResponseServico response = new ResponseServico();
		response.setServico(servicoDTO);
		response.setMessage(new Message("Servico", "Serviço Alterado com sucesso"));
		return response;
	}

	/** @return one page of services. */
	public Page<Servico> findPageServico(Integer page, Integer size) {
		return servicoRepositorySJPA.findAll(PageRequest.of(page, size));
	}

	/** @return the single-row page at the given row index. */
	public Page<Servico> buscarServicoRowNumber(Integer rowNumber) {
		return servicoRepositorySJPA.findAll(PageRequest.of(rowNumber, 1));
	}

	/** Filters by description; an empty filter falls back to the first page of 5. */
	public List<Servico> filtrarServicoPorDescricao(String descricao) {
		if (!descricao.isEmpty()) {
			return servicoRepositorySJPA.searchDescricaoFilter(descricao);
		}
		return findPageServico(0, 5).getContent();
	}

	/** @return the service after the given one, wrapping around to the first. */
	public Servico nextServico(Integer idServicoAtual) {
		if (isLastServico(idServicoAtual)) {
			return firstServico();
		}
		Integer idServicoProximo = servicoRepositorySJPA.idServicoProximo(idServicoAtual);
		return servicoRepositorySJPA.findById(idServicoProximo).get();
	}

	public boolean isLastServico(Integer idServicoAtual) {
		// Bug fix: Integer was compared with ==, which only holds for small
		// JVM-cached values (-128..127); compare by value instead.
		return idLastServico.equals(idServicoAtual);
	}

	/** @return the first service, or null when the table is empty. */
	public Servico firstServico() {
		Integer idFirst = servicoRepositorySJPA.idFirstServico();
		return servicoRepositorySJPA.findById(idFirst).orElse(null);
	}

	public void atualizarLastId(Integer id) {
		idLastServico = id;
	}

	public void atualizarFirstId(Integer id) {
		idFirstServico = id;
	}

	public Integer idLastServico() {
		return servicoRepositorySJPA.idlastServico();
	}

	public Integer idFirstServico() {
		return servicoRepositorySJPA.idFirstServico();
	}

	/** @return the service before the given one, wrapping around to the last. */
	public Servico previousServico(Integer idServicoAtual) {
		if (isFirstServico(idServicoAtual)) {
			return lastServico();
		}
		Integer idClienteAnterior = servicoRepositorySJPA.idServicoAnterior(idServicoAtual);
		return servicoRepositorySJPA.findById(idClienteAnterior).get();
	}

	public boolean isFirstServico(Integer idServicoAtual) {
		// Bug fix: value comparison instead of reference (==) comparison.
		return idFirstServico.equals(idServicoAtual);
	}

	/** @return the last service, or null when the table is empty. */
	public Servico lastServico() {
		Integer idLast = servicoRepositorySJPA.idlastServico();
		return servicoRepositorySJPA.findById(idLast).orElse(null);
	}
}
| 26.013274
| 87
| 0.763055
|
25cad5c613be0882e4eda78d5e58281197073f53
| 2,447
|
/*
*
* ****************************************************************************
* * Copyright (C) 2019 Testsigma Technologies Inc.
* * All rights reserved.
* ****************************************************************************
*
*/
package com.testsigma.controller;
import com.testsigma.dto.TestSuiteResultDTO;
import com.testsigma.exception.ResourceNotFoundException;
import com.testsigma.mapper.TestSuiteResultMapper;
import com.testsigma.model.TestSuiteResult;
import com.testsigma.service.TestSuiteResultService;
import com.testsigma.specification.TestSuiteResultSpecificationsBuilder;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@Log4j2
@RequestMapping(path = "/test_suite_results")
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class TestSuiteResultsController {

    private final TestSuiteResultService testSuiteResultService;
    private final TestSuiteResultMapper testSuiteResultMapper;

    /** Returns one page of test suite results matching the request filter. */
    @RequestMapping(method = RequestMethod.GET)
    public Page<TestSuiteResultDTO> index(TestSuiteResultSpecificationsBuilder builder, Pageable pageable) {
        log.info("Request /test_suite_results/");
        final Specification<TestSuiteResult> filter = builder.build();
        final Page<TestSuiteResult> resultPage = testSuiteResultService.findAll(filter, pageable);
        final List<TestSuiteResultDTO> dtos = testSuiteResultMapper.mapDTO(resultPage.getContent());
        return new PageImpl<>(dtos, pageable, resultPage.getTotalElements());
    }

    /** Returns a single test suite result by its id. */
    @RequestMapping(value = {"/{id}"}, method = RequestMethod.GET)
    public TestSuiteResultDTO show(@PathVariable(value = "id") Long id) throws ResourceNotFoundException {
        log.info("Request /test_suite_results/" + id);
        final TestSuiteResult result = testSuiteResultService.find(id);
        return testSuiteResultMapper.mapDTO(result);
    }
}
| 42.189655
| 106
| 0.762158
|
753a56465d45080ad4c2e7783d93991668834813
| 4,843
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.util;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.event.AbstractEvent;
import org.apache.flink.runtime.io.network.api.serialization.RecordDeserializer;
import org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer;
import org.apache.flink.runtime.io.network.buffer.Buffer;
import org.apache.flink.types.IntValue;
import org.junit.Assert;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
public interface TestConsumerCallback {
void onBuffer(Buffer buffer);
void onEvent(AbstractEvent event);
public static class CountingCallback implements TestConsumerCallback {
private final AtomicInteger numberOfReadBuffers = new AtomicInteger();
private final AtomicInteger numberOfReadEvents = new AtomicInteger();
@Override
public void onBuffer(Buffer buffer) {
numberOfReadBuffers.incrementAndGet();
}
@Override
public void onEvent(AbstractEvent event) {
numberOfReadEvents.incrementAndGet();
}
/**
* Returns the number of read buffers.
*/
public int getNumberOfReadBuffers() {
return numberOfReadBuffers.get();
}
/**
* Returns the number of read events;
*/
public int getNumberOfReadEvents() {
return numberOfReadEvents.get();
}
}
public static class RecyclingCallback extends CountingCallback {
@Override
public void onBuffer(Buffer buffer) {
super.onBuffer(buffer);
buffer.recycleBuffer();
}
@Override
public void onEvent(AbstractEvent event) {
super.onEvent(event);
}
}
public class VerifyAscendingCallback extends RecyclingCallback {
@Override
public void onBuffer(Buffer buffer) {
final MemorySegment segment = buffer.getMemorySegment();
int expected = getNumberOfReadBuffers() * (segment.size() / 4);
for (int i = 0; i < segment.size(); i += 4) {
assertEquals(expected, segment.getInt(i));
expected++;
}
super.onBuffer(buffer);
}
@Override
public void onEvent(AbstractEvent event) {
super.onEvent(event);
}
}
public class VerifyAscendingOnFixedStepCallback extends RecyclingCallback {
private final int startValue;
private final int stepWidth;
private final int totalRecordCount;
private int actualRecordCount;
private final RecordDeserializer<IntValue> deserializer =
new SpillingAdaptiveSpanningRecordDeserializer<>(new String[]{System.getProperty("java.io.tmpdir")});
private final Set<IntValue> totalData = new HashSet<>();
public VerifyAscendingOnFixedStepCallback(int startValue, int stepWidth, int totalRecordCount) {
this.startValue = startValue;
this.stepWidth = stepWidth;
this.totalRecordCount = totalRecordCount;
this.actualRecordCount = 0;
}
private void assertExpectedValue(int actualValue) {
Assert.assertTrue(actualValue % stepWidth == startValue);
Assert.assertTrue(((actualValue - startValue) / stepWidth) < totalRecordCount);
Assert.assertTrue(((actualValue - startValue) / stepWidth) >= 0);
Assert.assertTrue(!totalData.contains(actualValue));
totalData.add(new IntValue(actualValue));
}
public int getActualRecordCount() {
return actualRecordCount;
}
@Override
public void onBuffer(Buffer buffer) {
final MemorySegment segment = buffer.getMemorySegment();
try {
final IntValue target = new IntValue();
deserializer.setNextBuffer(buffer);
while (true) {
RecordDeserializer.DeserializationResult result = deserializer.getNextRecord(target);
if (result.isFullRecord()) {
assertExpectedValue(target.getValue());
actualRecordCount++;
}
if (result.isBufferConsumed()) {
break;
}
}
} catch (Exception e) {
Assert.assertTrue("Exception during deserialization: " + e.getMessage(), false);
}
super.onBuffer(buffer);
}
@Override
public void onEvent(AbstractEvent event) {
super.onEvent(event);
}
}
}
| 27.833333
| 104
| 0.741689
|
c376c166d076ba29e6346bd02398f854963f532f
| 289
|
package tarena.day05_array;
public class Animal {
String breed;
int age;
void eat() {
System.out.println("动物在吃人");
}
void makeNoice(int num) {
System.out.println("叫了N声");
}
public static void main(String[] args) {
Animal a = new Animal();
a.eat();
a.makeNoice(3);
}
}
| 14.45
| 41
| 0.643599
|
efc0ae16373688981819b6615703c66c04aecadc
| 2,071
|
/*
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.netty;
import java.io.IOException;
import com.google.common.util.concurrent.ListenableFuture;
/**
* Interface for low level messaging primitives.
*/
public interface MessagingService {
/**
* Sends a message asynchronously to the specified communication end point.
* The message is specified using the type and payload.
* @param ep end point to send the message to.
* @param type type of message.
* @param payload message payload bytes.
* @throws IOException when I/O exception of some sort has occurred
*/
public void sendAsync(Endpoint ep, String type, byte[] payload) throws IOException;
/**
* Sends a message synchronously and waits for a response.
* @param ep end point to send the message to.
* @param type type of message.
* @param payload message payload.
* @return a response future
* @throws IOException when I/O exception of some sort has occurred
*/
public ListenableFuture<byte[]> sendAndReceive(Endpoint ep, String type, byte[] payload) throws IOException;
/**
* Registers a new message handler for message type.
* @param type message type.
* @param handler message handler
*/
public void registerHandler(String type, MessageHandler handler);
/**
* Unregister current handler, if one exists for message type.
* @param type message type
*/
public void unregisterHandler(String type);
}
| 35.101695
| 112
| 0.707871
|
6e982d86df6afe3ae2d18e9278bbc2bd6b6e917c
| 3,127
|
/*
* Copyright 2018 Erik Amzallag
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.dajlab.jcontrollab.gui.model;
import java.util.HashMap;
import java.util.Map;
import org.dajlab.jcontrollab.core.OutputPortEnum;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
/**
* Model for a control lab.
*
* @author Erik Amzallag
*
*/
public class ConsoleModel {
/**
* Control lab number.
*/
private IntegerProperty number = new SimpleIntegerProperty();
/**
* Serial Port.
*/
private StringProperty port = new SimpleStringProperty();
/**
* Tab title.
*/
private StringProperty title = new SimpleStringProperty();
/**
* Map for each output model.
*/
private Map<OutputPortEnum, OutputModel> outputModelsMap = new HashMap<>(OutputPortEnum.values().length);
/**
* Constructor.
*
* @param number
* number
* @param title
* title
* @param port
* port
*/
public ConsoleModel(int number, String title, String port) {
this.number.set(number);
this.title.set(title);
this.port.set(port);
for (OutputPortEnum portEnum : OutputPortEnum.values()) {
OutputModel outModel = new OutputModel(portEnum);
outputModelsMap.put(portEnum, outModel);
}
}
/**
* @return the outputModelsMap
*/
public final Map<OutputPortEnum, OutputModel> getOutputModelsMap() {
return outputModelsMap;
}
/**
* @param outputModelsMap
* the outputModelsMap to set
*/
public final void setOutputModelsMap(Map<OutputPortEnum, OutputModel> outputModelsMap) {
this.outputModelsMap = outputModelsMap;
}
public final IntegerProperty numberProperty() {
return this.number;
}
public final int getNumber() {
return this.numberProperty().get();
}
public final void setNumber(final int number) {
this.numberProperty().set(number);
}
public final StringProperty portProperty() {
return this.port;
}
public final java.lang.String getPort() {
return this.portProperty().get();
}
public final void setPort(final java.lang.String port) {
this.portProperty().set(port);
}
public final StringProperty titleProperty() {
return this.title;
}
public final java.lang.String getTitle() {
return this.titleProperty().get();
}
public final void setTitle(final java.lang.String title) {
this.titleProperty().set(title);
}
}
| 24.622047
| 107
| 0.684042
|
20d72e64dd606c930d6fc801fcbf3266883ce08a
| 432
|
package ru.vzotov.purchases.domain.model;
import ru.vzotov.purchase.domain.model.Purchase;
import ru.vzotov.purchase.domain.model.PurchaseId;
import java.time.LocalDateTime;
import java.util.List;
public interface PurchaseRepository {
Purchase find(PurchaseId id);
void store(Purchase purchase);
List<Purchase> findByDate(LocalDateTime fromDateTime, LocalDateTime toDateTime);
boolean delete(PurchaseId id);
}
| 24
| 84
| 0.789352
|
e3197014af38877e4f2503abaa6c3a58d73ffbb8
| 2,163
|
package es.tid.pce.pcep.objects.subobjects;
import java.net.Inet4Address;
import java.net.UnknownHostException;
/**
* IPv4 prefix Subobject
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|X| Type = 1 | Length | IPv4 address (4 bytes) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| IPv4 address (continued) | Prefix Length | Attribute |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* @author Oscar Gonzalez de Dios
*/
public class IPv4PrefixXROSubobject extends XROSubobject{
public Inet4Address ipv4address;//IPv4 address
public int prefix;//IPv4 prefix
public IPv4PrefixXROSubobject(){
super();
this.setType(XROSubObjectValues.XRO_SUBOBJECT_IPV4PREFIX);
}
public IPv4PrefixXROSubobject(byte [] bytes, int offset){
super(bytes, offset);
decode();
}
public void encode(){
this.erosolength=8;
this.subobject_bytes=new byte[this.erosolength];
encodeSoHeader();
System.arraycopy(ipv4address.getAddress(), 0, this.subobject_bytes, 2, 4);
this.subobject_bytes[6]=(byte)prefix;
this.subobject_bytes[7]=(byte)attribute;
}
public void decode(){
byte[] ipadd=new byte[4];
System.arraycopy(this.subobject_bytes,2, ipadd, 0, 4);
try {
ipv4address=(Inet4Address)Inet4Address.getByAddress(ipadd);
} catch (UnknownHostException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
prefix=this.subobject_bytes[6]&0xFF;
attribute=subobject_bytes[7]&0xFF;
}
public Inet4Address getIpv4address() {
return ipv4address;
}
public void setIpv4address(Inet4Address ipv4address) {
this.ipv4address = ipv4address;
}
public int getPrefix() {
return prefix;
}
public void setPrefix(int prefix) {
this.prefix = prefix;
}
public String toString(){
String resp=ipv4address+"/"+prefix;
return resp;
}
}
| 26.378049
| 78
| 0.570504
|
c8104aa20a445a7576f1cff2f8d527a1a3380957
| 15,793
|
package com.example.bozhilun.android.bzlmaps.sos;
import android.Manifest;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.net.Uri;
import android.os.Handler;
import android.os.Message;
import android.provider.CallLog;
import androidx.core.app.ActivityCompat;
import androidx.core.content.PermissionChecker;
import android.util.Log;
import com.example.bozhilun.android.Commont;
import com.example.bozhilun.android.MyApp;
import com.example.bozhilun.android.siswatch.utils.WatchUtils;
import com.example.bozhilun.android.util.PhoneUtile;
import com.suchengkeji.android.w30sblelibrary.utils.SharedPreferencesUtils;
import java.util.Date;
public class CallReceiver extends PhonecallReceiver {
@Override
protected void onIncomingCallReceived(Context ctx, String number, Date start) {
Log.d("----------AA", "onIncomingCallReceived");//来电(接收方)
}
@Override
protected void onIncomingCallAnswered(Context ctx, String number, Date start) {
Log.d("----------AA", "onIncomingCallAnswered");//来电接通(接收方)
}
@Override
protected void onIncomingCallEnded(Context ctx, String number, Date start, Date end) {
Log.d("----------AA", "onIncomingCallEnded");//接通后通话结束(接收方)
}
@Override
protected void onOutgoingCallStarted(Context ctx, String number, Date start) {
if (!WatchUtils.isEmpty(number)) Log.d("----------AA", "去点" + number);//去点未接通(发送方)
}
@Override
protected void onOutgoingCallEnded(Context ctx, String number, Date start, Date end) {
// 这里处理,挂断的相关操作,(发送方)
Log.d("----------AA", "未通或挂断 onOutgoingCallEnded");//去点未接通(发送方)
try {
if (!WatchUtils.isEmpty(number)) {
if (ActivityCompat.checkSelfPermission(ctx, Manifest.permission.READ_CALL_LOG)
== PackageManager.PERMISSION_GRANTED
// && ActivityCompat.checkSelfPermission(ctx, Manifest.permission.WRITE_CALL_LOG )
// == PackageManager.PERMISSION_GRANTED
) {
number = number.trim().replace(" ", "");
Commont.SENDPHONE_COUNT++;
getCallLogState(ctx, number);
}
}
}catch (Exception e){
e.printStackTrace();
}
}
@Override
protected void onMissedCall(Context ctx, String number, Date start) {
Log.d("----------AA", "onMissedCall");//来电未接通(接收方)
}
/**
* 查询最后一次的通话时长---- 确定通话是否成功
*
* @param context
* @param number
* @return
*/
private void getCallLogState(Context context, String number) {
try {
if (WatchUtils.isEmpty(number)) return;
ContentResolver cr = context.getContentResolver();
PermissionChecker.checkSelfPermission(context, Manifest.permission.READ_CALL_LOG);
final Cursor cursor = cr.query(CallLog.Calls.CONTENT_URI,
new String[]{CallLog.Calls.NUMBER, CallLog.Calls.TYPE, CallLog.Calls.DURATION},
CallLog.Calls.NUMBER + "=?",
new String[]{number},
CallLog.Calls.DATE + " desc");
int i = 0;
if (cursor != null)
while (cursor.moveToNext()) {
if (i == 0) {//第一个记录 也就是当前这个电话的记录
int durationIndex = cursor.getColumnIndex(CallLog.Calls.DURATION);
long durationTime = cursor.getLong(durationIndex);
if (durationTime > 0) {
Log.d("----------AA", "第一次查询 接通了 时长= " + durationTime);
} else {
Log.d("----------AA", "第一次查询 没接通");
}
}
i++;
}
if (cursor != null) cursor.close();
Message message = new Message();
message.what = 0x01;
message.obj = number;
if (handler != null) handler.sendMessageDelayed(message, 5000);
}catch (Exception e){
e.printStackTrace();
}
}
/**
* 查询最后一次的通话时长---- 确定通话是否成功
*
* @param context
* @param number
* @return
*/
private boolean getCallLogStateBoolean(Context context, String number) {
Log.d("----------AA", "小米手机第一次查询不对,所以查询两次,根据第二次为标准");
try {
if (WatchUtils.isEmpty(number)) return false;
boolean isLink = false;
ContentResolver cr = context.getContentResolver();
PermissionChecker.checkSelfPermission(context, Manifest.permission.READ_CALL_LOG);
final Cursor cursor = cr.query(CallLog.Calls.CONTENT_URI,
new String[]{CallLog.Calls.NUMBER, CallLog.Calls.TYPE, CallLog.Calls.DURATION},
CallLog.Calls.NUMBER + "=?",
new String[]{number},
CallLog.Calls.DATE + " desc");
int i = 0;
if(cursor == null)
return false;
while (cursor.moveToNext()) {
if (i == 0) {//第一个记录 也就是当前这个电话的记录
int durationIndex = cursor.getColumnIndex(CallLog.Calls.DURATION);
long durationTime = cursor.getLong(durationIndex);
if (durationTime > 0) {
Log.d("----------AA", "第二次查询 接通了 时长= " + durationTime);
isLink = true;
} else {
Log.d("----------AA", "第二次查询 这是else里");
isLink = false;
}
}
i++;
}
cursor.close();
return isLink;
}catch (Exception e){
e.printStackTrace();
return false;
}
}
//点击事件调用的类
protected void call(final String tel) {
try {
//直接拨打
Log.d("GPS", "call:" + tel);
Uri uri = Uri.parse("tel:" + tel);
Intent intent = new Intent(Intent.ACTION_CALL, uri);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
if (ActivityCompat.checkSelfPermission(MyApp.getInstance(), Manifest.permission.CALL_PHONE) != PackageManager.PERMISSION_GRANTED) {
return;
}
MyApp.getInstance().startActivity(intent);
}catch (Exception e){
e.printStackTrace();
}
}
// //标记位置防止多次调用onchange
// public int getLastCallId(Context context) {
// try {
// if (ActivityCompat.checkSelfPermission(context, Manifest.permission.READ_CALL_LOG) != PackageManager.PERMISSION_GRANTED) {
// return -1;
// }
// Cursor cur = context.getContentResolver().query(outSMSUri, null, null, null, CallLog.Calls.DATE + " desc");
// cur.moveToFirst();
// int lastMsgId = cur.getInt(cur.getColumnIndex("_id"));
// return lastMsgId;
// } catch (Exception e) {
// e.printStackTrace();
// }
// return -1;
// }
//
//// private static volatile int initialPos;
// private static final Uri outSMSUri = CallLog.Calls.CONTENT_URI;
// protected void queryLastCall(Context context, String address) {
// try {
// if (ActivityCompat.checkSelfPermission(context, Manifest.permission.READ_CALL_LOG) != PackageManager.PERMISSION_GRANTED) {
// return;
// }
// Cursor cur = context.getContentResolver().query(outSMSUri, null, null, null, CallLog.Calls.DEFAULT_SORT_ORDER);//CallLog.Calls.DATE + " desc");
// if (cur.moveToNext()) {
//// if (initialPos != getLastCallId(context)) {
// if (!TextUtils.isEmpty(address)) {
// if (cur.getString(cur.getColumnIndex("number")).contains(address)) {
// int _id = cur.getInt(cur.getColumnIndex("_id"));
// int type = cur.getInt(cur.getColumnIndex("type"));//通话类型,1 来电 .INCOMING_TYPE;2 已拨 .OUTGOING_;3 未接 .MISSED_
// String number = cur.getString(cur.getColumnIndex("number"));// 电话号码
// int duration = cur.getInt(cur.getColumnIndex("duration"));//通话时长,单位:秒
// String last_modified = cur.getString(cur.getColumnIndex("last_modified"));
//
// String msgObj = "\nID:" + _id + "\n类型:" + type + "\n号码:" + number + "\n时长:" + duration + "====" + last_modified;
// Log.e("----------AA","查到的数据"+ msgObj);
//
// if (type == 2) {
// if (duration > 0) {
// Log.e("----------AA", "大于");
// } else {
//
// Log.e("----------AA", "小于");
// }
// }
//
// }
// }
//// initialPos = getLastCallId(context);
//// }
//
// }
// cur.close();
// } catch (Exception e) {
// e.printStackTrace();
// }
// }
private Handler handler = new Handler(new Handler.Callback() {
@Override
public boolean handleMessage(Message message) {
boolean isSos = (boolean) SharedPreferencesUtils.getParam(MyApp.getContext(), Commont.ISHelpe, false);//sos
String stringpersonOne = (String) SharedPreferencesUtils.getParam(MyApp.getInstance(), "personOne", "").toString().trim().replace(" ", "");
String stringpersonTwo = (String) SharedPreferencesUtils.getParam(MyApp.getInstance(), "personTwo", "").toString().trim().replace(" ", "");
String stringpersonThree = (String) SharedPreferencesUtils.getParam(MyApp.getInstance(), "personThree", "").toString().trim().replace(" ", "");
boolean callLogStateBoolean = false;
switch (message.what) {
case 0x01:
String number1 = message.obj.toString();
if (!WatchUtils.isEmpty(number1)) {
Log.d("----------AA", "消息指" + number1);
handler.removeMessages(0x01);
if (PhoneUtile.SIMTYPE(MyApp.getInstance()) == 0) {// 国内 移动,联通
callLogStateBoolean = getCallLogStateBoolean(MyApp.getInstance(), number1);
if (!callLogStateBoolean) {//没接通状态
handler.obtainMessage(0x02, number1).sendToTarget();
} else {//接通后,后面的不在重复
Commont.SENDPHONE_COUNT = 4;
//Commont.isGPSed = false;
Commont.IS_RING_SOS = false;
}
} else {//---其他
handler.obtainMessage(0x02, number1).sendToTarget();
}
}
break;
case 0x02:
String number = message.obj.toString();
if (!WatchUtils.isEmpty(number)) {
Log.d("----------AA", stringpersonOne + "-----" + stringpersonTwo + "=======" + stringpersonThree);
handler.removeMessages(0x02);
if (isSos && (stringpersonOne.equals(number)
|| stringpersonTwo.equals(number)
|| stringpersonThree.equals(number))) {
Log.d("----------AA", "没打通啊" + Commont.SENDPHONE_COUNT);//去点未接通
if (Commont.SENDPHONE_COUNT <= 2) {
if (stringpersonOne.equals(number)
&& stringpersonTwo.equals(number)
&& stringpersonThree.equals(number)) {
call(stringpersonOne);
} else if (stringpersonOne.equals(number)
&& stringpersonTwo.equals(number)) {
if (!WatchUtils.isEmpty(stringpersonThree)) {
call(stringpersonThree);
} else if (!WatchUtils.isEmpty(stringpersonOne)) {
call(stringpersonOne);
}
} else if (stringpersonOne.equals(number)
&& stringpersonThree.equals(number)) {
if (!WatchUtils.isEmpty(stringpersonTwo)) {
call(stringpersonTwo);
} else if (!WatchUtils.isEmpty(stringpersonOne)) {
call(stringpersonOne);
}
} else if (stringpersonTwo.equals(number)
&& stringpersonThree.equals(number)) {
if (!WatchUtils.isEmpty(stringpersonOne)) {
call(stringpersonOne);
} else if (!WatchUtils.isEmpty(stringpersonThree)) {
call(stringpersonThree);
}
} else if (stringpersonOne.equals(number)) {
if (!WatchUtils.isEmpty(stringpersonTwo)) {
call(stringpersonTwo);
} else if (!WatchUtils.isEmpty(stringpersonThree)) {
call(stringpersonThree);
} else if (!WatchUtils.isEmpty(stringpersonOne)) {
call(stringpersonOne);
}
} else if (stringpersonTwo.equals(number)) {
if (!WatchUtils.isEmpty(stringpersonThree)) {
call(stringpersonThree);
} else if (!WatchUtils.isEmpty(stringpersonOne)) {
call(stringpersonOne);
} else if (!WatchUtils.isEmpty(stringpersonTwo)) {
call(stringpersonTwo);
}
} else if (stringpersonThree.equals(number)) {
if (!WatchUtils.isEmpty(stringpersonOne)) {
call(stringpersonOne);
} else if (!WatchUtils.isEmpty(stringpersonTwo)) {
call(stringpersonTwo);
} else if (!WatchUtils.isEmpty(stringpersonThree)) {
call(stringpersonThree);
}
}
} else {
Commont.SENDPHONE_COUNT = 4;
//Commont.isGPSed = false;
Commont.IS_RING_SOS = false;
}
}
}
break;
}
return false;
}
});
}
| 45.252149
| 157
| 0.47578
|
344a5b71d4554a37bfd38128be5cb985fda13737
| 10,663
|
// Decompiled by Jad v1.5.8g. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package com.itextpdf.text.io;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.Iterator;
import java.util.LinkedList;
// Referenced classes of package com.itextpdf.text.io:
// GroupedRandomAccessSource, RandomAccessSource, MappedChannelRandomAccessSource
class PagedChannelRandomAccessSource extends GroupedRandomAccessSource
implements RandomAccessSource
{
private static class MRU
{
public Object enqueue(Object obj)
{
if(queue.size() <= 0 || queue.getFirst() != obj)
//* 0 0:aload_0
//* 1 1:getfield #23 <Field LinkedList queue>
//* 2 4:invokevirtual #32 <Method int LinkedList.size()>
//* 3 7:ifle 23
//* 4 10:aload_0
//* 5 11:getfield #23 <Field LinkedList queue>
//* 6 14:invokevirtual #36 <Method Object LinkedList.getFirst()>
//* 7 17:aload_1
//* 8 18:if_acmpne 23
//* 9 21:aconst_null
//* 10 22:areturn
{
for(Iterator iterator = queue.iterator(); iterator.hasNext();)
//* 11 23:aload_0
//* 12 24:getfield #23 <Field LinkedList queue>
//* 13 27:invokevirtual #40 <Method Iterator LinkedList.iterator()>
//* 14 30:astore_2
//* 15 31:aload_2
//* 16 32:invokeinterface #46 <Method boolean Iterator.hasNext()>
//* 17 37:ifeq 66
if(obj == iterator.next())
//* 18 40:aload_1
//* 19 41:aload_2
//* 20 42:invokeinterface #49 <Method Object Iterator.next()>
//* 21 47:if_acmpne 31
{
iterator.remove();
// 22 50:aload_2
// 23 51:invokeinterface #52 <Method void Iterator.remove()>
queue.addFirst(obj);
// 24 56:aload_0
// 25 57:getfield #23 <Field LinkedList queue>
// 26 60:aload_1
// 27 61:invokevirtual #56 <Method void LinkedList.addFirst(Object)>
return ((Object) (null));
// 28 64:aconst_null
// 29 65:areturn
}
queue.addFirst(obj);
// 30 66:aload_0
// 31 67:getfield #23 <Field LinkedList queue>
// 32 70:aload_1
// 33 71:invokevirtual #56 <Method void LinkedList.addFirst(Object)>
if(queue.size() > limit)
//* 34 74:aload_0
//* 35 75:getfield #23 <Field LinkedList queue>
//* 36 78:invokevirtual #32 <Method int LinkedList.size()>
//* 37 81:aload_0
//* 38 82:getfield #25 <Field int limit>
//* 39 85:icmple 21
return queue.removeLast();
// 40 88:aload_0
// 41 89:getfield #23 <Field LinkedList queue>
// 42 92:invokevirtual #59 <Method Object LinkedList.removeLast()>
// 43 95:areturn
}
return ((Object) (null));
}
private final int limit;
private LinkedList queue;
public MRU(int i)
{
// 0 0:aload_0
// 1 1:invokespecial #18 <Method void Object()>
queue = new LinkedList();
// 2 4:aload_0
// 3 5:new #20 <Class LinkedList>
// 4 8:dup
// 5 9:invokespecial #21 <Method void LinkedList()>
// 6 12:putfield #23 <Field LinkedList queue>
limit = i;
// 7 15:aload_0
// 8 16:iload_1
// 9 17:putfield #25 <Field int limit>
// 10 20:return
}
}
public PagedChannelRandomAccessSource(FileChannel filechannel)
throws IOException
{
this(filechannel, 0x4000000, 16);
// 0 0:aload_0
// 1 1:aload_1
// 2 2:ldc1 #14 <Int 0x4000000>
// 3 4:bipush 16
// 4 6:invokespecial #27 <Method void PagedChannelRandomAccessSource(FileChannel, int, int)>
// 5 9:return
}
public PagedChannelRandomAccessSource(FileChannel filechannel, int i, int j)
throws IOException
{
super(buildSources(filechannel, i / j));
// 0 0:aload_0
// 1 1:aload_1
// 2 2:iload_2
// 3 3:iload_3
// 4 4:idiv
// 5 5:invokestatic #33 <Method RandomAccessSource[] buildSources(FileChannel, int)>
// 6 8:invokespecial #36 <Method void GroupedRandomAccessSource(RandomAccessSource[])>
channel = filechannel;
// 7 11:aload_0
// 8 12:aload_1
// 9 13:putfield #38 <Field FileChannel channel>
bufferSize = i / j;
// 10 16:aload_0
// 11 17:iload_2
// 12 18:iload_3
// 13 19:idiv
// 14 20:putfield #40 <Field int bufferSize>
mru = new MRU(j);
// 15 23:aload_0
// 16 24:new #8 <Class PagedChannelRandomAccessSource$MRU>
// 17 27:dup
// 18 28:iload_3
// 19 29:invokespecial #43 <Method void PagedChannelRandomAccessSource$MRU(int)>
// 20 32:putfield #45 <Field PagedChannelRandomAccessSource$MRU mru>
// 21 35:return
}
private static RandomAccessSource[] buildSources(FileChannel filechannel, int i)
throws IOException
{
long l = filechannel.size();
// 0 0:aload_0
// 1 1:invokevirtual #51 <Method long FileChannel.size()>
// 2 4:lstore 4
if(l <= 0L)
//* 3 6:lload 4
//* 4 8:lconst_0
//* 5 9:lcmp
//* 6 10:ifgt 23
throw new IOException("File size must be greater than zero");
// 7 13:new #24 <Class IOException>
// 8 16:dup
// 9 17:ldc1 #53 <String "File size must be greater than zero">
// 10 19:invokespecial #56 <Method void IOException(String)>
// 11 22:athrow
int k = (int)(l / (long)i);
// 12 23:lload 4
// 13 25:iload_1
// 14 26:i2l
// 15 27:ldiv
// 16 28:l2i
// 17 29:istore_3
int j;
MappedChannelRandomAccessSource amappedchannelrandomaccesssource[];
if(l % (long)i == 0L)
//* 18 30:lload 4
//* 19 32:iload_1
//* 20 33:i2l
//* 21 34:lrem
//* 22 35:lconst_0
//* 23 36:lcmp
//* 24 37:ifne 97
j = 0;
// 25 40:iconst_0
// 26 41:istore_2
else
//* 27 42:iload_3
//* 28 43:iload_2
//* 29 44:iadd
//* 30 45:istore_3
//* 31 46:iload_3
//* 32 47:anewarray MappedChannelRandomAccessSource[]
//* 33 50:astore 8
//* 34 52:iconst_0
//* 35 53:istore_2
//* 36 54:iload_2
//* 37 55:iload_3
//* 38 56:icmpge 102
//* 39 59:iload_2
//* 40 60:i2l
//* 41 61:iload_1
//* 42 62:i2l
//* 43 63:lmul
//* 44 64:lstore 6
//* 45 66:aload 8
//* 46 68:iload_2
//* 47 69:new #58 <Class MappedChannelRandomAccessSource>
//* 48 72:dup
//* 49 73:aload_0
//* 50 74:lload 6
//* 51 76:lload 4
//* 52 78:lload 6
//* 53 80:lsub
//* 54 81:iload_1
//* 55 82:i2l
//* 56 83:invokestatic #64 <Method long Math.min(long, long)>
//* 57 86:invokespecial #67 <Method void MappedChannelRandomAccessSource(FileChannel, long, long)>
//* 58 89:aastore
//* 59 90:iload_2
//* 60 91:iconst_1
//* 61 92:iadd
//* 62 93:istore_2
//* 63 94:goto 54
j = 1;
// 64 97:iconst_1
// 65 98:istore_2
k += j;
amappedchannelrandomaccesssource = new MappedChannelRandomAccessSource[k];
for(j = 0; j < k; j++)
{
long l1 = (long)j * (long)i;
amappedchannelrandomaccesssource[j] = new MappedChannelRandomAccessSource(filechannel, l1, Math.min(l - l1, i));
}
//* 66 99:goto 42
return ((RandomAccessSource []) (amappedchannelrandomaccesssource));
// 67 102:aload 8
// 68 104:areturn
}
public void close()
throws IOException
{
super.close();
// 0 0:aload_0
// 1 1:invokespecial #71 <Method void GroupedRandomAccessSource.close()>
channel.close();
// 2 4:aload_0
// 3 5:getfield #38 <Field FileChannel channel>
// 4 8:invokevirtual #72 <Method void FileChannel.close()>
// 5 11:return
}
protected int getStartingSourceIndex(long l)
{
return (int)(l / (long)bufferSize);
// 0 0:lload_1
// 1 1:aload_0
// 2 2:getfield #40 <Field int bufferSize>
// 3 5:i2l
// 4 6:ldiv
// 5 7:l2i
// 6 8:ireturn
}
protected void sourceInUse(RandomAccessSource randomaccesssource)
throws IOException
{
((MappedChannelRandomAccessSource)randomaccesssource).open();
// 0 0:aload_1
// 1 1:checkcast #58 <Class MappedChannelRandomAccessSource>
// 2 4:invokevirtual #79 <Method void MappedChannelRandomAccessSource.open()>
// 3 7:return
}
protected void sourceReleased(RandomAccessSource randomaccesssource)
throws IOException
{
randomaccesssource = (RandomAccessSource)mru.enqueue(((Object) (randomaccesssource)));
// 0 0:aload_0
// 1 1:getfield #45 <Field PagedChannelRandomAccessSource$MRU mru>
// 2 4:aload_1
// 3 5:invokevirtual #84 <Method Object PagedChannelRandomAccessSource$MRU.enqueue(Object)>
// 4 8:checkcast #6 <Class RandomAccessSource>
// 5 11:astore_1
if(randomaccesssource != null)
//* 6 12:aload_1
//* 7 13:ifnull 22
randomaccesssource.close();
// 8 16:aload_1
// 9 17:invokeinterface #85 <Method void RandomAccessSource.close()>
// 10 22:return
}
public static final int DEFAULT_MAX_OPEN_BUFFERS = 16;
public static final int DEFAULT_TOTAL_BUFSIZE = 0x4000000;
private final int bufferSize;
private final FileChannel channel;
private final MRU mru;
}
| 35.307947
| 115
| 0.530714
|
e011ff0d1eefc3f1d3fbcd4c7adc16163bf846f8
| 695
|
package io.changock.driver.api.driver;
import io.changock.driver.api.common.Validable;
import io.changock.driver.api.entry.ChangeEntry;
import io.changock.driver.api.entry.ChangeEntryService;
import io.changock.driver.api.lock.LockManager;
import java.util.Set;
/**
 * Driver abstraction over the underlying persistence technology, exposing
 * initialization state, lock management, change-entry persistence and the
 * driver's injectable dependencies to the Changock runner.
 *
 * @param <CHANGE_ENTRY> concrete change-entry type this driver persists
 */
public interface ConnectionDriver<CHANGE_ENTRY extends ChangeEntry> extends Validable {

  /** Returns true once {@link #initialize()} has been executed. */
  boolean isInitialized();

  /** Prepares the driver's resources for use. */
  void initialize();

  /** Returns the lock manager. */
  LockManager getLockManager();

  /** Returns the lock manager after acquiring it (per the method name — see implementations). */
  LockManager getAndAcquireLockManager();

  /** Returns the service used to read and write change entries. */
  ChangeEntryService<CHANGE_ENTRY> getChangeEntryService();

  /** Returns the dependencies this driver contributes for change-set injection. */
  Set<ChangeSetDependency> getDependencies();

  /** Returns the map of parameters forbidden to change-set methods. */
  ForbiddenParametersMap getForbiddenParameters();

  /**
   * Returns the change-log class used for legacy migrations.
   *
   * @param runAlways whether the run-always variant is requested
   * @return the legacy migration change-log class (wildcard-typed instead of a raw {@code Class})
   */
  Class<?> getLegacyMigrationChangeLogClass(boolean runAlways);
}
| 34.75
| 87
| 0.825899
|
8c3510a35d9e61293a7f90618b54d37bf469594a
| 239
|
package com.daniel.weixin.mp.api;
/**
 * Message matcher, used during message routing.
 */
public interface WxMpMessageMatcher {
    /**
     * Tests whether a message matches this matcher's pattern.
     *
     * @param message the incoming WeChat public-platform XML message
     * @return true if the message matches, false otherwise
     */
    boolean match(com.daniel.weixin.mp.bean.WxMpXmlMessage message);
}
| 14.9375
| 73
| 0.677824
|
b50aa5ea80b09afb76c1048a6a04083c10376ea7
| 1,196
|
package com.zarbosoft.merman.core;
/**
 * A unit of deferred work, ordered for processing by descending
 * {@link #priority()}. When a run reports completion the task is destroyed;
 * a destroyed task refuses to run again.
 */
public abstract class IterationTask implements Comparable<IterationTask> {
  public boolean destroyed = false;

  /** Scheduling priority; per {@link #compareTo} larger values sort earlier. Defaults to 0. */
  protected double priority() {
    return 0;
  }

  /** @return true if the task wants to be run again, false when it is finished */
  protected abstract boolean runImplementation(IterationContext iterationContext);

  /**
   * Runs the task unless it was already destroyed, destroying it when the
   * implementation reports completion.
   *
   * @param iterationContext context handed through to the implementation
   * @return whether the task actually ran and wants another turn
   */
  public boolean run(final IterationContext iterationContext) {
    if (destroyed) {
      return false;
    }
    final boolean wantsMore = runImplementation(iterationContext);
    if (!wantsMore) {
      destroy();
    }
    return wantsMore;
  }

  @Override
  public int compareTo(final IterationTask t) {
    // Reversed argument order instead of negating: higher priority sorts first.
    return Double.compare(t.priority(), priority());
  }

  /** Marks the task dead; destroying twice is a programming error. */
  public void destroy() {
    if (destroyed) {
      throw new AssertionError();
    }
    destroyed();
    destroyed = true;
  }

  /** Hook invoked exactly once when the task is destroyed. */
  protected abstract void destroyed();

  /** Well-known priority constants for the iteration phases. */
  public static class P {
    public static final double coursePlace = 170;
    public static final double courseCompact = 165;
    public static final double wallAdjust = 160;
    public static final double layBricks = 150;
    public static final double notifyBricks = 140;
    public static final double wallCompact = 110;
    public static final double courseExpand = -95;
    public static final double wallExpand = -100;
  }
}
| 27.813953
| 82
| 0.722408
|
30bcf9abda5ba2b435d4a9b479ccc06bc6a3dbf8
| 6,013
|
package org.tourgune.emocionometro;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import org.tourgune.emocionometro.bean.SurveyBean;
import org.tourgune.emocionometro.dao.SurveyDAO;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
/**
 * Activity that lists the stored emotion surveys and offers two actions:
 * exporting ("commit") the surveys to the SD card, or wiping the survey
 * database after a confirmation dialog.
 */
public class History extends Activity implements OnClickListener {
	private Button commitButton;
	private static final int commitButtonId = R.id.historyCommitTxt;
	private Button deleteButton;
	private static final int deleteButtonId = R.id.historyDropButton;
	private ListView list;
	private static final int listId = R.id.historyListView;
	// Data backing the ListView and the adapter that renders it.
	ArrayList<Item> itemList;
	ItemsAdapter listAdapter;
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.history);
		initVariables();
		listeners();
		listAdapter();
	}
	// Resolves the view references used by this screen.
	public void initVariables(){
		commitButton=(Button) findViewById(commitButtonId);
		deleteButton=(Button) findViewById(deleteButtonId);
		list=(ListView) findViewById(listId);
	}
	// Registers this activity as the click listener of both buttons.
	public void listeners(){
		commitButton.setOnClickListener(this);
		deleteButton.setOnClickListener(this);
	}
	/** One list row: a single stored survey result. */
	public class Item {
		public Drawable image;
		public int valueArousal;
		public int valuePleasure;
		public int valueDominance;
		public int userId;
		public long time;
		public Integer listIndex;
		public Item(int userId, long time, int valueArousal,int valuePleasure,int valueDominance) {
			this.userId = userId;
			this.time=time;
			this.valueArousal=valueArousal;
			this.valuePleasure=valuePleasure;
			this.valueDominance=valueDominance;
		}
	}
	// (Re)loads all surveys from the database and installs a fresh adapter.
	public void listAdapter() {
		itemList = new ArrayList<Item>();
		ArrayList<SurveyBean> surveyList;
		surveyList = SurveyDAO.instance(this).getSurveyList();
		for (int i = 0; i < surveyList.size(); i++) {
			SurveyBean surveyAux=surveyList.get(i);
			Item itemAux=new Item(surveyAux.getUserId(),surveyAux.getTime(),surveyAux.getValueArousal(), surveyAux.getValuePleasure(), surveyAux.getValueDominance());
			itemList.add(itemAux);
		}
		listAdapter = new ItemsAdapter(this, R.layout.listitemnew, itemList);
		list.setAdapter(listAdapter);
	}
	/** Adapter rendering one {@link Item} per row of the history list. */
	private class ItemsAdapter extends ArrayAdapter<Item> {
		private ArrayList<Item> items;
		public ItemsAdapter(Context context, int textViewResourceId,
				ArrayList<Item> items) {
			super(context, textViewResourceId, items);
			this.items = items;
		}
		@Override
		public View getView(int position, View convertView, ViewGroup parent) {
			View v = convertView;
			if (v == null) {
				// Inflate a fresh row when the ListView has none to recycle.
				LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
				v = vi.inflate(R.layout.listitemnew, null);
			}
			Item it = items.get(position);
			if (it != null) {
				TextView textHate = (TextView) v.findViewById(R.id.listItemValueHate);
				TextView textHappiness = (TextView) v.findViewById(R.id.listItemValueHappiness);
				TextView textSurprise = (TextView) v.findViewById(R.id.listItemValueSurprise);
				TextView it1 = (TextView) v.findViewById(R.id.listItemUserId);
				TextView it2 = (TextView) v.findViewById(R.id.listItemTime);
				// NOTE(review): only textHate is null-checked; it1/it2 here and
				// the colour calls below would NPE if any view were missing.
				if (textHate != null) {
					it1.setText(Integer.toString(it.userId));
					SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss");
					String hour = formatter.format(it.time);
					it2.setText(hour);
					// iv.setImageDrawable(getValueIcon(it.value));
					textHate.setText(getResources().getString(R.string.emotionsurveyArousal)+": "+Float.toString(getRealValue(it.valueArousal)));
					textHappiness.setText(getResources().getString(R.string.emotionsurveyPleasure)+": "+Float.toString(getRealValue(it.valuePleasure)));
					textSurprise.setText(getResources().getString(R.string.emotionsurveyDominance)+": "+Float.toString(getRealValue(it.valueDominance)));
				}
				// White-on-black row styling.
				v.setBackgroundColor(android.graphics.Color.BLACK);
				it1.setTextColor(android.graphics.Color.WHITE);
				it2.setTextColor(android.graphics.Color.WHITE);
				textHate.setTextColor(android.graphics.Color.WHITE);
				textHappiness.setTextColor(android.graphics.Color.WHITE);
				textSurprise.setTextColor(android.graphics.Color.WHITE);
			}
			return v;
		}
	}
	// NOTE(review): integer division — value/10 truncates (e.g. 35 -> 3). If a
	// fractional "real" value is intended, this should be value / 10f — confirm.
	public int getRealValue(int value){
		return value/10;
	}
	// Drops and recreates the survey database, then refreshes the list.
	public void resetDatabase(){
		SurveyDAO.instance(this).dropDatabase();
		SurveyDAO.instance(this).createDatabase();
		listAdapter();
	}
	// Shows a centered toast with the given text and duration.
	public void showToast(String text, int duration) {
		Toast subscribedToast = new Toast(this);
		subscribedToast = Toast.makeText(this, text, duration);
		subscribedToast.setGravity(Gravity.CENTER, 0, 0);
		subscribedToast.show();
	}
	@Override
	public void onClick(View v) {
		switch (v.getId()) {
		case commitButtonId:
			// Export the surveys to the SD card and report the result.
			String commitResult=SurveyDAO.instance(this).backupSurveysToSd(this,"");
			showToast(commitResult, 0);
			break;
		case deleteButtonId:
			// Ask for confirmation before wiping the database.
			AlertDialog.Builder ad = new AlertDialog.Builder(this);
			ad.setTitle(getString(R.string.DeleteWarningTitle));
			ad.setMessage(getString(R.string.DeleteWarning));
			ad.setNegativeButton(getString(R.string.noString), new Dialog.OnClickListener() {
				public void onClick(DialogInterface arg0, int arg1) {
				}
			});
			ad.setPositiveButton(getString(R.string.yesString), new Dialog.OnClickListener() {
				public void onClick(DialogInterface arg0, int arg1) {
					resetDatabase();
				}
			});
			ad.show();
			break;
		}
	}
}
| 27.331818
| 157
| 0.732579
|
30fc7636fee6a0e1310a80e63b8acc96bdde8d1f
| 706
|
package huawei;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
/**
* @author WuPing
* @version 2016年10月4日 下午7:12:51
*/
/**
 * Classic "100 coins buy 100 chickens" puzzle: a cock costs 5, a hen costs 3,
 * and three chicks cost 1. Find all combinations of exactly 100 birds costing
 * exactly 100 coins.
 */
public class Yuan100Buy100Chickens_58 {

	/**
	 * Enumerates all solutions in ascending cock order.
	 *
	 * @return list of {cock, hen, chick} triples satisfying both constraints
	 */
	static List<int[]> solve() {
		List<int[]> solutions = new ArrayList<int[]>();
		// cock <= 100/5 and hen <= 100/3, since each must fit in the budget.
		for (int cock = 0; cock < 100 / 5; cock++) {
			for (int hen = 0; hen < 100 / 3; hen++) {
				int chick = 100 - cock - hen;
				// Chicks are sold three per coin, so their count must divide by 3.
				if (chick % 3 == 0 && cock * 5 + hen * 3 + chick / 3 == 100) {
					solutions.add(new int[] {cock, hen, chick});
				}
			}
		}
		return solutions;
	}

	public static void main(String[] args) {
		Scanner cin = new Scanner(System.in);
		// Print the full solution set once per input token, as the original did
		// (the consumed value does not affect the output).
		while (cin.hasNext()) {
			cin.nextInt();
			for (int[] s : solve()) {
				System.out.println(s[0] + " " + s[1] + " " + s[2]);
			}
		}
		cin.close();
	}
}
| 17.65
| 54
| 0.492918
|
c0d7b8b8e72b0e62d38e6ac028cbed4d35b3ceef
| 879
|
package com.in28minutes.business;
import java.util.List;
import java.util.stream.Collectors;
import com.in28minutes.data.api.TodoService;
/**
 * Business layer over {@link TodoService}: filters and deletes a user's todos
 * based on whether they mention "Spring".
 */
public class TodoBusinessImpl {
	// Collaborator supplying the raw todo lists; injected via constructor.
	private TodoService todoService;

	TodoBusinessImpl(TodoService todoService) {
		this.todoService = todoService;
	}

	/** Returns only the todos of the given user that contain "Spring". */
	public List<String> retrieveTodosRelatedToSpring(String user) {
		List<String> allTodos = todoService.retrieveTodos(user);
		List<String> springTodos = allTodos.stream()
				.filter(item -> item.contains("Spring"))
				.collect(Collectors.toList());
		return springTodos;
	}

	/** Deletes every todo of the given user that does not contain "Spring". */
	public void deleteTodosNotRelatedToSpring(String user) {
		List<String> allTodos = todoService.retrieveTodos(user);
		for (String item : allTodos) {
			if (!item.contains("Spring")) {
				todoService.deleteTodo(item);
			}
		}
	}
}
| 24.416667
| 67
| 0.649602
|
71bd60f84714b38e762369debf45b281bd4aa90b
| 2,583
|
/********************************************************************************
* Copyright (c) 2014-2018 WANdisco
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Apache License, Version 2.0
*
********************************************************************************/
package com.google.gerrit.gerritconsoleapi.bindings;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.server.cache.CacheBackend;
import com.google.gerrit.server.cache.CacheBinding;
import com.google.gerrit.server.cache.PersistentCacheDef;
import com.google.gerrit.server.cache.PersistentCacheFactory;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.plugins.Plugin;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.errors.NotSupportedException;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Stub persistent-cache factory for the console API: persistence is disabled,
 * so every lifecycle entry point throws and the build methods return no cache.
 */
@Singleton
public class NonPersistanceCacheFactory implements PersistentCacheFactory, LifecycleListener {

  private static final Logger log =
      LoggerFactory.getLogger(NonPersistanceCacheFactory.class);

  @Inject
  NonPersistanceCacheFactory(
      @GerritServerConfig Config cfg,
      SitePaths site,
      DynamicMap<Cache<?, ?>> cacheMap) {
  }

  /** Builds the exception thrown by every lifecycle entry point of this stub. */
  private static RuntimeException notSupported() {
    return new RuntimeException(
        new NotSupportedException("NonPersistanceCache should not be called, use persist=false"));
  }

  @Override
  public void start() {
    throw notSupported();
  }

  @Override
  public void stop() {
    throw notSupported();
  }

  // No persistent cache is ever created; callers receive null.
  @SuppressWarnings({"unchecked"})
  @Override
  public <K, V> Cache<K, V> build(PersistentCacheDef<K, V> def, CacheBackend backend) {
    return null;
  }

  @SuppressWarnings({"unchecked"})
  @Override
  public <K, V> LoadingCache<K, V> build(PersistentCacheDef<K, V> def, CacheLoader<K, V> loader, CacheBackend backend) {
    return null;
  }

  @Override
  public void onStop(String plugin) {
    throw notSupported();
  }
}
| 31.5
| 121
| 0.720093
|
a81f4a29c0522d74cdd8cc83c218ad0ea2429649
| 9,170
|
package main;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.ls5.jlearn.interfaces.Alphabet;
import de.ls5.jlearn.interfaces.Automaton;
import de.ls5.jlearn.interfaces.State;
import de.ls5.jlearn.interfaces.Symbol;
import de.ls5.jlearn.shared.AlphabetImpl;
import de.ls5.jlearn.shared.AutomatonImpl;
/*
* Adapted from Tomte.
*/
/**
 * Reading and writing of Mealy-machine models in Graphviz dot format
 * (adapted from Tomte). Transition lines are expected to look like
 * {@code s0 -> s1 [label="input/output"];}.
 */
public class Dot {
	/** Regex for one dot transition line: from-state, to-state, input, output. */
	public static final String PATTERN_TRANSITION = "(s[0-9]+) -> (s[0-9]+)\\s*\\[label=\"([^\\s]*)\\s?/\\s?([^\\s]*)\"\\];?";
	/** Mutable per-state record used while parsing, before real State objects exist. */
	private static class TempState {
		public State state;
		// input label -> successor state
		public final Map<String, TempState> next = new HashMap<String, TempState>();
		// input label -> output label
		public final Map<String, String> out = new HashMap<String, String>();
	}
	/**
	 * Parses a dot file into an Automaton plus a mapping from the numeric state
	 * ids in the file ("sN") to the created State objects. "s0" is taken to be
	 * the start state.
	 *
	 * @param filename path of the dot file to read
	 * @return the automaton and the id-to-state mapping
	 * @throws IOException if the file cannot be read
	 */
	public static Tuple2<Automaton,IdStateMapping> readFile(String filename) throws IOException {
		Alphabet alphabet = new AlphabetImpl();
		// NOTE(review): reader is not closed if parsing throws below —
		// consider try-with-resources.
		BufferedReader reader = new BufferedReader(new FileReader(filename));
		Pattern transitionPattern = Pattern.compile(PATTERN_TRANSITION, Pattern.MULTILINE);
		String line = reader.readLine();
		Map<String, TempState> states = new HashMap<String, TempState>();
		Map<Integer, State> idStateMap = new HashMap<Integer, State>();
		// First pass: collect all states and transitions into TempState records.
		while (line != null) {
			if (line.contains("->")) {
				Matcher m = transitionPattern.matcher(line);
				if (m.find() ) {
					String from = m.group(1);
					String to = m.group(2);
					String input = m.group(3);
					String output = m.group(4);
					TempState fromState = states.get(from);
					if (fromState == null) {
						fromState = new TempState();
						states.put(from.intern(), fromState);
					}
					TempState toState = states.get(to);
					if (toState == null) {
						toState = new TempState();
						states.put(to.intern(), toState);
					}
					Symbol inputSym = SymbolCache.getSymbol(input);
					alphabet.addSymbol(inputSym);
					fromState.next.put(input.intern(), toState);
					fromState.out.put(input.intern(), output.intern());
				}
			}
			line = reader.readLine();
		}
		Automaton result = new AutomatonImpl(alphabet);
		// "s0" is the start state by convention.
		states.get("s0").state = result.getStart();
		idStateMap.put(0, result.getStart());
		int numStates=1;
		// Create a real State for every remaining temp state, keyed by its numeric id.
		for (Map.Entry<String, TempState> entry : states.entrySet()) {
			TempState state = entry.getValue();
			if (state.state == null) {
				State temp = result.addNewState();
				state.state = temp;
				Integer id = Integer.valueOf(entry.getKey().substring(1));
				idStateMap.put(id, temp);
				numStates ++;
			}
		}
		// Second pass: wire up the transitions between the real states.
		for (TempState state : states.values()) {
			for (String input : state.next.keySet()) {
				Symbol inputSym = SymbolCache.getSymbol(input);
				Symbol outputSym = SymbolCache.getSymbol(state.out.get(input));
				state.state.setTransition(inputSym,
						state.next.get(input).state, outputSym);
			}
		}
		reader.close();
		// Sanity checks on the parsed model.
		if (!result.isWellDefined()) {
			System.out.println("Automata not well defined!");
		}
		if (!result.getIncompleteStates().isEmpty()) {
			System.out.println("Automata has " + result.getIncompleteStates().size() + " incomplete states!");
		}
		if (numStates != result.getAllStates().size()) {
			System.out.println("Something has gone terribly wrong in parsing the model (learning library or hidden bug)");
			System.out.println("Expected number of states: " + numStates);
			System.out.println("Actual number of states: " + result.getAllStates().size());
			throw new InternalError();
		}
		return new Tuple2<>(result, new IdStateMapping(idStateMap));
	}
	// policy : convert into method throwing unchecked exception
	public static Tuple2<Automaton, IdStateMapping> readDotFile(String filepath) {
		try {
			return readFile(filepath);
		} catch (IOException ex) {
			throw new ExceptionAdapter(ex);
		}
	}
	/* write dot file in a deterministic sorted way :
	 * - alphabetic order the input alphabet
	 * - using this alphabetic ordered input alphabet do a breadthfirst search to all the states of the automaton
	 * - store the states in this order
	 * - for the same order of states store the transitions between the states
	 * - multiple transitions between a pair of states are ordered by the input order
	 * from the alphabetic ordered input alphabet
	 */
	static public void writeFile(Automaton model,String filepath, List<State> highlights, String description, boolean doubleCircleStartState,Set<Symbol> hide) throws IOException {
		BufferedWriter outstream= new BufferedWriter(new FileWriter(filepath));
		write(model, outstream, highlights, description, doubleCircleStartState, hide );
		outstream.close();
	}
	/** Wraps a checked exception so it can cross interfaces that do not declare it. */
	static class ExceptionAdapter extends RuntimeException {
		public ExceptionAdapter(Exception exc) {
			super(exc);
		}
	}
	//policy:
	// write dotfile with red double circeled start state
	public static void writeFile(Automaton automaton, String filepath) throws IOException {
		// highlight (red) only startstate
		LinkedList<State> highlights=new LinkedList<State>();
		State startState=automaton.getStart();
		highlights.add(startState); // states which are colored red in dotfile
		writeFile( automaton, filepath, highlights ,"" , true, new HashSet() );
	}
	// policy : convert into method throwing unchecked exception
	static public void writeDotFile(Automaton model,String filepath ) {
		try {
			writeFile(model, filepath);
		} catch (IOException ex) {
			throw new ExceptionAdapter(ex);
		}
	}
	/* write
	 * same as writeFile but then to Appendable instead of filepath
	 *
	 */
	public static void write(Automaton automaton, Appendable out, List<State> highlights, String description, boolean doubleCircleStartState,Set<Symbol> hide) {
		// Sort the alphabet so output is deterministic.
		List<Symbol> inputs=automaton.getAlphabet().getSymbolList();
		java.util.Collections.sort(inputs);
		// States are emitted in BFS order from the start state.
		List<State> states = main.AutomatonUtils.getStatesInBFSOrder(automaton);
		/*
		for( Symbol input: inputs ) {
		}
		*/
		//List<Symbol> getSymbolList().sort();
		HashMap<State,String> labels = new HashMap<State,String>();
		// List states = automaton.getAllStates();
		Set <State> highlightsSet = new HashSet <State> ();
		if (highlights != null) {
			for (State s : highlights) {
				highlightsSet.add(s);
			}
		}
		try {
			out.append("digraph G {\n");
			if (description != null) {
				out.append(new StringBuilder().append("label=\"").append(escapeDot(description)).append("\"").toString());
			}
			// states = AutomatonUtil.getStatesInBFSOrder(a);
			out.append("\n");
			// walk over states to print them with highlighter if set
			// and also immmediate store a state's label in 'labels' mapping ( state -> label )
			int i = 0;
			for (State s : states) {
				if (highlightsSet.contains(s))
					out.append(new StringBuilder().append("s").append(i).append(" [color=\"red\"]\n").toString());
				else {
					out.append(new StringBuilder().append("s").append(i).append("\n").toString());
				}
				labels.put(s, new StringBuilder().append("s").append(i++).toString());
			}
			// for each state
			// - print state
			// - print its transitions
			for (State s : states) {
				if (s != null) {
					String label = (String)labels.get(s);
					out.append(new StringBuilder().append(label).append(" [label=\"").append(escapeDot(label)).append("\"];\n").toString());
					// for (Symbol letter : s.getInputSymbols()) {
					for (Symbol letter : inputs) {
						//if (automaton.getAlphabet().getIndexForSymbol(letter) < realsigma) {
						String destlabel = (String)labels.get(s.getTransitionState(letter));
						// Transitions whose output symbol is in 'hide' are suppressed.
						if (hide.contains(s.getTransitionOutput(letter)))
						{
							continue;
						}
						if (destlabel != null) {
							out.append(new StringBuilder().append(label).append(" -> ").append(destlabel).append("[label=<<table border=\"0\" cellpadding=\"1\" cellspacing=\"0\"><tr><td>").append(getHTMLString(letter)).append("</td><td>/</td><td>").append(getHTMLString(s.getTransitionOutput(letter))).append("</td></tr></table>>]\n").toString());
						}
						// }
					}
				}
			}
			out.append("}\n");
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}
	//-----------------------------------------------------------------------------------------------------------------------------
	// helpers for new writedot
	//-----------------------------------------------------------------------------------------------------------------------------
	/** Escapes double quotes for use inside a dot string literal. */
	private static String escapeDot(String s)
	{
		return s.replace("\"", "\\\"");
	}
	/** Renders a symbol for use inside an HTML-like dot label. */
	private static String getHTMLString(Symbol sym) {
		return escapeDot(sym.toString());
	}
}
| 35.680934
| 336
| 0.633806
|
c392186b502097760e06b6dc7bf755905fb09be9
| 3,421
|
package ru.job4j.map;
import org.junit.Test;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.NoSuchElementException;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
/** Unit tests for HashMapContainer: insertion, duplicates, lookup, removal, growth, iteration. */
public class HashMapContainerTest {
	private User first = new User("petr", 3, new GregorianCalendar(2017, 3, 1));
	// Same field values as 'first' — used to test duplicate-key handling.
	private User duplicate = new User("petr", 3, new GregorianCalendar(2017, 3, 1));
	private User second = new User("ivan", 1, new GregorianCalendar(2015, 2, 15));
	private User third = new User("artem", 2, new GregorianCalendar(2007, 6, 24));
	/**
	 * Verify that an object is added to the container.
	 */
	@Test
	public void whenAddOneElemThenSizeOne() {
		HashMapContainer<User, Integer> map = new HashMapContainer<>();
		map.insert(this.first, 1);
		assertThat(map.size(), is(1));
	}
	/**
	 * Verify that duplicates are not added to the container.
	 */
	@Test
	public void whenAddDuplicateThenSizeOne() {
		HashMapContainer<User, Integer> map = new HashMapContainer<>();
		map.insert(this.first, 1);
		map.insert(this.duplicate, 2);
		assertThat(map.size(), is(1));
	}
	/**
	 * Verify that looking up an element by key works
	 * correctly.
	 */
	@Test
	public void whenCheckToContainsElementThenTrue() {
		HashMapContainer<User, Integer> map = new HashMapContainer<>();
		map.insert(this.first, 1);
		map.insert(this.second, 3);
		assertThat(map.get(this.first), is(1));
		assertThat(map.get(this.second), is(3));
		assertNull(map.get(this.third));
	}
	/**
	 * Verify that removing an object from the container works
	 * correctly.
	 */
	@Test
	public void whenRemoveElementThenTrue() {
		HashMapContainer<User, Integer> map = new HashMapContainer<>();
		map.insert(this.first, 1);
		map.insert(this.second, 2);
		assertThat(map.delete(this.second), is(true));
		assertThat(map.size(), is(1));
	}
	/**
	 * Verify that false is returned when the object to delete is not
	 * present in the container.
	 */
	@Test
	public void whenRemoveElementThatNotPresentThenFalse() {
		HashMapContainer<User, Integer> map = new HashMapContainer<>();
		map.insert(this.first, 1);
		map.insert(this.second, 2);
		assertThat(map.delete(this.third), is(false));
	}
	/**
	 * Verify that the internal array grows past the default capacity.
	 */
	@Test
	public void whenAddMoreThenDefaultCapacity16ThenNextAddWillOk() {
		HashMapContainer<String, Integer> map = new HashMapContainer<>();
		for (int i = 1; i <= 20; i++) {
			map.insert(String.valueOf(i), i);
		}
		assertThat(map.size(), is(20));
	}
	/**
	 * Verify that the iterator works correctly and throws once exhausted.
	 */
	@Test(expected = NoSuchElementException.class)
	public void whenThen() {
		HashMapContainer<User, Integer> map = new HashMapContainer<>();
		map.insert(this.first, 1);
		map.insert(this.second, 2);
		map.insert(this.third, 2);
		Iterator<User> it = map.iterator();
		assertThat(it.hasNext(), is(true));
		it.next();
		assertThat(it.hasNext(), is(true));
		it.next();
		assertThat(it.hasNext(), is(true));
		it.next();
		assertThat(it.hasNext(), is(false));
		it.next();
	}
}
| 30.274336
| 84
| 0.617071
|
e774ac2738439775ce87f5412112dc22374f6544
| 175
|
/**
* Contains an implementation of the large universe fuzzy identity-based encryption scheme of Sahai and Waters, 2005.
*/
package org.cryptimeleon.predenc.abe.fuzzy.large;
| 43.75
| 117
| 0.788571
|
4f1daf2a310104f927146e9774ad38ff0625107c
| 1,585
|
package org.cloudfoundry.credhub.controller.v1;
import org.cloudfoundry.credhub.audit.CEFAuditRecord;
import org.cloudfoundry.credhub.audit.entity.InterpolateCredentials;
import org.cloudfoundry.credhub.handler.InterpolationHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
 * REST endpoint that interpolates CredHub credential references inside an
 * arbitrary JSON document, recording a CEF audit entry for each request.
 */
@RestController
@RequestMapping(path = InterpolationController.API_V1, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
public class InterpolationController {

  static final String API_V1 = "/api/v1";

  private final InterpolationHandler jsonInterpolationHandler;
  // Final: assigned exactly once in the constructor, consistent with the handler field.
  private final CEFAuditRecord auditRecord;

  @Autowired
  InterpolationController(InterpolationHandler jsonInterpolationHandler, CEFAuditRecord auditRecord) {
    this.jsonInterpolationHandler = jsonInterpolationHandler;
    this.auditRecord = auditRecord;
  }

  /**
   * Interpolates CredHub references in the posted document (per the handler's
   * {@code interpolateCredHubReferences} contract).
   *
   * @param requestBody JSON document possibly containing credential references
   * @return the document with references interpolated
   */
  @RequestMapping(method = RequestMethod.POST, path = "/interpolate")
  @ResponseStatus(HttpStatus.OK)
  public Map<String, Object> interpolate(@RequestBody Map<String, Object> requestBody) {
    // Tag the audit record with the request type before delegating.
    auditRecord.setRequestDetails(new InterpolateCredentials());
    return jsonInterpolationHandler.interpolateCredHubReferences(requestBody);
  }
}
| 41.710526
| 104
| 0.835962
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.