focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Applies a usage-counter change to the given tenant's capacity row.
 *
 * @param counterMode      INCREMENT raises usage; any other mode decrements
 * @param tenant           tenant identifier the capacity row belongs to
 * @param ignoreQuotaLimit when true, increments without enforcing any quota
 * @return true when the underlying persist service reported a successful update
 */
private boolean updateTenantUsage(CounterMode counterMode, String tenant, boolean ignoreQuotaLimit) {
    final Timestamp now = TimeUtils.getCurrentTime();
    // Carrier object: only tenant, default quota and modification time are populated here.
    TenantCapacity tenantCapacity = new TenantCapacity();
    tenantCapacity.setTenant(tenant);
    tenantCapacity.setQuota(PropertyUtil.getDefaultTenantQuota());
    tenantCapacity.setGmtModified(now);
    if (CounterMode.INCREMENT == counterMode) {
        if (ignoreQuotaLimit) {
            return tenantCapacityPersistService.incrementUsage(tenantCapacity);
        }
        // First update the quota according to the default value. In most cases, it is the default value.
        // The quota field in the default value table is 0.
        // Short-circuit: the quota-limited increment only runs when the default-quota path fails.
        return tenantCapacityPersistService.incrementUsageWithDefaultQuotaLimit(tenantCapacity)
                || tenantCapacityPersistService.incrementUsageWithQuotaLimit(tenantCapacity);
    }
    return tenantCapacityPersistService.decrementUsage(tenantCapacity);
}
/** Increment must hit the default-quota path exactly once; decrement must delegate exactly once. */
@Test
void testUpdateTenantUsage() {
    when(tenantCapacityPersistService.incrementUsageWithDefaultQuotaLimit(any())).thenReturn(true);
    when(tenantCapacityPersistService.decrementUsage(any())).thenReturn(true);
    service.updateTenantUsage(CounterMode.INCREMENT, "testTenant");
    Mockito.verify(tenantCapacityPersistService, times(1)).incrementUsageWithDefaultQuotaLimit(any());
    service.updateTenantUsage(CounterMode.DECREMENT, "testTenant");
    Mockito.verify(tenantCapacityPersistService, times(1)).decrementUsage(any());
}
/**
 * Convenience overload that resolves the tag name from the attribute's XML code
 * and delegates to the string-based variant.
 */
public static String getTagValue( Node n, KettleAttributeInterface code ) {
    final String tagName = code.getXmlCode();
    return getTagValue( n, tagName );
}
/** With the empty-tag-yields-empty switch disabled ("N"), an empty tag must read back as null. */
@Test
public void getTagValueEmptyTagYieldsNullValue() {
    System.setProperty( Const.KETTLE_XML_EMPTY_TAG_YIELDS_EMPTY_VALUE, "N" );
    assertNull( XMLHandler.getTagValue( getNode(), "text" ) );
}
/**
 * Opens the socket and writer towards the Graphite endpoint, forcing a fresh DNS
 * lookup when only a hostname was supplied.
 *
 * @throws IllegalStateException if already connected
 * @throws UnknownHostException  if the hostname cannot be resolved
 * @throws IOException           on socket or stream failure
 */
@Override
public void connect() throws IllegalStateException, IOException {
    if (isConnected()) {
        throw new IllegalStateException("Already connected");
    }
    InetSocketAddress address = this.address;
    // the previous dns retry logic did not work, as address.getAddress would always return the cached value
    // this version of the simplified logic will always cause a dns request if hostname has been supplied.
    // InetAddress.getByName forces the dns lookup
    // if an InetSocketAddress was supplied at create time that will take precedence.
    // NOTE(review): '&&' binds tighter than '||' — the rebuild happens when no address was
    // given, OR when the given address lacks a hostname and a hostname field exists. Confirm intended.
    if (address == null || address.getHostName() == null && hostname != null) {
        address = new InetSocketAddress(hostname, port);
    }
    if (address.getAddress() == null) {
        // Name resolution failed.
        throw new UnknownHostException(address.getHostName());
    }
    this.socket = socketFactory.createSocket(address.getAddress(), address.getPort());
    this.writer = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream(), charset));
}
/** Connecting by host/port must open exactly one socket via the factory at the resolved address. */
@Test
public void connectsToGraphiteWithHostAndPort() throws Exception {
    try (Graphite graphite = new Graphite(host, port, socketFactory)) {
        graphite.connect();
    }
    verify(socketFactory).createSocket(address.getAddress(), port);
}
/**
 * Adds every given method declaration as a member of the target type, in iteration order.
 */
public static void populateMethodDeclarations(final ClassOrInterfaceDeclaration toPopulate, final Collection<MethodDeclaration> methodDeclarations) {
    for (final MethodDeclaration methodDeclaration : methodDeclarations) {
        toPopulate.addMember(methodDeclaration);
    }
}
/** All five generated methods must end up as members, each matched by equality. */
@Test
void populateMethodDeclarations() {
    final List<MethodDeclaration> toAdd = IntStream.range(0, 5)
        .boxed()
        .map(index -> getMethodDeclaration("METHOD_" + index))
        .collect(Collectors.toList());
    final ClassOrInterfaceDeclaration toPopulate = new ClassOrInterfaceDeclaration();
    assertThat(toPopulate.getMembers()).isEmpty();
    CommonCodegenUtils.populateMethodDeclarations(toPopulate, toAdd);
    final NodeList<BodyDeclaration<?>> retrieved = toPopulate.getMembers();
    assertThat(retrieved).hasSameSizeAs(toAdd);
    assertThat(toAdd.stream().anyMatch(methodDeclaration -> retrieved.stream()
        .anyMatch(bodyDeclaration -> bodyDeclaration.equals(methodDeclaration)))).isTrue();
}
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n, @ParameterName( "scale" ) BigDecimal scale) { if ( n == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "n", "cannot be null")); } if ( scale == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "scale", "cannot be null")); } // Based on Table 76: Semantics of numeric functions, the scale is in range −6111 .. 6176 if (scale.compareTo(BigDecimal.valueOf(-6111)) < 0 || scale.compareTo(BigDecimal.valueOf(6176)) > 0) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "scale", "must be in range between -6111 to 6176.")); } return FEELFnResult.ofResult( n.setScale( scale.intValue(), RoundingMode.HALF_EVEN ) ); }
/** Scales just outside both ends of the allowed -6111..6176 range must produce parameter errors. */
@Test
void invokeOutRangeScale() {
    FunctionTestUtil.assertResultError(decimalFunction.invoke(BigDecimal.valueOf(1.5), BigDecimal.valueOf(6177)),
        InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(decimalFunction.invoke(BigDecimal.valueOf(1.5), BigDecimal.valueOf(-6122)),
        InvalidParametersEvent.class);
}
/**
 * Removes the security group with the given id from the store and logs the removal.
 *
 * @param sgId security group identifier; must be non-null and non-empty
 * @throws IllegalArgumentException when the id is null or empty
 */
@Override
public void removeSecurityGroup(String sgId) {
    checkArgument(!Strings.isNullOrEmpty(sgId), ERR_NULL_SG_ID);
    osSecurityGroupStore.removeSecurityGroup(sgId);
    log.info(String.format(MSG_SG, sgId, MSG_REMOVED));
}
/** A null id must be rejected with IllegalArgumentException before touching the store. */
@Test(expected = IllegalArgumentException.class)
public void testRemoveSecurityGroupWithNull() {
    target.removeSecurityGroup(null);
}
/**
 * Accumulates another metric's values into this one: sums and counts are added,
 * max/min are folded. A null argument is a no-op; mismatched units are rejected.
 */
public void mergeWith(RuntimeMetric metric) {
    if (metric == null) {
        return;
    }
    // Merging values measured in different units would be meaningless.
    checkState(unit == metric.getUnit(), "The metric to be merged must have the same unit type as the current one.");
    count.addAndGet(metric.getCount());
    sum.addAndGet(metric.getSum());
    min.accumulateAndGet(metric.getMin(), Math::min);
    max.accumulateAndGet(metric.getMax(), Math::max);
}
/** Covers merge with another metric, self-merge (doubles sum/count), and null no-op. */
@Test
public void testMergeWith() {
    RuntimeMetric metric1 = new RuntimeMetric(TEST_METRIC_NAME, NONE, 5, 2, 4, 1);
    RuntimeMetric metric2 = new RuntimeMetric(TEST_METRIC_NAME, NONE, 20, 2, 11, 9);
    metric1.mergeWith(metric2);
    assertRuntimeMetricEquals(metric1, new RuntimeMetric(TEST_METRIC_NAME, NONE, 25, 4, 11, 1));
    metric2.mergeWith(metric2);
    assertRuntimeMetricEquals(metric2, new RuntimeMetric(TEST_METRIC_NAME, NONE, 40, 4, 11, 9));
    metric2.mergeWith(null);
    assertRuntimeMetricEquals(metric2, new RuntimeMetric(TEST_METRIC_NAME, NONE, 40, 4, 11, 9));
}
public static Set<X509Certificate> filterValid( X509Certificate... certificates ) { final Set<X509Certificate> results = new HashSet<>(); if (certificates != null) { for ( X509Certificate certificate : certificates ) { if ( certificate == null ) { continue; } try { certificate.checkValidity(); } catch ( CertificateExpiredException | CertificateNotYetValidException e ) { // Not yet or no longer valid. Don't include in result. continue; } results.add( certificate ); } } return results; }
/** Two distinct valid certificates must both survive filtering. */
@Test
public void testFilterValidWithTwoDistinctValidCerts() throws Exception {
    // Setup fixture.
    final X509Certificate validA = KeystoreTestUtils.generateValidCertificate().getCertificate();
    final X509Certificate validB = KeystoreTestUtils.generateValidCertificate().getCertificate();
    final Collection<X509Certificate> input = new ArrayList<>();
    input.add( validA );
    input.add( validB );
    // Execute system under test.
    final Collection<X509Certificate> result = CertificateUtils.filterValid( input );
    // Verify results.
    assertEquals( 2, result.size() );
    assertTrue( result.contains( validA ) );
    assertTrue( result.contains( validB ) );
}
/**
 * Resolves the class handled by the literal analyzer registered for
 * {@code className}, after validating that {@code literal} is a legal literal
 * of that type.
 *
 * @param className canonical name of the type, used to look up an analyzer
 * @param literal   the literal text to validate
 * @return the class reported by the matching analyzer, or {@code null} when no
 *         analyzer is registered for {@code className}
 */
public static Class<?> getLiteral(String className, String literal) {
    LiteralAnalyzer analyzer = ANALYZERS.get( className );
    // Was a raw 'Class'; use the wildcard type to match the return type and
    // avoid an unchecked-conversion warning.
    Class<?> result = null;
    if ( analyzer != null ) {
        // validate() signals a malformed literal for the target type.
        analyzer.validate( literal );
        result = analyzer.getLiteral();
    }
    return result;
}
/** A float literal may legally carry a long suffix; both 'L' and 'l' must resolve. */
@Test
public void testFloatWithLongLiteral() {
    assertThat( getLiteral( float.class.getCanonicalName(), "156L" ) ).isNotNull();
    assertThat( getLiteral( float.class.getCanonicalName(), "156l" ) ).isNotNull();
}
/**
 * Handles a share-group heartbeat end to end: gets/creates the group and member,
 * refreshes subscription metadata and bumps the group epoch when subscriptions
 * changed or the refresh deadline passed, recomputes the target assignment when
 * the epoch moved ahead, reconciles the member against it, reschedules the
 * session timeout, and builds the response (with the assignment only on join or
 * on assignment change).
 *
 * @param memberEpoch 0 means the member is (re)joining and the group/member may be created
 * @throws ApiException when the group does not exist (non-zero epoch) or is full
 */
private CoordinatorResult<ShareGroupHeartbeatResponseData, CoordinatorRecord> shareGroupHeartbeat(
    String groupId,
    String memberId,
    int memberEpoch,
    String rackId,
    String clientId,
    String clientHost,
    List<String> subscribedTopicNames
) throws ApiException {
    final long currentTimeMs = time.milliseconds();
    final List<CoordinatorRecord> records = new ArrayList<>();

    // Get or create the share group.
    boolean createIfNotExists = memberEpoch == 0;
    final ShareGroup group = getOrMaybeCreatePersistedShareGroup(groupId, createIfNotExists);
    throwIfShareGroupIsFull(group, memberId);

    // Get or create the member. An empty id means the server assigns one.
    if (memberId.isEmpty()) memberId = Uuid.randomUuid().toString();
    ShareGroupMember member = getOrMaybeSubscribeShareGroupMember(
        group,
        memberId,
        memberEpoch,
        createIfNotExists
    );

    // 1. Create or update the member. If the member is new or has changed, a ShareGroupMemberMetadataValue
    // record is written to the __consumer_offsets partition to persist the change. If the subscriptions have
    // changed, the subscription metadata is updated and persisted by writing a ShareGroupPartitionMetadataValue
    // record to the __consumer_offsets partition. Finally, the group epoch is bumped if the subscriptions have
    // changed, and persisted by writing a ShareGroupMetadataValue record to the partition.
    ShareGroupMember updatedMember = new ShareGroupMember.Builder(member)
        .maybeUpdateRackId(Optional.ofNullable(rackId))
        .maybeUpdateSubscribedTopicNames(Optional.ofNullable(subscribedTopicNames))
        .setClientId(clientId)
        .setClientHost(clientHost)
        .build();

    boolean bumpGroupEpoch = hasMemberSubscriptionChanged(
        groupId,
        member,
        updatedMember,
        records
    );

    int groupEpoch = group.groupEpoch();
    Map<String, TopicMetadata> subscriptionMetadata = group.subscriptionMetadata();
    SubscriptionType subscriptionType = group.subscriptionType();

    if (bumpGroupEpoch || group.hasMetadataExpired(currentTimeMs)) {
        // The subscription metadata is updated in two cases:
        // 1) The member has updated its subscriptions;
        // 2) The refresh deadline has been reached.
        Map<String, Integer> subscribedTopicNamesMap = group.computeSubscribedTopicNames(member, updatedMember);
        subscriptionMetadata = group.computeSubscriptionMetadata(
            subscribedTopicNamesMap,
            metadataImage.topics(),
            metadataImage.cluster()
        );

        // Count the member itself when it is not in the group yet.
        int numMembers = group.numMembers();
        if (!group.hasMember(updatedMember.memberId())) {
            numMembers++;
        }

        subscriptionType = ModernGroup.subscriptionType(
            subscribedTopicNamesMap,
            numMembers
        );

        if (!subscriptionMetadata.equals(group.subscriptionMetadata())) {
            log.info("[GroupId {}] Computed new subscription metadata: {}.", groupId, subscriptionMetadata);
            bumpGroupEpoch = true;
            records.add(newShareGroupSubscriptionMetadataRecord(groupId, subscriptionMetadata));
        }

        if (bumpGroupEpoch) {
            groupEpoch += 1;
            records.add(newShareGroupEpochRecord(groupId, groupEpoch));
            log.info("[GroupId {}] Bumped group epoch to {}.", groupId, groupEpoch);
        }

        group.setMetadataRefreshDeadline(currentTimeMs + shareGroupMetadataRefreshIntervalMs, groupEpoch);
    }

    // 2. Update the target assignment if the group epoch is larger than the target assignment epoch. The delta between
    // the existing and the new target assignment is persisted to the partition.
    final int targetAssignmentEpoch;
    final Assignment targetAssignment;
    if (groupEpoch > group.assignmentEpoch()) {
        targetAssignment = updateTargetAssignment(
            group,
            groupEpoch,
            updatedMember,
            subscriptionMetadata,
            subscriptionType,
            records
        );
        targetAssignmentEpoch = groupEpoch;
    } else {
        targetAssignmentEpoch = group.assignmentEpoch();
        targetAssignment = group.targetAssignment(updatedMember.memberId());
    }

    // 3. Reconcile the member's assignment with the target assignment if the member is not
    // fully reconciled yet.
    updatedMember = maybeReconcile(
        groupId,
        updatedMember,
        targetAssignmentEpoch,
        targetAssignment,
        records
    );

    scheduleShareGroupSessionTimeout(groupId, memberId);

    // Prepare the response.
    ShareGroupHeartbeatResponseData response = new ShareGroupHeartbeatResponseData()
        .setMemberId(updatedMember.memberId())
        .setMemberEpoch(updatedMember.memberEpoch())
        .setHeartbeatIntervalMs(shareGroupHeartbeatIntervalMs);

    // The assignment is only provided in the following cases:
    // 1. The member just joined or rejoined to group (epoch equals to zero);
    // 2. The member's assignment has been updated.
    if (memberEpoch == 0 || hasAssignedPartitionsChanged(member, updatedMember)) {
        response.setAssignment(createShareGroupResponseAssignment(updatedMember));
    }

    return new CoordinatorResult<>(records, response);
}
/** Heartbeating an unknown group with a non-zero epoch must fail (no auto-create). */
@Test
public void testShareGroupUnknownGroupId() {
    String groupId = "fooup";
    String memberId = Uuid.randomUuid().toString();
    MockPartitionAssignor assignor = new MockPartitionAssignor("share");
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .withShareGroupAssignor(assignor)
        .build();
    assertThrows(IllegalStateException.class, () -> context.shareGroupHeartbeat(
        new ShareGroupHeartbeatRequestData()
            .setGroupId(groupId)
            .setMemberId(memberId)
            .setMemberEpoch(100) // Epoch must be > 0.
            .setSubscribedTopicNames(Arrays.asList("foo", "bar"))));
}
/**
 * Sorts the array in place and returns the permutation of original indices that
 * produced the sorted order.
 */
public static int[] sort(int[] array) {
    final int length = array.length;
    final int[] order = new int[length];
    for (int index = 0; index < length; index++) {
        order[index] = index;
    }
    sort(array, order);
    return order;
}
/** Index permutations for already-sorted, reversed, and single-swap float inputs. */
@Test
public void testSortFloat() {
    System.out.println("sort float");
    float[] data1 = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    int[] order1 = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    assertArrayEquals(order1, QuickSort.sort(data1));
    float[] data2 = {9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
    int[] order2 = {9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
    assertArrayEquals(order2, QuickSort.sort(data2));
    float[] data3 = {0, 1, 2, 3, 5, 4, 6, 7, 8, 9};
    int[] order3 = {0, 1, 2, 3, 5, 4, 6, 7, 8, 9};
    assertArrayEquals(order3, QuickSort.sort(data3));
    float[] data4 = {4, 1, 2, 3, 0, 5, 6, 7, 8, 9};
    int[] order4 = {4, 1, 2, 3, 0, 5, 6, 7, 8, 9};
    assertArrayEquals(order4, QuickSort.sort(data4));
}
/**
 * OSGi modified callback: re-reads the component configuration and toggles
 * packet intercepts to match the new requestInterceptsEnabled value.
 */
@Modified
public void modified(ComponentContext context) {
    readComponentConfiguration(context);
    if (requestInterceptsEnabled) {
        requestIntercepts();
    } else {
        withdrawIntercepts();
    }
}
/** Device-offline events must remove the locations learned on that device. */
@Test
public void removeHostByDeviceOffline() {
    provider.modified(CTX_FOR_REMOVE);
    testProcessor.process(new TestArpPacketContext(DEV1));
    testProcessor.process(new TestArpPacketContext(DEV4));
    Device device = new DefaultDevice(ProviderId.NONE, deviceId(DEV1), SWITCH,
        "m", "h", "s", "n", new ChassisId(0L));
    deviceService.listener.event(new DeviceEvent(DEVICE_AVAILABILITY_CHANGED, device));
    assertEquals("incorrect remove count", 2, providerService.locationRemoveCount);
    device = new DefaultDevice(ProviderId.NONE, deviceId(DEV4), SWITCH,
        "m", "h", "s", "n", new ChassisId(0L));
    deviceService.listener.event(new DeviceEvent(DEVICE_AVAILABILITY_CHANGED, device));
    assertEquals("incorrect remove count", 3, providerService.locationRemoveCount);
}
/**
 * Returns the parsed request parameters.
 * NOTE(review): hands out the internal map without a defensive copy, so callers
 * can mutate it — confirm this exposure is intentional.
 */
@Override
public Map<String, String[]> getParameterMap() {
    return stringMap;
}
/** A request with no parameters must expose an empty (not null) map. */
@Test
void testGetParameterMapEmpty() {
    Map<String, String[]> parameterMap = reuseUploadFileHttpServletRequest.getParameterMap();
    assertEquals(0, parameterMap.size());
}
/**
 * Serializes all topic configs plus queue-mapping data into the response body.
 * Fails with SYSTEM_ERROR when serialization yields nothing (no topics) or the
 * charset is unsupported.
 */
private RemotingCommand getAllTopicConfig(ChannelHandlerContext ctx, RemotingCommand request) {
    final RemotingCommand response = RemotingCommand.createResponseCommand(GetAllTopicConfigResponseHeader.class);
    // final GetAllTopicConfigResponseHeader responseHeader =
    // (GetAllTopicConfigResponseHeader) response.readCustomHeader();
    TopicConfigAndMappingSerializeWrapper topicConfigAndMappingSerializeWrapper = new TopicConfigAndMappingSerializeWrapper();
    topicConfigAndMappingSerializeWrapper.setDataVersion(this.brokerController.getTopicConfigManager().getDataVersion());
    topicConfigAndMappingSerializeWrapper.setTopicConfigTable(this.brokerController.getTopicConfigManager().getTopicConfigTable());
    topicConfigAndMappingSerializeWrapper.setMappingDataVersion(this.brokerController.getTopicQueueMappingManager().getDataVersion());
    topicConfigAndMappingSerializeWrapper.setTopicQueueMappingDetailMap(this.brokerController.getTopicQueueMappingManager().getTopicQueueMappingTable());
    String content = topicConfigAndMappingSerializeWrapper.toJson();
    if (content != null && content.length() > 0) {
        try {
            response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
        } catch (UnsupportedEncodingException e) {
            LOGGER.error("", e);
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("UnsupportedEncodingException " + e.getMessage());
            return response;
        }
    } else {
        LOGGER.error("No topic in this broker, client: {}", ctx.channel().remoteAddress());
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("No topic in this broker");
        return response;
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/** The admin processor must answer GET_ALL_TOPIC_CONFIG with SUCCESS when topics exist. */
@Test
public void testGetAllTopicConfig() throws Exception {
    GetAllTopicConfigResponseHeader getAllTopicConfigResponseHeader = new GetAllTopicConfigResponseHeader();
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_ALL_TOPIC_CONFIG, getAllTopicConfigResponseHeader);
    RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
/**
 * Validates the HTTP status of a response and, on mismatch, reconstructs and
 * throws the server-side exception from the error-stream JSON payload. When the
 * named exception class cannot be instantiated (or parsing fails entirely), a
 * descriptive IOException is thrown instead.
 *
 * @param conn           the connection whose response is checked
 * @param expectedStatus the HTTP status that counts as success
 * @throws IOException when the status differs and no richer exception applies
 */
@SuppressWarnings("unchecked")
public static void validateResponse(HttpURLConnection conn, int expectedStatus) throws IOException {
    if (conn.getResponseCode() != expectedStatus) {
        Exception toThrow;
        InputStream es = null;
        try {
            es = conn.getErrorStream();
            Map json = JsonSerialization.mapReader().readValue(es);
            json = (Map) json.get(ERROR_JSON);
            String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
            String exMsg = (String) json.get(ERROR_MESSAGE_JSON);
            if (exClass != null) {
                try {
                    // Re-instantiate the server's exception via its (String message) constructor.
                    ClassLoader cl = HttpExceptionUtils.class.getClassLoader();
                    Class klass = cl.loadClass(exClass);
                    Preconditions.checkState(Exception.class.isAssignableFrom(klass),
                        "Class [%s] is not a subclass of Exception", klass);
                    MethodHandle methodHandle = PUBLIC_LOOKUP.findConstructor(
                        klass, EXCEPTION_CONSTRUCTOR_TYPE);
                    toThrow = (Exception) methodHandle.invoke(exMsg);
                } catch (Throwable t) {
                    // Any reflective failure degrades to a plain IOException carrying the details.
                    toThrow = new IOException(String.format(
                        "HTTP status [%d], exception [%s], message [%s], URL [%s]",
                        conn.getResponseCode(), exClass, exMsg, conn.getURL()));
                }
            } else {
                String msg = (exMsg != null) ? exMsg : conn.getResponseMessage();
                toThrow = new IOException(String.format(
                    "HTTP status [%d], message [%s], URL [%s]",
                    conn.getResponseCode(), msg, conn.getURL()));
            }
        } catch (Exception ex) {
            // Error stream missing or unparsable JSON.
            toThrow = new IOException(String.format(
                "HTTP status [%d], message [%s], URL [%s], exception [%s]",
                conn.getResponseCode(), conn.getResponseMessage(),
                conn.getURL(), ex.toString()), ex);
        } finally {
            if (es != null) {
                try {
                    es.close();
                } catch (IOException ex) {
                    //ignore
                }
            }
        }
        throwEx(toThrow);
    }
}
/** A JSON error payload naming a loadable exception class must be rethrown as that type. */
@Test
public void testValidateResponseJsonErrorKnownException() throws Exception {
    Map<String, Object> json = new HashMap<String, Object>();
    json.put(HttpExceptionUtils.ERROR_EXCEPTION_JSON, IllegalStateException.class.getSimpleName());
    json.put(HttpExceptionUtils.ERROR_CLASSNAME_JSON, IllegalStateException.class.getName());
    json.put(HttpExceptionUtils.ERROR_MESSAGE_JSON, "EX");
    Map<String, Object> response = new HashMap<String, Object>();
    response.put(HttpExceptionUtils.ERROR_JSON, json);
    ObjectMapper jsonMapper = new ObjectMapper();
    String msg = jsonMapper.writeValueAsString(response);
    InputStream is = new ByteArrayInputStream(msg.getBytes(StandardCharsets.UTF_8));
    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
    Mockito.when(conn.getErrorStream()).thenReturn(is);
    Mockito.when(conn.getResponseMessage()).thenReturn("msg");
    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_BAD_REQUEST);
    LambdaTestUtils.intercept(IllegalStateException.class, "EX",
        () -> HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_CREATED));
}
/**
 * Two entry views are equal when every exposed property matches.
 * NOTE(review): relies on getKey()/getValue() being non-null — confirm upstream guarantees.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof EntryView that)) {
        return false;
    }
    // Short-circuits on the first differing property.
    return getKey().equals(that.getKey())
        && getValue().equals(that.getValue())
        && getVersion() == that.getVersion()
        && getCost() == that.getCost()
        && getCreationTime() == that.getCreationTime()
        && getExpirationTime() == that.getExpirationTime()
        && getHits() == that.getHits()
        && getLastAccessTime() == that.getLastAccessTime()
        && getLastStoredTime() == that.getLastStoredTime()
        && getLastUpdateTime() == that.getLastUpdateTime()
        && getTtl() == that.getTtl();
}
/** equals must be reflexive: a view equals itself. */
@Test
public void test_equals_whenSameReference() {
    assertTrue(view.equals(view));
}
public static Object deserialize(String json) throws ParseException { // 去掉注释 return new JSONSerializer(json).nextValue(); }
/** Line and block comments (including multi-line) must be ignored; values parse normally. */
@Test
public void testDeserializeWithComment() {
    String s = "{"
        + "\"a\": null, // 111\n"
        + " \"b\":1, /*2 // asdsad / das */\n"
        + " \"c\":1, /*2 // asdsad \n \r / das */\n"
        + " \"d\":9999999999"
        + "}";
    LOGGER.info(s);
    Map json = (Map) JSONSerializer.deserialize(s);
    Assert.assertNotNull(json);
    Assert.assertEquals(json.get("a"), null);
    Assert.assertEquals(json.get("b"), 1);
    Assert.assertEquals(json.get("c"), 1);
    Assert.assertEquals(json.get("d"), 9999999999l);
}
/**
 * Lazily builds the secondary view on first access. The method-level lock
 * guarantees the supplier runs at most once.
 */
synchronized SyncableFileSystemView getSecondaryView() {
    if (secondaryView != null) {
        return secondaryView;
    }
    secondaryView = secondaryViewSupplier.get();
    return secondaryView;
}
/** First access must obtain the view from the supplier. */
@Test
public void testGetSecondaryView() {
    when(secondaryViewSupplier.get()).thenReturn(secondary);
    assertEquals(secondary, fsView.getSecondaryView());
}
/**
 * Delegates the was-the-last-value-null check to the wrapped query result.
 */
@Override
public boolean wasNull() throws SQLException {
    final boolean lastValueWasNull = queryResult.wasNull();
    return lastValueWasNull;
}
/** A fresh mock query result defaults to wasNull() == false; the wrapper must pass it through. */
@Test
void assertWasNull() throws SQLException {
    TransparentMergedResult actual = new TransparentMergedResult(mock(QueryResult.class));
    assertFalse(actual.wasNull());
}
/**
 * Starts the periodic flusher when a positive time limit is configured and no
 * flusher is already scheduled; otherwise does nothing.
 */
public void start() {
    if (timeLimit <= 0 || this.flushFuture != null) {
        // Flushing disabled, or already running.
        return;
    }
    this.flushFuture =
        Executors.newSingleThreadScheduledExecutor(
                new ThreadFactoryBuilder()
                    .setDaemon(true)
                    .setNameFormat("DataBufferOutboundFlusher-thread")
                    .build())
            .scheduleAtFixedRate(this::flush, timeLimit, timeLimit, TimeUnit.MILLISECONDS);
}
/** With data_buffer_time_limit_ms=1, buffered data must be flushed by the timer, not by size. */
@Test
public void testConfiguredTimeLimit() throws Exception {
    List<Elements> values = new ArrayList<>();
    PipelineOptions options = PipelineOptionsFactory.create();
    options
        .as(ExperimentalOptions.class)
        .setExperiments(Arrays.asList("data_buffer_time_limit_ms=1"));
    final CountDownLatch waitForFlush = new CountDownLatch(1);
    BeamFnDataOutboundAggregator aggregator =
        new BeamFnDataOutboundAggregator(
            options,
            endpoint::getInstructionId,
            TestStreams.withOnNext(
                (Consumer<Elements>) e -> {
                    values.add(e);
                    waitForFlush.countDown();
                })
                .build(),
            false);
    // Test that it emits when time passed the time limit
    FnDataReceiver<byte[]> dataReceiver = registerOutputLocation(aggregator, endpoint, CODER);
    aggregator.start();
    dataReceiver.accept(new byte[1]);
    waitForFlush.await(); // wait the flush thread to flush the buffer
    Assert.assertEquals(messageWithData(new byte[1]), values.get(0));
}
public CommandReturn runWithOutput() throws IOException { Process process = null; BufferedReader inReader = null; try { process = new ProcessBuilder(mCommand).redirectErrorStream(true).start(); inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); // read the output of the command StringBuilder stdout = new StringBuilder(); String outLine = inReader.readLine(); while (outLine != null) { stdout.append(outLine); stdout.append("\n"); outLine = inReader.readLine(); } // wait for the process to finish and check the exit code int exitCode = process.waitFor(); if (exitCode != 0) { // log error instead of throwing exception LOG.warn(String.format("Non-zero exit code (%d) from command %s", exitCode, Arrays.toString(mCommand))); } CommandReturn cr = new CommandReturn(exitCode, mCommand, stdout.toString()); // destroy the process if (process != null) { process.destroy(); } return cr; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IOException(e); } catch (Exception e) { return new CommandReturn(1, String.format("Command %s failed, exception is %s", Arrays.toString(mCommand), e.getMessage())); } finally { if (inReader != null) { inReader.close(); } if (process != null) { process.destroy(); } } }
/** Listing a nonexistent path must yield a non-zero exit code with the error captured in stdout. */
@Test
public void execCommandTolerateFailureFailed() throws Exception {
    // create temp file
    File testDir = AlluxioTestDirectory.createTemporaryDirectory("command");
    // do sth wrong
    String[] testCommandFail = new String[]{"ls", String.format("%saaaa", testDir.getAbsolutePath())};
    CommandReturn crf = new ShellCommand(testCommandFail).runWithOutput();
    assertNotEquals(0, crf.getExitCode());
    // The error is redirected into stdout
    assertTrue(crf.getOutput().length() > 0);
    assertNotEquals("", crf.getOutput());
}
/**
 * Removes certificate-expiration gauges whose keys match the predicate, then
 * drops those keys from the expiration map. Keys are collected first so the map
 * is not mutated while its key set is being streamed.
 *
 * @param shouldDelete selects which certificate metric keys to remove
 */
public void removeMetricsForCertificates(Predicate<CertificateMetricKey> shouldDelete) {
    final List<CertificateMetricKey> removedKeys = new ArrayList<>();
    certificateExpirationMap.keySet().stream()
        .map(CertificateMetricKey.class::cast)
        .filter(shouldDelete)
        .forEach(key -> {
            // Rebuild the tag set the gauge was registered with so removal matches.
            Tags tags = MetricsUtils.getAllMetricTags(key.getNamespace(), key.getKind(), Optional.empty(),
                Tag.of("cluster", key.getClusterName()), Tag.of("type", key.getCaType()));
            removeMetric(METRICS_CERTIFICATE_EXPIRATION_MS, tags);
            removedKeys.add(key);
        });
    removedKeys.forEach(certificateExpirationMap::remove);
}
/** Removing CLIENT_CA metrics must leave CLUSTER_CA entries untouched. */
@Test
@DisplayName("Should not remove certificate metrics for cluster CA type")
void shouldNotRemoveCertificateMetricsForClusterCaType() {
    Predicate<CertificateMetricKey> predicate =
        key -> matchCaTypes(key.getCaType(), CertificateMetricKey.Type.CLIENT_CA);
    metricsHolder.removeMetricsForCertificates(predicate);
    boolean hasClusterCaMetric = metricsHolder.certificateExpirationMap.keySet().stream()
        .anyMatch(key -> matchCaTypes(((CertificateMetricKey) key).getCaType(), CertificateMetricKey.Type.CLUSTER_CA));
    assertTrue(hasClusterCaMetric, "Cluster CA metric should not be removed");
}
/**
 * Unwraps this object as the requested type when compatible; otherwise rejects
 * the request with IllegalArgumentException.
 */
@Override
public <T> T unwrap(Class<T> clazz) {
    if (clazz.isInstance(this)) {
        // Safe: isInstance() has just proven the cast.
        @SuppressWarnings("unchecked")
        T castedEntry = (T) this;
        return castedEntry;
    }
    throw new IllegalArgumentException("Class " + clazz + " is unknown to this implementation");
}
/** Unwrapping to an unrelated interface must throw IllegalArgumentException. */
@Test
public void unwrap_fail() {
    assertThrows(IllegalArgumentException.class, () -> event.unwrap(Map.Entry.class));
}
/**
 * Asserts the subject contains every entry of the expected multimap; returns an
 * Ordered handle so the caller may additionally demand in-order containment.
 * Fails immediately (returning ALREADY_FAILED) when entries are missing.
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeastEntriesIn(Multimap<?, ?> expectedMultimap) {
    checkNotNull(expectedMultimap, "expectedMultimap");
    checkNotNull(actual);
    ListMultimap<?, ?> missing = difference(expectedMultimap, actual);
    // TODO(kak): Possible enhancement: Include "[1 copy]" if the element does appear in
    // the subject but not enough times. Similarly for unexpected extra items.
    if (!missing.isEmpty()) {
        failWithActual(
            fact("missing", countDuplicatesMultimap(annotateEmptyStringsMultimap(missing))),
            simpleFact("---"),
            fact("expected to contain at least", annotateEmptyStringsMultimap(expectedMultimap)));
        return ALREADY_FAILED;
    }
    // Entries all present; ordering is checked only if the caller invokes inOrder().
    return new MultimapInOrder(/* allowUnexpected = */ true, expectedMultimap);
}
/** Same entries, wrong value order under a key: containment passes, inOrder() must fail with key 3 flagged. */
@Test
public void containsAtLeastInOrderFailureValuesOnly() {
    ImmutableMultimap<Integer, String> actual =
        ImmutableMultimap.of(3, "one", 3, "six", 3, "two", 4, "five", 4, "four");
    ImmutableMultimap<Integer, String> expected =
        ImmutableMultimap.of(3, "six", 3, "one", 4, "five", 4, "four");
    assertThat(actual).containsAtLeastEntriesIn(expected);
    expectFailureWhenTestingThat(actual).containsAtLeastEntriesIn(expected).inOrder();
    assertFailureKeys(
        "contents match, but order was wrong",
        "keys with out-of-order values",
        "---",
        "expected to contain at least",
        "but was");
    assertFailureValue("keys with out-of-order values", "[3]");
}
/**
 * Performs one asynchronous HTTP health check for the instance registered under
 * the given service. Skips silently when the instance is unknown; doubles the
 * re-evaluated RT when a previous check is still in flight; any setup failure is
 * recorded as a check failure with the maximum RT.
 */
@Override
public void process(HealthCheckTaskV2 task, Service service, ClusterMetadata metadata) {
    HealthCheckInstancePublishInfo instance = (HealthCheckInstancePublishInfo) task.getClient()
        .getInstancePublishInfo(service);
    if (null == instance) {
        return;
    }
    try {
        // TODO handle marked(white list) logic like v1.x.
        if (!instance.tryStartCheck()) {
            // Previous check still running — back off by doubling the normalized RT.
            SRV_LOG.warn("http check started before last one finished, service: {} : {} : {}:{}",
                service.getGroupedServiceName(), instance.getCluster(), instance.getIp(), instance.getPort());
            healthCheckCommon
                .reEvaluateCheckRT(task.getCheckRtNormalized() * 2, task, switchDomain.getHttpHealthParams());
            return;
        }
        Http healthChecker = (Http) metadata.getHealthChecker();
        // Either the instance's own port or the cluster-configured health port.
        int ckPort = metadata.isUseInstancePortForCheck() ? instance.getPort() : metadata.getHealthyCheckPort();
        URL host = new URL(HTTP_PREFIX + instance.getIp() + ":" + ckPort);
        URL target = new URL(host, healthChecker.getPath());
        Map<String, String> customHeaders = healthChecker.getCustomHeaders();
        Header header = Header.newInstance();
        header.addAll(customHeaders);
        ASYNC_REST_TEMPLATE.get(target.toString(), header, Query.EMPTY, String.class,
            new HttpHealthCheckCallback(instance, task, service));
        MetricsMonitor.getHttpHealthCheckMonitor().incrementAndGet();
    } catch (Throwable e) {
        // Setup failed before the request went out: record a failure with max RT.
        instance.setCheckRt(switchDomain.getHttpHealthParams().getMax());
        healthCheckCommon.checkFail(task, service, "http:error:" + e.getMessage());
        healthCheckCommon.reEvaluateCheckRT(switchDomain.getHttpHealthParams().getMax(), task,
            switchDomain.getHttpHealthParams());
    }
}
/** A normal run must fetch the client's instance info and start the check. */
@Test
void testProcess() {
    httpHealthCheckProcessor.process(healthCheckTaskV2, service, clusterMetadata);
    verify(healthCheckTaskV2).getClient();
    verify(healthCheckInstancePublishInfo).tryStartCheck();
}
/**
 * Builds a Materialization by translating each transform description in the
 * info into an executable transform and handing the result, together with the
 * schema, to the materialization factory.
 */
public Materialization create(
    final StreamsMaterialization delegate,
    final MaterializationInfo info,
    final QueryId queryId,
    final QueryContext.Stacker contextStacker
) {
    final TransformVisitor visitor = new TransformVisitor(queryId, contextStacker);
    final List<Transform> transforms = info
        .getTransforms()
        .stream()
        .map(transformInfo -> transformInfo.visit(visitor))
        .collect(Collectors.toList());
    return materializationFactory.create(delegate, info.getSchema(), transforms);
}
/** The mapper created for a "project" step must resolve to the map-processing logger. */
@Test
public void shouldUseCorrectLoggerForSelectMapper() {
    // When:
    factory.create(materialization, info, queryId, new Stacker().push("project"));
    // Then:
    verify(mapperInfo).getMapper(loggerCaptor.capture());
    assertThat(
        loggerCaptor.getValue().apply(new Stacker().getQueryContext()),
        is(mapProcessingLogger)
    );
}
/**
 * Merges {@code paramsToMerge} into {@code params} in place. Literal MAP params
 * are merged recursively (child mode inherited via the merge context), literal
 * STRING_MAP params are merged by overlaying entries, and everything else is
 * merged by value via {@code buildMergedParamDefinition}. A null
 * {@code paramsToMerge} is a no-op.
 */
public static void mergeParams(
    Map<String, ParamDefinition> params,
    Map<String, ParamDefinition> paramsToMerge,
    MergeContext context) {
    if (paramsToMerge == null) {
        return;
    }
    // Visit the union of both key sets; keys only present in params are skipped below.
    Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
        .forEach(
            name -> {
                ParamDefinition paramToMerge = paramsToMerge.get(name);
                if (paramToMerge == null) {
                    return;
                }
                if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
                    // Recursive merge of nested literal maps.
                    Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
                    Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
                    mergeParams(
                        baseMap,
                        toMergeMap,
                        MergeContext.copyWithParentMode(
                            context, params.getOrDefault(name, paramToMerge).getMode()));
                    params.put(
                        name,
                        buildMergedParamDefinition(
                            name, paramToMerge, params.get(name), context, baseMap));
                } else if (paramToMerge.getType() == ParamType.STRING_MAP && paramToMerge.isLiteral()) {
                    // Shallow overlay for string maps: incoming entries win.
                    Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
                    Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
                    baseMap.putAll(toMergeMap);
                    params.put(
                        name,
                        buildMergedParamDefinition(
                            name, paramToMerge, params.get(name), context, baseMap));
                } else {
                    params.put(
                        name,
                        buildMergedParamDefinition(
                            name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
                }
            });
}
/** Non-overlapping maps: untouched entry keeps SYSTEM source; merged-in entry gets DEFINITION. */
@Test
public void testMergeMapSourceNoOverlap() throws JsonProcessingException {
    Map<String, ParamDefinition> allParams =
        parseParamDefMap(
            "{'tomergemap1': {'type': 'MAP', 'source': 'SYSTEM', 'value': {'tomerge1': {'source':'SYSTEM', 'type': 'STRING','value': 'hello'}}}}");
    Map<String, ParamDefinition> paramsToMerge =
        parseParamDefMap(
            "{'tomergemap2': {'type': 'MAP', 'value': {'tomerge2':{'type': 'STRING', 'value': 'goodbye'}}}}");
    ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext);
    assertEquals(2, allParams.size());
    MapParamDefinition tomergemap1 = allParams.get("tomergemap1").asMapParamDef();
    assertEquals("hello", tomergemap1.getValue().get("tomerge1").getValue());
    // did not touch, can remain system
    assertEquals(
        ParamSource.SYSTEM, tomergemap1.getValue().get("tomerge1").asStringParamDef().getSource());
    assertEquals(ParamSource.SYSTEM, tomergemap1.getSource());
    MapParamDefinition tomergemap2 = allParams.get("tomergemap2").asMapParamDef();
    assertEquals("goodbye", tomergemap2.getValue().get("tomerge2").getValue());
    assertEquals(
        ParamSource.DEFINITION,
        tomergemap2.getValue().get("tomerge2").asStringParamDef().getSource());
    // got it from definition stage
    assertEquals(ParamSource.DEFINITION, tomergemap2.getSource());
}
// Convenience overload: unsubscribes the listener by delegating to the
// cluster-aware overload with an empty (mutable) cluster list.
@Override public void unsubscribe(String serviceName, EventListener listener) throws NacosException { unsubscribe(serviceName, new ArrayList<>(), listener); }
@Test void testUnSubscribe1() throws NacosException { //given String serviceName = "service1"; EventListener listener = event -> { }; when(changeNotifier.isSubscribed(Constants.DEFAULT_GROUP, serviceName)).thenReturn(false); //when client.unsubscribe(serviceName, listener); //then NamingSelectorWrapper wrapper = new NamingSelectorWrapper( NamingSelectorFactory.newClusterSelector(Collections.emptyList()), listener); verify(changeNotifier, times(1)).deregisterListener(Constants.DEFAULT_GROUP, serviceName, wrapper); verify(proxy, times(1)).unsubscribe(serviceName, Constants.DEFAULT_GROUP, Constants.NULL); }
// Reads a char[] field; when the stored field's type differs from CHAR_ARRAY,
// readIncompatibleField resolves it (and may raise an error for truly
// incompatible stored types), otherwise the plain super read is used.
@Override @Nullable public char[] readCharArray(@Nonnull String fieldName) throws IOException { return readIncompatibleField(fieldName, CHAR_ARRAY, super::readCharArray); }
@Test(expected = IncompatibleClassChangeError.class) public void testReadCharArray_IncompatibleClass() throws Exception { reader.readCharArray("byte"); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { if(log.isDebugEnabled()) { log.debug(String.format("List containers for %s", session)); } try { final AttributedList<Path> containers = new AttributedList<>(); final int limit = new HostPreferences(session.getHost()).getInteger("openstack.list.container.limit"); final Client client = session.getClient(); for(final Region r : client.getRegions()) { if(region.getIdentifier() != null) { if(!StringUtils.equals(r.getRegionId(), region.getIdentifier())) { log.warn(String.format("Skip region %s", r)); continue; } } try { // List all containers List<Container> chunk; String marker = null; do { chunk = client.listContainers(r, limit, marker); for(final Container f : chunk) { final PathAttributes attributes = new PathAttributes(); attributes.setRegion(f.getRegion().getRegionId()); containers.add(new Path(String.format("/%s", f.getName()), EnumSet.of(Path.Type.volume, Path.Type.directory), attributes)); marker = f.getName(); } listener.chunk(directory, containers); } while(!chunk.isEmpty()); } catch(GenericException e) { if(e.getHttpStatusCode() == HttpStatus.SC_SERVICE_UNAVAILABLE) { log.warn(String.format("Ignore failure %s for region %s", e, region)); continue; } throw e; } } return containers; } catch(GenericException e) { throw new SwiftExceptionMappingService().map("Listing directory {0} failed", e, directory); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } }
@Test public void testListLimitRegion() throws Exception { final AttributedList<Path> list = new SwiftContainerListService(session, new SwiftLocationFeature.SwiftRegion("IAD") ).list(new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)), new DisabledListProgressListener()); final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.volume, Path.Type.directory)); container.attributes().setRegion("IAD"); assertTrue(list.contains(container)); }
// Records the read via access() (updates per-record access statistics such as
// the hit count) before returning the stored value.
public V getValue() { access(); return getValueInternal(); }
@Test public void testGetValue() { assertEquals(0, replicatedRecord.getHits()); assertEquals("value", replicatedRecord.getValue()); assertEquals(1, replicatedRecord.getHits()); }
/**
 * Returns one page of products for the given paging request.
 *
 * @throws ProductNotFoundException when the requested page contains no products
 */
@Override
public CustomPage<Product> getProducts(ProductPagingRequest productPagingRequest) {
    final Page<ProductEntity> productEntityPage = productRepository.findAll(productPagingRequest.toPageable());
    if (productEntityPage.getContent().isEmpty()) {
        throw new ProductNotFoundException("Couldn't find any Product");
    }
    // Map the JPA entities of this page into domain models, keeping paging metadata.
    final List<Product> productDomainModels = listProductEntityToListProductMapper
            .toProductList(productEntityPage.getContent());
    return CustomPage.of(productDomainModels, productEntityPage);
}
@Test void givenProductPagingRequest_WhenProductPageList_ThenReturnCustomPageProductList() { // Given ProductPagingRequest pagingRequest = ProductPagingRequest.builder() .pagination( CustomPaging.builder() .pageSize(1) .pageNumber(1) .build() ).build(); Page<ProductEntity> productEntityPage = new PageImpl<>(Collections.singletonList(new ProductEntity())); List<Product> products = listProductEntityToListProductMapper.toProductList(productEntityPage.getContent()); CustomPage<Product> expected = CustomPage.of(products, productEntityPage); // When when(productRepository.findAll(any(Pageable.class))).thenReturn(productEntityPage); // Then CustomPage<Product> result = productReadService.getProducts(pagingRequest); assertNotNull(result); assertFalse(result.getContent().isEmpty()); assertEquals(expected.getPageNumber(), result.getPageNumber()); assertEquals(expected.getContent().get(0).getId(), result.getContent().get(0).getId()); assertEquals(expected.getTotalPageCount(), result.getTotalPageCount()); assertEquals(expected.getTotalElementCount(), result.getTotalElementCount()); // Verify verify(productRepository, times(1)).findAll(any(Pageable.class)); }
// Directory timestamps are reported implicitly for this protocol.
@Override public DirectoryTimestamp getDirectoryTimestamp() { return DirectoryTimestamp.implicit; }
@Test public void testFeatures() { assertEquals(Protocol.Case.sensitive, new FTPTLSProtocol().getCaseSensitivity()); assertEquals(Protocol.DirectoryTimestamp.implicit, new FTPTLSProtocol().getDirectoryTimestamp()); }
// Returns the component-level Kafka configuration.
public KafkaConfiguration getConfiguration() { return configuration; }
@Test public void testBrokersOnComponent() { KafkaComponent kafka = context.getComponent("kafka", KafkaComponent.class); kafka.getConfiguration().setBrokers("broker1:12345,broker2:12566"); String uri = "kafka:mytopic?partitioner=com.class.Party"; KafkaEndpoint endpoint = context.getEndpoint(uri, KafkaEndpoint.class); assertEquals("broker1:12345,broker2:12566", endpoint.getConfiguration().getBrokers()); assertEquals("broker1:12345,broker2:12566", endpoint.getComponent().getConfiguration().getBrokers()); assertEquals("mytopic", endpoint.getConfiguration().getTopic()); assertEquals("com.class.Party", endpoint.getConfiguration().getPartitioner()); }
/**
 * Writes the given heap histogram report into the PDF document.
 *
 * NOTE(review): document.close() is only reached on the success path; when
 * open()/toPdf() throws DocumentException the document is left open after the
 * rethrow — consider closing in a finally block (confirm the PDF library's
 * close() is safe when open() failed before changing this).
 *
 * @throws IOException wrapping any PDF generation failure
 */
public void writeHeapHistogram(HeapHistogram heapHistogram) throws IOException {
    try {
        document.open();
        addParagraph(
                getFormattedString("heap_histo_du", I18N.createDateAndTimeFormat().format(heapHistogram.getTime())),
                "memory.png");
        new PdfHeapHistogramReport(heapHistogram, document).toPdf();
    } catch (final DocumentException e) {
        throw createIOException(e);
    }
    document.close();
}
@Test public void testWriteHeapHistogram() throws IOException { final ByteArrayOutputStream output = new ByteArrayOutputStream(); try (InputStream input = getClass().getResourceAsStream("/heaphisto.txt")) { final PdfOtherReport pdfOtherReport = new PdfOtherReport(TEST_APP, output); final HeapHistogram heapHistogram = new HeapHistogram(input, false); pdfOtherReport.writeHeapHistogram(heapHistogram); pdfOtherReport.close(); } assertNotEmptyAndClear(output); try (InputStream input2 = getClass().getResourceAsStream("/heaphisto_jrockit.txt")) { final PdfOtherReport pdfOtherReport = new PdfOtherReport(TEST_APP, output); final HeapHistogram heapHistogram = new HeapHistogram(input2, true); pdfOtherReport.writeHeapHistogram(heapHistogram); pdfOtherReport.close(); } assertNotEmptyAndClear(output); }
/**
 * Parses raw configurator config into override URLs. Accepts either the
 * legacy JSON-array-of-URLs form or the ConfiguratorConfig object form; for
 * the latter, items are expanded per scope (application, or service by default).
 */
public static List<URL> parseConfigurators(String rawConfig) {
    // compatible url JsonArray, such as [ "override://xxx", "override://xxx" ]
    List<URL> compatibleUrls = parseJsonArray(rawConfig);
    if (CollectionUtils.isNotEmpty(compatibleUrls)) {
        return compatibleUrls;
    }

    List<URL> urls = new ArrayList<>();
    ConfiguratorConfig configuratorConfig = parseObject(rawConfig);

    String scope = configuratorConfig.getScope();
    List<ConfigItem> items = configuratorConfig.getConfigs();

    if (ConfiguratorConfig.SCOPE_APPLICATION.equals(scope)) {
        items.forEach(item -> urls.addAll(appItemToUrls(item, configuratorConfig)));
    } else {
        // service scope by default.
        items.forEach(item -> urls.addAll(serviceItemToUrls(item, configuratorConfig)));
    }
    return urls;
}
@Test void parseConfiguratorsServiceNoAppTest() throws Exception { try (InputStream yamlStream = this.getClass().getResourceAsStream("/ServiceNoApp.yml")) { List<URL> urls = ConfigParser.parseConfigurators(streamToString(yamlStream)); Assertions.assertNotNull(urls); Assertions.assertEquals(2, urls.size()); URL url = urls.get(0); Assertions.assertEquals("127.0.0.1:20880", url.getAddress()); Assertions.assertEquals(222, url.getParameter(WEIGHT_KEY, 0)); } }
// Builds a TableElements from the given elements, defensively copied into an
// immutable list (ImmutableList.copyOf rejects null elements).
public static TableElements of(final TableElement... elements) { return new TableElements(ImmutableList.copyOf(elements)); }
@SuppressWarnings("UnstableApiUsage") @Test public void shouldImplementHashCodeAndEqualsProperty() { final List<TableElement> someElements = ImmutableList.of( tableElement("bob", INT_TYPE) ); new EqualsTester() .addEqualityGroup(TableElements.of(someElements), TableElements.of(someElements)) .addEqualityGroup(TableElements.of()) .testEquals(); }
// Encodes a flow criterion to JSON by delegating to the per-criterion-type helper.
@Override public ObjectNode encode(Criterion criterion, CodecContext context) { EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context); return encoder.encode(); }
@Test public void matchIcmpv6TypeTest() { Criterion criterion = Criteria.matchIcmpv6Type((byte) 250); ObjectNode result = criterionCodec.encode(criterion, context); assertThat(result, matchesCriterion(criterion)); }
/**
 * Builds the secondary Tika parser used for text extraction. On any failure
 * the recogniser marks itself unavailable (and logs) instead of propagating
 * the error to the caller.
 */
public AgeRecogniser() {
    try {
        secondaryParser = new Tika(new TikaConfig());
        available = true;
    } catch (Exception e) {
        available = false;
        LOG.error("Unable to initialize secondary parser", e);
    }
}
@Test public void testAgeRecogniser() throws Exception { //test config is added to resources directory try (InputStream is = getResourceAsStream(CONFIG_FILE); InputStream bis = new ByteArrayInputStream( TEST_TEXT.getBytes(StandardCharsets.UTF_8))) { TikaConfig config = new TikaConfig(is); Tika tika = new Tika(config); Metadata md = new Metadata(); tika.parse(bis, md); assertArrayEquals(new String[]{CompositeParser.class.getCanonicalName(), AgeRecogniser.class.getCanonicalName()}, md.getValues(TikaCoreProperties.TIKA_PARSED_BY), "Age Parser not invoked."); assertArrayEquals( new String[]{Double.toString(TEST_AGE)}, md.getValues(AgeRecogniser.MD_KEY_ESTIMATED_AGE), "Wrong age predicted."); } }
/**
 * Creates an output stream at {@code path} after substituting any entropy
 * marker in it, returning both the stream and the resolved path.
 *
 * @throws IOException if the stream cannot be created
 */
public static OutputStreamAndPath createEntropyAware(
        FileSystem fs, Path path, WriteMode writeMode) throws IOException {

    final Path processedPath = addEntropy(fs, path);

    // create the stream on the original file system to let the safety net
    // take its effect
    final FSDataOutputStream out = fs.create(processedPath, writeMode);
    return new OutputStreamAndPath(out, processedPath);
}
@Test void testCreateEntropyAwarePlainFs() throws Exception { File folder = TempDirUtils.newFolder(tempFolder); Path path = new Path(Path.fromLocalFile(folder), "_entropy_/file"); OutputStreamAndPath out = EntropyInjector.createEntropyAware( LocalFileSystem.getSharedInstance(), path, WriteMode.NO_OVERWRITE); out.stream().close(); assertThat(out.path()).isEqualTo(path); assertThat(new File(new File(folder, "_entropy_"), "file")).exists(); }
/**
 * Replaces this collection's agents from a list of {@code {"uuid": ...}} maps
 * (unchecked cast — callers are expected to pass List&lt;Map&lt;String, String&gt;&gt;).
 * A null argument is a no-op: existing entries are left untouched.
 */
@Override
public void setConfigAttributes(Object attributes) {
    if (attributes != null) {
        this.clear();
        ((List<Map<String, String>>)attributes).forEach(attributeMap -> this.add(new EnvironmentAgentConfig(attributeMap.get("uuid"))));
    }
}
@Test void shouldNotSetAgentConfigAttributesWhenItIsNull(){ envAgentsConfig.setConfigAttributes(null); assertThat(envAgentsConfig.size(), is(0)); }
/**
 * Parses an XML Schema date string (e.g. {@code 2019-01-02Z}) into a Calendar.
 *
 * <p>Fix: {@code DatatypeFactory.newXMLGregorianCalendar} throws an unchecked
 * {@code IllegalArgumentException} for malformed input; that now surfaces as
 * the declared {@code ParseException} instead of escaping the contract.
 *
 * @param xsDate the lexical xs:date value to parse
 * @return the parsed value as a GregorianCalendar
 * @throws ParseException if no datatype factory is available or the input is
 *         not a valid xs:date lexical value
 */
public static Calendar parseXmlDate(String xsDate) throws ParseException {
    try {
        final DatatypeFactory df = DatatypeFactory.newInstance();
        final XMLGregorianCalendar dateTime = df.newXMLGregorianCalendar(xsDate);
        return dateTime.toGregorianCalendar();
    } catch (DatatypeConfigurationException | IllegalArgumentException ex) {
        throw new ParseException("Unable to parse " + xsDate, ex);
    }
}
@Test public void testParseXmlDate() throws ParseException { String xsDate = "2019-01-02Z"; Calendar result = DateUtil.parseXmlDate(xsDate); assertEquals(2019, result.get(Calendar.YEAR)); //month is zero based. assertEquals(0, result.get(Calendar.MONTH)); assertEquals(2, result.get(Calendar.DATE)); }
/**
 * Returns the single local search node to dispatch directly to, or empty when
 * direct local dispatch is not safe: no local target is configured, the local
 * group lacks sufficient coverage, or the local node is known to be down.
 */
public Optional<Node> localCorpusDispatchTarget() {
    if (localCorpusDispatchTarget == null) return Optional.empty();

    // Only use direct dispatch if the local group has sufficient coverage
    Group localSearchGroup = groups.get(localCorpusDispatchTarget.group());
    if ( ! localSearchGroup.hasSufficientCoverage()) return Optional.empty();

    // Only use direct dispatch if the local search node is not down
    if (localCorpusDispatchTarget.isWorking() == Boolean.FALSE) return Optional.empty();

    return Optional.of(localCorpusDispatchTarget);
}
@Test void requireThatZeroDocsAreFine() { try (State test = new State("cluster.1", 2, "a", "b")) { test.waitOneFullPingRound(); assertTrue(test.vipStatus.isInRotation()); assertTrue(test.searchCluster.localCorpusDispatchTarget().isEmpty()); test.numDocsPerNode.get(0).set(-1); test.numDocsPerNode.get(1).set(-1); test.waitOneFullPingRound(); assertFalse(test.vipStatus.isInRotation()); test.numDocsPerNode.get(0).set(0); test.waitOneFullPingRound(); assertTrue(test.vipStatus.isInRotation()); } }
@SuppressWarnings("unchecked") void sort(String[] filenames) { Arrays.sort(filenames, new Comparator<String>() { @Override public int compare(String f1, String f2) { int result = 0; for (FilenameParser p : parsers) { Comparable c2 = p.parseFilename(f2); Comparable c1 = p.parseFilename(f1); if (c2 != null && c1 != null) { result += c2.compareTo(c1); } } // fallback to raw filename comparison if (result == 0) { result = f2.compareTo(f1); } return result; } }); }
@Test public void sortsDescendingByDateAndInteger() { final String[] FILENAMES = new String[] { "/var/logs/my-app/2018-10-31/9.log", "/var/logs/my-app/2019-01-01/1.log", "/var/logs/my-app/1999-03-17/3.log", "/var/logs/my-app/2019-01-01/11.log", "/var/logs/my-app/2019-01-01/2.log", "/var/logs/my-app/2016-12-25/10.log", }; final String[] EXPECTED_RESULT = new String[] { "/var/logs/my-app/2019-01-01/11.log", "/var/logs/my-app/2019-01-01/2.log", "/var/logs/my-app/2019-01-01/1.log", "/var/logs/my-app/2018-10-31/9.log", "/var/logs/my-app/2016-12-25/10.log", "/var/logs/my-app/1999-03-17/3.log", }; assertThat(sort("/var/logs/my-app/%d{yyyy-MM-dd}/%i.log", FILENAMES), contains(EXPECTED_RESULT)); }
/**
 * Schedules a reboot for active/ready host nodes that are due one
 * (per shouldReboot). Returns 1.0, i.e. a fully successful maintenance run.
 */
@Override
protected double maintain() {
    // Reboot candidates: Nodes in long-term states, where we know we can safely orchestrate a reboot
    List<Node> nodesToReboot = nodeRepository().nodes().list(Node.State.active, Node.State.ready).stream()
            .filter(node -> node.type().isHost())
            .filter(this::shouldReboot)
            .toList();
    if (!nodesToReboot.isEmpty())
        nodeRepository().nodes().reboot(NodeListFilter.from(nodesToReboot));
    return 1.0;
}
@Test public void testRebootScheduling() { Duration rebootInterval = Duration.ofDays(30); InMemoryFlagSource flagSource = new InMemoryFlagSource(); ProvisioningTester tester = createTester(rebootInterval, flagSource); makeReadyHosts(15, tester); NodeRepository nodeRepository = tester.nodeRepository(); NodeRebooter rebooter = new NodeRebooter(nodeRepository, flagSource, new TestMetric()); assertReadyHosts(15, nodeRepository, 0L); // No reboots within 0x-1x reboot interval tester.clock().advance(rebootInterval); rebooter.maintain(); simulateReboot(nodeRepository); assertReadyHosts(15, nodeRepository, 0L); // All nodes/hosts reboots within 1x-2x reboot interval tester.clock().advance(rebootInterval); rebooter.maintain(); simulateReboot(nodeRepository); assertReadyHosts(15, nodeRepository, 1L); // OS upgrade just before reboots would have been scheduled again tester.clock().advance(rebootInterval); scheduleOsUpgrade(nodeRepository); simulateOsUpgrade(nodeRepository); rebooter.maintain(); simulateReboot(nodeRepository); assertReadyHosts(15, nodeRepository, 1L); // OS upgrade counts as reboot, so within 0x-1x there is no reboots tester.clock().advance(rebootInterval); rebooter.maintain(); scheduleOsUpgrade(nodeRepository); simulateOsUpgrade(nodeRepository); assertReadyHosts(15, nodeRepository, 1L); // OS upgrade counts as reboot, but within 1x-2x reboots are scheduled again tester.clock().advance(rebootInterval); rebooter.maintain(); simulateReboot(nodeRepository); assertReadyHosts(15, nodeRepository, 2L); }
/**
 * Adds an arbitrary key/value pair to pass through to Tesseract's config.
 * Both key and value are validated against the allow-list pattern (guarding
 * against unsafe characters reaching the Tesseract command line), then stored
 * trimmed and the setting is recorded as user-configured.
 *
 * @throws IllegalArgumentException if key/value is null or contains illegal characters
 */
public void addOtherTesseractConfig(String key, String value) {
    if (key == null) {
        throw new IllegalArgumentException("key must not be null");
    }
    if (value == null) {
        throw new IllegalArgumentException("value must not be null");
    }
    Matcher m = ALLOWABLE_OTHER_PARAMS_PATTERN.matcher(key);
    if (!m.find()) {
        throw new IllegalArgumentException("Key contains illegal characters: " + key);
    }
    m.reset(value);
    if (!m.find()) {
        throw new IllegalArgumentException("Value contains illegal characters: " + value);
    }
    otherTesseractConfig.put(key.trim(), value.trim());
    userConfigured.add("otherTesseractConfig");
}
@Test public void testGoodOtherParameters() { TesseractOCRConfig config = new TesseractOCRConfig(); config.addOtherTesseractConfig("good", "good"); }
/**
 * Wraps the given configuration with security/fencing settings (when
 * non-null) before delegating to the superclass.
 */
@Override
public void setConf(Configuration conf) {
    if (conf != null) {
        conf = addSecurityConfiguration(conf);
    }
    super.setConf(conf);
}
@Test public void testFencingConfigPerNameNode() throws Exception { Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus(); final String nsSpecificKey = DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY + "." + NSID; final String nnSpecificKey = nsSpecificKey + ".nn1"; HdfsConfiguration conf = getHAConf(); // Set the default fencer to succeed conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, getFencerTrueCommand()); tool.setConf(conf); assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence")); // Set the NN-specific fencer to fail. Should fail to fence. conf.set(nnSpecificKey, getFencerFalseCommand()); tool.setConf(conf); assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence")); conf.unset(nnSpecificKey); // Set an NS-specific fencer to fail. Should fail. conf.set(nsSpecificKey, getFencerFalseCommand()); tool.setConf(conf); assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence")); // Set the NS-specific fencer to succeed. Should succeed conf.set(nsSpecificKey, getFencerTrueCommand()); tool.setConf(conf); assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence")); }
// Looks up a mail log by primary key, delegating to the mapper's selectById.
@Override public MailLogDO getMailLog(Long id) { return mailLogMapper.selectById(id); }
@Test public void testGetMailLog() { // mock 数据 MailLogDO dbMailLog = randomPojo(MailLogDO.class, o -> o.setTemplateParams(randomTemplateParams())); mailLogMapper.insert(dbMailLog); // 准备参数 Long id = dbMailLog.getId(); // 调用 MailLogDO mailLog = mailLogService.getMailLog(id); // 断言 assertPojoEquals(dbMailLog, mailLog); }
/**
 * Routes an add-receive-destination command to the IPC, spy or network
 * handler based on the destination channel's URI prefix.
 */
void onAddRcvDestination(final long registrationId, final String destinationChannel, final long correlationId) {
    if (destinationChannel.startsWith(IPC_CHANNEL)) {
        onAddRcvIpcDestination(registrationId, destinationChannel, correlationId);
    } else if (destinationChannel.startsWith(SPY_QUALIFIER)) {
        onAddRcvSpyDestination(registrationId, destinationChannel, correlationId);
    } else {
        onAddRcvNetworkDestination(registrationId, destinationChannel, correlationId);
    }
}
@Test void shouldThrowExceptionWhenRcvDestinationHasResponseCorrelationIdSet() { final Exception exception = assertThrowsExactly(InvalidChannelException.class, () -> driverConductor.onAddRcvDestination( 42, "aeron:udp?endpoint=localhost:8080|response-correlation-id=1234", 1) ); assertThat( exception.getMessage(), CoreMatchers.startsWith("ERROR - destinations must not contain the key: response-correlation-id")); }
/**
 * Resets this context: clears addresses, future and provider side/info,
 * replaces the attachment map with a fresh one and resets the stop watch.
 */
public void clear() {
    this.setRemoteAddress(null).setLocalAddress(null).setFuture(null).setProviderSide(null)
        .setProviderInfo(null);
    this.attachments = new ConcurrentHashMap<String, Object>();
    this.stopWatch.reset();
}
@Test public void testClear() { RpcInternalContext context = RpcInternalContext.getContext(); context.setRemoteAddress("127.0.0.1", 1234); context.setLocalAddress("127.0.0.1", 2345); context.setFuture(new ResponseFuture<String>() { @Override public boolean cancel(boolean mayInterruptIfRunning) { return false; } @Override public boolean isCancelled() { return false; } @Override public boolean isDone() { return false; } @Override public String get() throws InterruptedException, ExecutionException { return null; } @Override public String get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { return null; } @Override public ResponseFuture addListeners(List<SofaResponseCallback> sofaResponseCallbacks) { return null; } @Override public ResponseFuture addListener(SofaResponseCallback sofaResponseCallback) { return null; } }); context.setProviderInfo(ProviderHelper.toProviderInfo("127.0.0.1:80")); context.setAttachment("_xxxx", "yyyy"); context.clear(); Assert.assertNull(context.getRemoteAddress()); Assert.assertNull(context.getLocalAddress()); Assert.assertNull(context.getFuture()); Assert.assertFalse(context.isProviderSide()); Assert.assertFalse(context.isConsumerSide()); Assert.assertNull(context.getProviderInfo()); Assert.assertTrue(context.getAttachments().isEmpty()); Assert.assertNotNull(context.getStopWatch()); Assert.assertTrue(context.getStopWatch().read() == 0); Assert.assertNotNull(context.toString()); }
/** Returns the current value, then increments it (post-increment semantics). */
public long getAndInc() {
    final long previous = value;
    value = previous + 1;
    return previous;
}
@Test public void testGetAndInc() { MutableLong mutableLong = MutableLong.valueOf(13); assertEquals(13L, mutableLong.getAndInc()); assertEquals(14L, mutableLong.value); }
// Lazily initializes the underlying reader, then delegates the component lookup.
@Override public ScannerReport.Component readComponent(int componentRef) { ensureInitialized(); return delegate.readComponent(componentRef); }
@Test public void verify_readComponent_returns_Component() { writer.writeComponent(COMPONENT); assertThat(underTest.readComponent(COMPONENT_REF)).isEqualTo(COMPONENT); }
@Override public RowExpression optimize(RowExpression rowExpression, Level level, ConnectorSession session) { if (level.ordinal() <= OPTIMIZED.ordinal()) { return toRowExpression(rowExpression.getSourceLocation(), new RowExpressionInterpreter(rowExpression, metadata.getFunctionAndTypeManager(), session, level).optimize(), rowExpression.getType()); } throw new IllegalArgumentException("Not supported optimization level: " + level); }
@Test public void testCastWithJsonParseOptimization() { FunctionHandle jsonParseFunctionHandle = functionAndTypeManager.lookupFunction("json_parse", fromTypes(VARCHAR)); // constant FunctionHandle jsonCastFunctionHandle = functionAndTypeManager.lookupCast(CAST, JSON, functionAndTypeManager.getType(parseTypeSignature("array(integer)"))); RowExpression jsonCastExpression = new CallExpression(CAST.name(), jsonCastFunctionHandle, new ArrayType(INTEGER), ImmutableList.of(call("json_parse", jsonParseFunctionHandle, JSON, constant(utf8Slice("[1, 2]"), VARCHAR)))); RowExpression resultExpression = optimize(jsonCastExpression); assertInstanceOf(resultExpression, ConstantExpression.class); Object resultValue = ((ConstantExpression) resultExpression).getValue(); assertInstanceOf(resultValue, IntArrayBlock.class); assertEquals(toValues(INTEGER, (IntArrayBlock) resultValue), ImmutableList.of(1, 2)); // varchar to array jsonCastFunctionHandle = functionAndTypeManager.lookupCast(CAST, JSON, functionAndTypeManager.getType(parseTypeSignature("array(varchar)"))); jsonCastExpression = call(CAST.name(), jsonCastFunctionHandle, new ArrayType(VARCHAR), ImmutableList.of(call("json_parse", jsonParseFunctionHandle, JSON, field(1, VARCHAR)))); resultExpression = optimize(jsonCastExpression); assertEquals( resultExpression, call(JSON_TO_ARRAY_CAST.name(), functionAndTypeManager.lookupCast(JSON_TO_ARRAY_CAST, VARCHAR, functionAndTypeManager.getType(parseTypeSignature("array(varchar)"))), new ArrayType(VARCHAR), field(1, VARCHAR))); // varchar to map jsonCastFunctionHandle = functionAndTypeManager.lookupCast(CAST, JSON, functionAndTypeManager.getType(parseTypeSignature("map(integer,varchar)"))); jsonCastExpression = call(CAST.name(), jsonCastFunctionHandle, mapType(INTEGER, VARCHAR), ImmutableList.of(call("json_parse", jsonParseFunctionHandle, JSON, field(1, VARCHAR)))); resultExpression = optimize(jsonCastExpression); assertEquals( resultExpression, call(JSON_TO_MAP_CAST.name(), 
functionAndTypeManager.lookupCast(JSON_TO_MAP_CAST, VARCHAR, functionAndTypeManager.getType(parseTypeSignature("map(integer, varchar)"))), mapType(INTEGER, VARCHAR), field(1, VARCHAR))); // varchar to row jsonCastFunctionHandle = functionAndTypeManager.lookupCast(CAST, JSON, functionAndTypeManager.getType(parseTypeSignature("row(varchar,bigint)"))); jsonCastExpression = call(CAST.name(), jsonCastFunctionHandle, RowType.anonymous(ImmutableList.of(VARCHAR, BIGINT)), ImmutableList.of(call("json_parse", jsonParseFunctionHandle, JSON, field(1, VARCHAR)))); resultExpression = optimize(jsonCastExpression); assertEquals( resultExpression, call(JSON_TO_ROW_CAST.name(), functionAndTypeManager.lookupCast(JSON_TO_ROW_CAST, VARCHAR, functionAndTypeManager.getType(parseTypeSignature("row(varchar,bigint)"))), RowType.anonymous(ImmutableList.of(VARCHAR, BIGINT)), field(1, VARCHAR))); }
// Exposes the underlying Infinispan cache manager supplied at construction time.
public EmbeddedCacheManager getNativeCacheManager() { return this.nativeCacheManager; }
@Test public final void getNativeCacheShouldReturnTheEmbeddedCacheManagerSuppliedAtConstructionTime() { withCacheManager(new CacheManagerCallable(TestCacheManagerFactory.createCacheManager()) { @Override public void call() { final SpringEmbeddedCacheManager objectUnderTest = new SpringEmbeddedCacheManager(cm); final EmbeddedCacheManager nativeCacheManagerReturned = objectUnderTest.getNativeCacheManager(); assertSame( "getNativeCacheManager() should have returned the EmbeddedCacheManager supplied at construction time. However, it retuned a different one.", cm, nativeCacheManagerReturned); } }); }
// Stores the property and logs the assignment; getPrintableValue is applied to
// the logged value (presumably to mask sensitive entries — confirm its behavior).
public void setString(@NotNull final String key, @NotNull final String value) { props.setProperty(key, value); LOGGER.debug("Setting: {}='{}'", key, getPrintableValue(key, value)); }
@Test public void testSetString() { String key = "newProperty"; String value = "someValue"; getSettings().setString(key, value); String expResults = getSettings().getString(key); Assert.assertEquals(expResults, value); }
// Parses a date string by delegating to the formatter overload with a null
// formatter (that overload supplies the default parsing behavior).
public static LocalDate parseDate(CharSequence text) { return parseDate(text, (DateTimeFormatter) null); }
@Test public void parseDateTest() { LocalDate localDate = LocalDateTimeUtil.parseDate("2020-01-23"); assertEquals("2020-01-23", localDate.toString()); localDate = LocalDateTimeUtil.parseDate("2020-01-23T12:23:56", DateTimeFormatter.ISO_DATE_TIME); assertNotNull(localDate); assertEquals("2020-01-23", localDate.toString()); }
// Synchronously executes a bot API request and returns its typed response.
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
@Test public void unbanChatMember() { BaseResponse response = bot.execute(new UnbanChatMember(channelName, chatId)); assertFalse(response.isOk()); assertEquals(400, response.errorCode()); assertEquals("Bad Request: can't remove chat owner", response.description()); // returns true for non-banned member with onlyIfBanned(true) response = bot.execute(new UnbanChatMember(channelName, chatId).onlyIfBanned(true)); assertTrue(response.isOk()); }
static void dissectControlResponse(final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) { int encodedLength = dissectLogHeader(CONTEXT, CMD_OUT_RESPONSE, buffer, offset, builder); HEADER_DECODER.wrap(buffer, offset + encodedLength); encodedLength += MessageHeaderDecoder.ENCODED_LENGTH; CONTROL_RESPONSE_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); builder.append(": controlSessionId=").append(CONTROL_RESPONSE_DECODER.controlSessionId()) .append(" correlationId=").append(CONTROL_RESPONSE_DECODER.correlationId()) .append(" relevantId=").append(CONTROL_RESPONSE_DECODER.relevantId()) .append(" code=").append(CONTROL_RESPONSE_DECODER.code()) .append(" version=").append(CONTROL_RESPONSE_DECODER.version()) .append(" errorMessage="); CONTROL_RESPONSE_DECODER.getErrorMessage(builder); }
@Test void controlResponse() { internalEncodeLogHeader(buffer, 0, 100, 100, () -> 1_250_000_000); final ControlResponseEncoder responseEncoder = new ControlResponseEncoder(); responseEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder) .controlSessionId(13) .correlationId(42) .relevantId(8) .code(NULL_VAL) .version(111) .errorMessage("the %ERR% msg"); dissectControlResponse(buffer, 0, builder); assertEquals("[1.250000000] " + CONTEXT + ": " + CMD_OUT_RESPONSE.name() + " [100/100]: " + "controlSessionId=13" + " correlationId=42" + " relevantId=8" + " code=" + NULL_VAL + " version=111" + " errorMessage=the %ERR% msg", builder.toString()); }
// Decodes a geohash string into a two-element coordinate pair via decode()
// (element order is determined by decode() — latitude first per the tests).
@ScalarFunction public static double[] decodeGeoHash(String geohash) { return decode(geohash); }
@Test(dataProvider = "decodeHashTestCases") public void testDecodeHash(String geohash, double[] expectedCoords) { double[] decodedCoords = GeohashFunctions.decodeGeoHash(geohash); assertEquals(decodedCoords.length, 2); assertEquals(decodedCoords[0], expectedCoords[0], 0.001); assertEquals(decodedCoords[1], expectedCoords[1], 0.001); }
@Override public CompletableFuture<String> triggerSavepoint( @Nullable String targetDirectory, boolean cancelJob, SavepointFormatType formatType) { return state.tryCall( StateWithExecutionGraph.class, stateWithExecutionGraph -> { if (isAnyOutputBlocking(stateWithExecutionGraph.getExecutionGraph())) { return FutureUtils.<String>completedExceptionally( new CheckpointException( CheckpointFailureReason.BLOCKING_OUTPUT_EXIST)); } return stateWithExecutionGraph.triggerSavepoint( targetDirectory, cancelJob, formatType); }, "triggerSavepoint") .orElse( FutureUtils.completedExceptionally( new CheckpointException( "The Flink job is currently not executing.", CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE))); }
@Test void testTriggerSavepointFailsInIllegalState() throws Exception { final AdaptiveScheduler scheduler = new AdaptiveSchedulerBuilder( createJobGraph(), mainThreadExecutor, EXECUTOR_RESOURCE.getExecutor()) .build(); assertThatFuture( scheduler.triggerSavepoint( "some directory", false, SavepointFormatType.CANONICAL)) .eventuallyFailsWith(ExecutionException.class) .withCauseInstanceOf(CheckpointException.class); }
/**
 * Builds the Integer-average UDAF. The running aggregate is a STRUCT holding
 * the int SUM and long COUNT; the result mapper divides SUM by COUNT.
 *
 * <p>Fix: the mapper previously performed an integer division
 * ({@code int / long}), truncating the fractional part of the average before
 * it reached the Double result (e.g. sum=3, count=2 -> 1.0 instead of 1.5).
 * The sum is now cast to double before dividing.
 */
@UdafFactory(description = "Compute average of column with type Integer.",
    aggregateSchema = "STRUCT<SUM integer, COUNT bigint>")
public static TableUdaf<Integer, Struct, Double> averageInt() {
  return getAverageImplementation(
      0,
      STRUCT_INT,
      (sum, newValue) -> sum.getInt32(SUM) + newValue,
      (sum, count) -> (double) sum.getInt32(SUM) / count,
      (sum1, sum2) -> sum1.getInt32(SUM) + sum2.getInt32(SUM),
      (sum, valueToUndo) -> sum.getInt32(SUM) - valueToUndo);
}
@Test public void shouldAverageEmpty() { final TableUdaf<Integer, Struct, Double> udaf = AverageUdaf.averageInt(); final Struct agg = udaf.initialize(); final double avg = udaf.map(agg); assertThat(0.0, equalTo(avg)); }
@Operation(summary = "delUserById", description = "DELETE_USER_BY_ID_NOTES") @Parameters({ @Parameter(name = "id", description = "USER_ID", required = true, schema = @Schema(implementation = int.class, example = "100")) }) @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_USER_BY_ID_ERROR) public Result delUserById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id) throws Exception { Map<String, Object> result = usersService.deleteUserById(loginUser, id); return returnDataList(result); }
@Test public void testDelUserById() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id", "32"); MvcResult mvcResult = mockMvc.perform(post("/users/delete") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.USER_NOT_EXIST.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
/**
 * Walks the given child-node path starting at {@code dom} and returns the
 * value of the final node, or empty if the root or any step is missing.
 */
@VisibleForTesting
static Optional<String> getChildValue(@Nullable Xpp3Dom dom, String... childNodePath) {
  if (dom == null) {
    return Optional.empty();
  }
  Xpp3Dom current = dom;
  for (String childName : childNodePath) {
    Xpp3Dom next = current.getChild(childName);
    if (next == null) {
      return Optional.empty();
    }
    current = next;
  }
  return Optional.ofNullable(current.getValue());
}
@Test
public void testGetChildValue_noChild() {
  // Looking up a child that does not exist — directly or via a nested path —
  // must yield an empty Optional rather than throwing.
  Xpp3Dom root = newXpp3Dom("root", "value");
  assertThat(MavenProjectProperties.getChildValue(root, "foo")).isEmpty();
  assertThat(MavenProjectProperties.getChildValue(root, "foo", "bar")).isEmpty();
}
/** Returns the Java version recorded for this packaged jar. */
@Override
public Integer getJavaVersion() {
  return jarJavaVersion;
}
@Test
public void testGetJavaVersion() {
  // The version passed at construction time must be returned unchanged.
  SpringBootPackagedProcessor springBootPackagedProcessor =
      new SpringBootPackagedProcessor(Paths.get("ignore"), 8);
  assertThat(springBootPackagedProcessor.getJavaVersion()).isEqualTo(8);
}
/**
 * Derives a SimpleDateFormat from the date-format directives of the configured
 * log4j2 appender's layout. Falls back to "yyyy/MM/dd HH:mm:ss" when the
 * appender is absent or its layout contains no {...} groups.
 */
protected static SimpleDateFormat getLog4j2Appender() {
  // Look up the appender by the statically configured name (case-insensitive).
  Optional<Appender> log4j2xmlAppender = configuration.getAppenders().values().stream()
      .filter( a -> a.getName().equalsIgnoreCase( log4J2Appender ) ).findFirst();
  if ( log4j2xmlAppender.isPresent() ) {
    ArrayList<String> matchesArray = new ArrayList<>();
    // Collect the contents of every {...} group in the layout's "format" entry;
    // processMatches translates those into a Java date pattern.
    String dateFormatFromLog4j2xml = log4j2xmlAppender.get().getLayout().getContentFormat().get( "format" );
    Pattern pattern = Pattern.compile( "(\\{(.*?)})" );
    Matcher matcher = pattern.matcher( dateFormatFromLog4j2xml );
    while ( matcher.find() ) {
      matchesArray.add( matcher.group( 2 ) );
    }
    if ( !matchesArray.isEmpty() ) {
      return processMatches( matchesArray );
    }
  }
  // Default pattern when nothing usable could be extracted from the configuration.
  return new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss" );
}
@Test public void testGetLog4j2UsingAppender2() { // This will throw an Illegal pattern character 'n' Exception parsing to Java SimpleDateFormat() and set Default // Pattern value "yyyy/MM/dd HH:mm:ss" KettleLogLayout.log4J2Appender = "pdi-execution-appender-test-2"; Assert.assertNotSame( "HH:mm:ss,nnnn", KettleLogLayout.getLog4j2Appender().toPattern() ); Assert.assertEquals( "yyyy/MM/dd HH:mm:ss", KettleLogLayout.getLog4j2Appender().toPattern() ); }
/**
 * Lazily fetches and installs a delegation token on the file system.
 * Re-fetches when initialization never happened or the previous renew action
 * is no longer valid. Thread-safe via method-level synchronization.
 *
 * @throws IOException if fetching the delegation token fails
 */
synchronized void ensureTokenInitialized() throws IOException {
  // we haven't inited yet, or we used to have a token but it expired
  if (!hasInitedToken || (action != null && !action.isValid())) {
    //since we don't already have a token, go get one
    Token<?> token = fs.getDelegationToken(null);
    // security might be disabled
    if (token != null) {
      fs.setDelegationToken(token);
      // register the token for periodic renewal
      addRenewAction(fs);
      LOG.debug("Created new DT for {}", token.getService());
    }
    // Marked initialized even when no token was issued (e.g. security
    // disabled) so we do not re-query on every call.
    hasInitedToken = true;
  }
}
@Test
public void testCachedInitialization() throws IOException, URISyntaxException {
  // Verifies that ensureTokenInitialized() caches the token: the second call
  // must not fetch or install a delegation token again.
  Configuration conf = new Configuration();
  DummyFs fs = spy(new DummyFs());
  Token<TokenIdentifier> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
      DummyFs.TOKEN_KIND, new Text("127.0.0.1:1234"));
  doReturn(token).when(fs).getDelegationToken(any());
  doReturn(token).when(fs).getRenewToken();

  fs.emulateSecurityEnabled = true;
  fs.initialize(new URI("dummyfs://127.0.0.1:1234"), conf);

  // First call fetches and installs the token once.
  fs.tokenAspect.ensureTokenInitialized();
  verify(fs, times(1)).getDelegationToken(null);
  verify(fs, times(1)).setDelegationToken(token);

  // For the second iteration, the token should be cached.
  fs.tokenAspect.ensureTokenInitialized();
  verify(fs, times(1)).getDelegationToken(null);
  verify(fs, times(1)).setDelegationToken(token);
}
/**
 * Rotates the presented refresh token and issues a fresh access token.
 *
 * @param refreshToken the refresh token presented by the client
 * @return pair of the user id and the newly issued token pair
 * @throws JwtErrorException EXPIRED_TOKEN when the service rejects the token
 *         with IllegalArgumentException; TAKEN_AWAY_TOKEN when it signals
 *         IllegalStateException (token mismatch — possible theft)
 */
public Pair<Long, Jwts> refresh(String refreshToken) {
    JwtClaims claims = refreshTokenProvider.getJwtClaimsFromToken(refreshToken);
    Long userId = JwtClaimsParserUtil.getClaimsValue(claims, RefreshTokenClaimKeys.USER_ID.getValue(), Long::parseLong);
    String role = JwtClaimsParserUtil.getClaimsValue(claims, RefreshTokenClaimKeys.ROLE.getValue(), String.class);
    log.debug("refresh token userId : {}, role : {}", userId, role);
    RefreshToken newRefreshToken;
    try {
        // Token rotation: replace the stored refresh token with a new one.
        newRefreshToken = refreshTokenService.refresh(userId, refreshToken,
                refreshTokenProvider.generateToken(RefreshTokenClaim.of(userId, role)));
        log.debug("new refresh token : {}", newRefreshToken.getToken());
    } catch (IllegalArgumentException e) {
        // Service rejected the token outright — surfaced as expired.
        throw new JwtErrorException(JwtErrorCode.EXPIRED_TOKEN);
    } catch (IllegalStateException e) {
        // Presented token does not match the stored one — surfaced as stolen.
        throw new JwtErrorException(JwtErrorCode.TAKEN_AWAY_TOKEN);
    }
    String newAccessToken = accessTokenProvider.generateToken(AccessTokenClaim.of(userId, role));
    log.debug("new access token : {}", newAccessToken);
    return Pair.of(userId, Jwts.of(newAccessToken, newRefreshToken.getToken()));
}
@Test
@DisplayName("사용자 아이디에 해당하는 리프레시 토큰이 존재할 시, 리프레시 토큰 갱신에 성공한다.")
public void RefreshTokenRefreshSuccess() {
    // given: a stored refresh token plus stubbed providers for claim parsing
    // and token generation.
    RefreshToken refreshToken = RefreshToken.builder()
            .userId(1L)
            .token("refreshToken")
            .ttl(1000L)
            .build();
    refreshTokenRepository.save(refreshToken);
    given(refreshTokenProvider.getJwtClaimsFromToken(refreshToken.getToken())).willReturn(RefreshTokenClaim.of(refreshToken.getUserId(), Role.USER.getType()));
    given(accessTokenProvider.generateToken(any())).willReturn("newAccessToken");
    given(refreshTokenProvider.generateToken(any())).willReturn("newRefreshToken");

    // when
    Pair<Long, Jwts> jwts = jwtAuthHelper.refresh(refreshToken.getToken());

    // then
    // NOTE(review): the (message, expected, actual) argument order below is
    // JUnit 4 style; confirm org.junit.Assert.assertEquals is the one imported,
    // since JUnit 5's Assertions takes the message as the LAST argument.
    assertEquals("사용자 아이디가 일치하지 않습니다.", refreshToken.getUserId(), jwts.getLeft());
    assertEquals("갱신된 액세스 토큰이 일치하지 않습니다.", "newAccessToken", jwts.getRight().accessToken());
    assertEquals("리프레시 토큰이 갱신되지 않았습니다.", "newRefreshToken", jwts.getRight().refreshToken());
    log.info("갱신된 리프레시 토큰 정보 : {}", refreshTokenRepository.findById(refreshToken.getUserId()).orElse(null));
}
/**
 * Configures the list class and inner deserializer from the given configs.
 * Fails when this instance was already initialized via a non-default
 * constructor (i.e. listClass or inner is already set).
 */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
    final boolean alreadyInitialized = listClass != null || inner != null;
    if (alreadyInitialized) {
        log.error("Could not configure ListDeserializer as some parameters were already set -- listClass: {}, inner: {}", listClass, inner);
        throw new ConfigException("List deserializer was already initialized using a non-default constructor");
    }
    configureListClass(configs, isKey);
    configureInnerSerde(configs, isKey);
}
@Test
public void testListKeyDeserializerNoArgConstructorsShouldThrowConfigExceptionDueListClassNotFound() {
    // Point the list serde type at a class name that cannot be resolved;
    // configure() must fail with a ConfigException naming the missing class.
    props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_TYPE_CLASS, nonExistingClass);
    props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS, Serdes.StringSerde.class);
    final ConfigException exception = assertThrows(
        ConfigException.class,
        () -> listDeserializer.configure(props, true)
    );
    assertEquals("Invalid value " + nonExistingClass + " for configuration "
        + CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_TYPE_CLASS + ": Deserializer's list class "
        + "\"" + nonExistingClass + "\" could not be found.", exception.getMessage());
}
/**
 * Resolves the transitive dependencies of the requested entities and returns
 * the full configuration of every entity in the closure.
 */
@POST
@Timed
@ApiOperation(value = "Resolve dependencies of entities and return their configuration")
@RequiresPermissions(RestPermissions.CATALOG_RESOLVE)
@NoAuditEvent("this is not changing any data")
public CatalogResolveResponse resolveEntities(
        @ApiParam(name = "JSON body", required = true) @Valid @NotNull CatalogResolveRequest request) {
    // Expand the requested descriptors into their dependency closure, then
    // export each resolved descriptor's configuration.
    final ImmutableSet<Entity> entities = contentPackService.collectEntities(
            contentPackService.resolveEntities(request.entities()));
    return CatalogResolveResponse.create(entities);
}
@Test
public void resolveEntities() {
    // One descriptor whose dependency graph contains only itself; the facade
    // is stubbed to resolve and export that single entity.
    final EntityDescriptor entityDescriptor = EntityDescriptor.builder()
            .id(ModelId.of("1234567890"))
            .type(ModelType.of("test", "1"))
            .build();
    final MutableGraph<EntityDescriptor> entityDescriptors = GraphBuilder.directed().build();
    entityDescriptors.addNode(entityDescriptor);
    final EntityV1 entity = EntityV1.builder()
            .id(ModelId.of("1234567890"))
            .type(ModelType.of("test", "1"))
            .data(new ObjectNode(JsonNodeFactory.instance).put("test", "1234"))
            .build();
    when(mockEntityFacade.resolveNativeEntity(entityDescriptor)).thenReturn(entityDescriptors);
    when(mockEntityFacade.exportEntity(eq(entityDescriptor), any(EntityDescriptorIds.class))).thenReturn(Optional.of(entity));

    final CatalogResolveRequest request = CatalogResolveRequest.create(entityDescriptors.nodes());
    final CatalogResource resource = new CatalogResource(contentPackService, (r, p) -> EntitiesTitleResponse.EMPTY_RESPONSE);
    final CatalogResolveResponse catalogResolveResponse = resource.resolveEntities(request);

    // The response must contain exactly the exported entity.
    assertThat(catalogResolveResponse.entities()).containsOnly(entity);
}
@Nonnull public static Number shiftRightU(@Nonnull Number value, @Nonnull Number shift) { // Check for widest types first, go down the type list to narrower types until reaching int. if (value instanceof Long) { return value.longValue() >>> shift.longValue(); } else { return value.intValue() >>> shift.intValue(); } }
@Test
void testShiftRightU() {
    // For non-negative values the signed and unsigned shifts agree.
    assertEquals(16 >> 1, NumberUtil.shiftRightU(16, 1));
    assertEquals(16L >> 1, NumberUtil.shiftRightU(16L, 1));
    // Negative values are where >>> differs from >>; cover them explicitly.
    assertEquals(-16 >>> 1, NumberUtil.shiftRightU(-16, 1));
    assertEquals(-16L >>> 1, NumberUtil.shiftRightU(-16L, 1));
}
/** Varargs convenience overload; delegates to the {@code List}-based parse. */
public RuntimeOptionsBuilder parse(String... args) {
    return parse(Arrays.asList(args));
}
@Test
void scans_class_path_root_for_glue_by_default() {
    // With no glue supplied, addDefaultGlueIfAbsent() must fall back to the
    // classpath root package URI.
    RuntimeOptions options = parser
            .parse()
            .addDefaultGlueIfAbsent()
            .build();

    assertThat(options.getGlue(), is(singletonList(rootPackageUri())));
}
/**
 * Performs a servlet INCLUDE dispatch through the Lambda container handler.
 * For path-based (non-named) dispatchers, the standard javax.servlet.include.*
 * attributes are populated from the current request before re-dispatching.
 *
 * @throws IllegalStateException if no container handler is set or the
 *         response was already committed
 */
@Override
@SuppressWarnings("unchecked")
@SuppressFBWarnings("SERVLET_QUERY_STRING")
public void include(ServletRequest servletRequest, ServletResponse servletResponse) throws ServletException, IOException {
    if (lambdaContainerHandler == null) {
        throw new IllegalStateException("Null container handler in dispatcher");
    }
    if (servletResponse.isCommitted()) {
        throw new IllegalStateException("Cannot forward request with committed response");
    }
    servletRequest.setAttribute(DISPATCHER_TYPE_ATTRIBUTE, DispatcherType.INCLUDE);
    if (!isNamedDispatcher) {
        // Expose the including request's path data via the spec-defined
        // javax.servlet.include.* attributes.
        servletRequest.setAttribute("javax.servlet.include.request_uri", ((HttpServletRequest)servletRequest).getRequestURI());
        servletRequest.setAttribute("javax.servlet.include.context_path", ((HttpServletRequest) servletRequest).getContextPath());
        servletRequest.setAttribute("javax.servlet.include.servlet_path", ((HttpServletRequest) servletRequest).getServletPath());
        servletRequest.setAttribute("javax.servlet.include.path_info", ((HttpServletRequest) servletRequest).getPathInfo());
        // Query string is sanitized (CRLF stripped, then encoded) before exposure.
        servletRequest.setAttribute("javax.servlet.include.query_string",
            SecurityUtils.encode(SecurityUtils.crlf(((HttpServletRequest) servletRequest).getQueryString())));
        servletRequest.setAttribute(DISPATCHER_TYPE_ATTRIBUTE, DispatcherType.INCLUDE);
        setRequestPath(servletRequest, dispatchTo);
    }
    lambdaContainerHandler.doFilter((HttpServletRequest) servletRequest, (HttpServletResponse) servletResponse,
        getServlet((HttpServletRequest)servletRequest));
}
@Test
void include_appendsNewHeader_cannotAppendNewHeaders() throws InvalidRequestEventException, IOException {
    // An included resource may contribute body content, but headers it sets
    // after the including response flushed must NOT appear in the final
    // response (per the servlet include contract).
    final String firstPart = "first";
    final String secondPart = "second";
    final String headerKey = "X-Custom-Header";

    AwsProxyRequest proxyRequest = new AwsProxyRequestBuilder("/hello", "GET").build();
    AwsProxyResponse resp = mockLambdaHandler((AwsProxyHttpServletRequest req, AwsHttpServletResponse res) -> {
        // First pass: write the first part, then include; the "cnt" attribute
        // distinguishes the including call from the included call.
        if (req.getAttribute("cnt") == null) {
            res.getOutputStream().write(firstPart.getBytes());
            req.setAttribute("cnt", 1);
            req.getRequestDispatcher("/includer").include(req, res);
            res.setStatus(200);
            res.flushBuffer();
        } else {
            // Included pass: append the second part and try to add a header.
            res.getOutputStream().write(secondPart.getBytes());
            res.addHeader(headerKey, "value");
        }
    }).proxy(proxyRequest, new MockLambdaContext());

    assertEquals(firstPart + secondPart, resp.getBody());
    assertFalse(resp.getMultiValueHeaders().containsKey(headerKey));
}
/**
 * Returns the value of the given property parsed as a long.
 *
 * @throws NumberFormatException if the property's string value is not a valid long
 */
public long getLong(HazelcastProperty property) {
    final String value = getString(property);
    return Long.parseLong(value);
}
@Test
public void getLong() {
    // The default lock max-lease-time must parse to Long.MAX_VALUE.
    long lockMaxLeaseTimeSeconds = defaultProperties.getLong(ClusterProperty.LOCK_MAX_LEASE_TIME_SECONDS);
    assertEquals(Long.MAX_VALUE, lockMaxLeaseTimeSeconds);
}
/** Returns an {@link AvroGenericCoder} for the given Avro schema. */
public static AvroGenericCoder of(Schema schema) {
  return AvroGenericCoder.of(schema);
}
@Test
@Category(NeedsRunner.class)
public void testDefaultCoder() throws Exception {
  // Use MyRecord as input and output types without explicitly specifying
  // a coder (this uses the default coders, which may not be AvroCoder).
  PCollection<String> output =
      pipeline
          .apply(Create.of(new Pojo("hello", 1, DATETIME_A), new Pojo("world", 2, DATETIME_B)))
          .apply(ParDo.of(new GetTextFn()));

  PAssert.that(output).containsInAnyOrder("hello", "world");
  pipeline.run();
}
/**
 * Parses {@code dateTime} with the given formatter and combines the parsed
 * date and time components into a {@link LocalDateTime}.
 */
public static LocalDateTime parse(String dateTime, DateTimeFormatter dateTimeFormatter) {
    // Parse once, then extract both components from the same accessor.
    final TemporalAccessor accessor = dateTimeFormatter.parse(dateTime);
    final LocalDate datePart = accessor.query(TemporalQueries.localDate());
    final LocalTime timePart = accessor.query(TemporalQueries.localTime());
    return LocalDateTime.of(datePart, timePart);
}
@Test
public void testParseDateString() {
    final String datetime = "2023-12-22 00:00:00";
    final LocalDateTime parsed = DateTimeUtils.parse(datetime, Formatter.YYYY_MM_DD_HH_MM_SS);

    // Verify every field once; the original asserted getDayOfMonth() twice.
    Assertions.assertEquals(2023, parsed.getYear());
    Assertions.assertEquals(12, parsed.getMonth().getValue());
    Assertions.assertEquals(22, parsed.getDayOfMonth());
    Assertions.assertEquals(0, parsed.getHour());
    Assertions.assertEquals(0, parsed.getMinute());
    Assertions.assertEquals(0, parsed.getSecond());
}
/**
 * Loads a keystore from the named config file, trying the JKS format first
 * and falling back to PKCS12 when the JKS parse fails.
 *
 * @param name     keystore file name, resolved via {@code Config}
 * @param password keystore password
 * @return the loaded keystore
 * @throws RuntimeException if the file is missing or loads in neither format
 */
public static KeyStore loadKeyStore(final String name, final char[] password) {
    InputStream stream = null;
    try {
        stream = Config.getInstance().getInputStreamFromFile(name);
        if (stream == null) {
            String message = "Unable to load keystore '" + name + "', please provide the keystore matching the configuration in client.yml/server.yml to enable TLS connection.";
            if (logger.isErrorEnabled()) {
                logger.error(message);
            }
            throw new RuntimeException(message);
        }
        // try to load keystore as JKS
        try {
            KeyStore loadedKeystore = KeyStore.getInstance("JKS");
            loadedKeystore.load(stream, password);
            return loadedKeystore;
        } catch (Exception e) {
            // if JKS fails, attempt to load as PKCS12; the stream was consumed
            // by the first attempt, so it must be closed and reopened
            try {
                stream.close();
                stream = Config.getInstance().getInputStreamFromFile(name);
                KeyStore loadedKeystore = KeyStore.getInstance("PKCS12");
                loadedKeystore.load(stream, password);
                return loadedKeystore;
            } catch (Exception e2) {
                logger.error("Unable to load keystore " + name, e2);
                throw new RuntimeException("Unable to load keystore " + name, e2);
            }
        }
    } catch (Exception e) {
        // NOTE: this also rewraps the RuntimeExceptions thrown above.
        logger.error("Unable to load stream for keystore " + name, e);
        throw new RuntimeException("Unable to load stream for keystore " + name, e);
    } finally {
        // always release the (possibly reopened) stream
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException e) {
                logger.error("Unable to close stream for keystore " + name, e);
            }
        }
    }
}
@Test
public void testLoadPKCS12KeyStore() {
    // loadKeyStore tries JKS first and falls back to PKCS12, so a PKCS12
    // keystore must still load successfully.
    KeyStore keyStore = TlsUtil.loadKeyStore("serverpkcs12.keystore", PASSWORD);
    Assert.assertNotNull(keyStore);
}
/**
 * Finds all classpath occurrences of {@code resourceName} whose package
 * passes {@code packageFilter}, mapped through the classpath-resource factory.
 */
public List<R> scanForClasspathResource(String resourceName, Predicate<String> packageFilter) {
    requireNonNull(resourceName, "resourceName must not be null");
    requireNonNull(packageFilter, "packageFilter must not be null");
    return findResourcesForUris(
        getUrisForResource(getClassLoader(), resourceName),
        DEFAULT_PACKAGE_NAME,
        packageFilter,
        createClasspathResource(resourceName));
}
@Test
void scanForClasspathResourceWithSpaces() {
    // Spaces in the resource name must be percent-encoded in the returned URI.
    String resourceName = "io/cucumber/core/resource/test/spaces in name resource.txt";
    List<URI> resources = resourceScanner.scanForClasspathResource(resourceName, aPackage -> true);
    assertThat(resources,
        contains(URI.create("classpath:io/cucumber/core/resource/test/spaces%20in%20name%20resource.txt")));
}
/**
 * Builds the DAG vertex that joins a left-hand stream against the IMap-backed
 * right-hand table, choosing the cheapest strategy available:
 * key lookup, predicate-based equi-join, or full scan.
 */
static VertexWithInputConfig join(
        DAG dag,
        String mapName,
        String tableName,
        JetJoinInfo joinInfo,
        KvRowProjector.Supplier rightRowProjectorSupplier
) {
    int leftEquiJoinPrimitiveKeyIndex = leftEquiJoinPrimitiveKeyIndex(joinInfo, rightRowProjectorSupplier.paths());
    if (leftEquiJoinPrimitiveKeyIndex > -1) {
        // This branch handles the case when there's an equi-join condition for the __key field.
        // For example: SELECT * FROM left [LEFT] JOIN right ON left.field1=right.__key
        // In this case we'll use map.get() for the right map to get the matching entry by key and evaluate the
        // remaining conditions on the returned row.
        return new VertexWithInputConfig(
                dag.newUniqueVertex(
                        "Join(Lookup-" + tableName + ")",
                        new JoinByPrimitiveKeyProcessorSupplier(
                                joinInfo.isInner(),
                                leftEquiJoinPrimitiveKeyIndex,
                                joinInfo.condition(),
                                mapName,
                                rightRowProjectorSupplier
                        )
                ),
                // Partition by the key so each lookup lands on the owning member.
                edge -> edge.distributed().partitioned(extractPrimitiveKeyFn(leftEquiJoinPrimitiveKeyIndex))
        );
    } else if (joinInfo.isEquiJoin()) {
        // This branch handles the case when there's an equi-join, but not for __key (that was handled above)
        // For example: SELECT * FROM left JOIN right ON left.field1=right.field1
        // In this case we'll construct a com.hazelcast.query.Predicate that will find matching rows using
        // the `map.entrySet(predicate)` method.
        assert joinInfo.isLeftOuter() || joinInfo.isInner();
        return new VertexWithInputConfig(
                dag.newUniqueVertex(
                        "Join(Predicate-" + tableName + ")",
                        JoinByEquiJoinProcessorSupplier.supplier(joinInfo, mapName, rightRowProjectorSupplier)
                ),
                edge -> {
                    // In case of an inner join we'll use `entrySet(predicate, partitionIdSet)` - we'll fan-out each
                    // left item to all members and each member will query a subset of partitions (the local ones).
                    // Otherwise, a default edge is used (local unicast)
                    if (joinInfo.isInner()) {
                        edge.distributed().fanout();
                    }
                });
    } else {
        // This is the fallback case when there's not an equi-join: it can be a cross-join or join on
        // another condition. For example:
        // SELECT * FROM houses h JOIN renters r WHERE h.rent BETWEEN r.min_rent AND r.max_rent
        return new VertexWithInputConfig(
                dag.newUniqueVertex(
                        "Join(Scan-" + tableName + ")",
                        new JoinScanProcessorSupplier(joinInfo, mapName, rightRowProjectorSupplier)
                )
        );
    }
    // TODO: detect and handle always-false condition ?
}
@Test
@Parameters(method = "joinTypes")
public void test_joinByPredicate(JoinRelType joinType) {
    // given: an equi-join on a non-__key path, which must take the
    // predicate-based join branch (vertex name contains "Predicate").
    given(rightRowProjectorSupplier.paths()).willReturn(new QueryPath[]{QueryPath.create("path")});
    given(dag.newUniqueVertex(contains("Predicate"), isA(ProcessorMetaSupplier.class))).willReturn(vertex);

    // when
    VertexWithInputConfig vertexWithConfig = Joiner.join(
            dag,
            "imap-name",
            "table-name",
            joinInfo(joinType, new int[]{0}, new int[]{0}),
            rightRowProjectorSupplier
    );

    // then: the predicate vertex is used and an edge-config function is set.
    assertThat(vertexWithConfig.vertex()).isEqualTo(vertex);
    assertThat(vertexWithConfig.configureEdgeFn()).isNotNull();
}
/**
 * Returns whether {@code left operator right} is a supported comparison.
 * Delegates to the first registered handler for the left-hand base type;
 * unknown types are not comparable.
 *
 * @throws KsqlException (via nullSchemaException) when either side is null
 */
static boolean isValidComparison(
    final SqlType left,
    final ComparisonExpression.Type operator,
    final SqlType right
) {
  if (left == null || right == null) {
    throw nullSchemaException(left, operator, right);
  }

  return HANDLERS.stream()
      .filter(handler -> handler.handles.test(left.baseType()))
      .map(handler -> handler.validator.test(operator, right))
      .findFirst()
      .orElse(false);
}
@SuppressWarnings("ConstantConditions")
@Test
public void shouldNotCompareLeftNullSchema() {
  // When: a null left-hand schema must be rejected eagerly with a clear message.
  final Exception e = assertThrows(
      KsqlException.class,
      () -> ComparisonUtil.isValidComparison(null, ComparisonExpression.Type.EQUAL, SqlTypes.STRING)
  );

  // Then:
  assertThat(e.getMessage(), containsString(
      "Comparison with NULL not supported: NULL = STRING"));
}
/** Returns the display name of this CI system. */
@Override
public String getName() {
  return "AppVeyor";
}
@Test
public void getName() {
  // The CI system must report its fixed display name.
  assertThat(underTest.getName()).isEqualTo("AppVeyor");
}
/** Asserts the subject is at least {@code other}; delegates to the double overload. */
public final void isAtLeast(int other) {
  isAtLeast((double) other);
}
@Test
public void isAtLeast_int() {
  // 2.0 is not at least 3 (failure case), but is at least 2 and 1.
  expectFailureWhenTestingThat(2.0).isAtLeast(3);
  assertThat(2.0).isAtLeast(2);
  assertThat(2.0).isAtLeast(1);
}
/**
 * Estimates row counts and column statistics for a Hive table.
 * Unpartitioned tables use the single table-level stats entry; partitioned
 * tables extrapolate from a sample of partition statistics.
 *
 * @param session       optimizer session (controls the partition sample size)
 * @param table         the Hive table (must be a HiveMetaStoreTable)
 * @param columns       columns to produce statistics for
 * @param partitionKeys partitions relevant to the query
 */
public Statistics getTableStatistics(
        OptimizerContext session,
        Table table,
        List<ColumnRefOperator> columns,
        List<PartitionKey> partitionKeys) {
    Statistics.Builder builder = Statistics.builder();
    HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table;
    if (hmsTbl.isUnPartitioned()) {
        HivePartitionStats tableStats = hmsOps.getTableStatistics(hmsTbl.getDbName(), hmsTbl.getTableName());
        return createUnpartitionedStats(tableStats, columns, builder, table);
    }

    int sampleSize = getSamplePartitionSize(session);
    List<String> partitionColumnNames = hmsTbl.getPartitionColumnNames();
    // Validate each key's arity against the partition columns while mapping
    // it to its Hive partition name.
    List<String> partitionNames = partitionKeys.stream()
            .peek(partitionKey -> checkState(partitionKey.getKeys().size() == partitionColumnNames.size(),
                    "columns size is " + partitionColumnNames.size() + " but values size is "
                            + partitionKey.getKeys().size()))
            .map(partitionKey -> toHivePartitionName(partitionColumnNames, partitionKey))
            .collect(Collectors.toList());

    // Only a sample of partitions is fetched; totals are extrapolated from it.
    List<String> sampledPartitionNames = getPartitionsSample(partitionNames, sampleSize);
    Map<String, HivePartitionStats> partitionStatistics = hmsOps.getPartitionStatistics(table, sampledPartitionNames);

    // Fix: the locals were previously initialized to -1 and immediately
    // overwritten; declare them at the point of use instead.
    double avgRowNumPerPartition = getPerPartitionRowAvgNums(partitionStatistics.values());
    if (avgRowNumPerPartition <= 0) {
        // No usable row counts in the sampled stats; fall back to an estimate.
        builder.setOutputRowCount(getEstimatedRowCount(table, partitionKeys));
        return builder.build();
    }
    double totalRowNums = avgRowNumPerPartition * partitionKeys.size();
    builder.setOutputRowCount(totalRowNums);

    for (ColumnRefOperator columnRefOperator : columns) {
        Column column = table.getColumn(columnRefOperator.getName());
        if (partitionColumnNames.contains(columnRefOperator.getName())) {
            builder.addColumnStatistic(columnRefOperator, createPartitionColumnStatistics(
                    column, partitionKeys, partitionStatistics, partitionColumnNames,
                    avgRowNumPerPartition, totalRowNums));
        } else {
            builder.addColumnStatistic(columnRefOperator, createDataColumnStatistics(
                    column, totalRowNums, partitionStatistics.values()));
        }
    }
    return builder.build();
}
@Test
public void testGetTableStatistics() throws AnalysisException {
    HiveTable hiveTable = (HiveTable) hmsOps.getTable("db1", "table1");
    ColumnRefOperator partColumnRefOperator = new ColumnRefOperator(0, Type.INT, "col1", true);
    ColumnRefOperator dataColumnRefOperator = new ColumnRefOperator(1, Type.INT, "col2", true);
    PartitionKey hivePartitionKey1 = PartitionUtil.createPartitionKey(
            Lists.newArrayList("1"), hiveTable.getPartitionColumns());
    PartitionKey hivePartitionKey2 = PartitionUtil.createPartitionKey(
            Lists.newArrayList("2"), hiveTable.getPartitionColumns());

    // Before the partition statistics are cached, only a fallback row-count
    // estimate is produced and no column statistics.
    Statistics statistics = statisticsProvider.getTableStatistics(
            optimizerContext,
            hiveTable,
            Lists.newArrayList(partColumnRefOperator, dataColumnRefOperator),
            Lists.newArrayList(hivePartitionKey1, hivePartitionKey2));
    Assert.assertEquals(1, statistics.getOutputRowCount(), 0.001);
    Assert.assertEquals(0, statistics.getColumnStatistics().size());

    // Warm the cache, then full statistics become available.
    cachingHiveMetastore.getPartitionStatistics(hiveTable, Lists.newArrayList("col1=1", "col1=2"));
    statistics = statisticsProvider.getTableStatistics(
            optimizerContext,
            hiveTable,
            Lists.newArrayList(partColumnRefOperator, dataColumnRefOperator),
            Lists.newArrayList(hivePartitionKey1, hivePartitionKey2));
    Assert.assertEquals(100, statistics.getOutputRowCount(), 0.001);
    Map<ColumnRefOperator, ColumnStatistic> columnStatistics = statistics.getColumnStatistics();
    Assert.assertEquals(2, statistics.getColumnStatistics().size());

    // Partition column: min/max come from the partition values "1" and "2".
    ColumnStatistic partitionColumnStats = columnStatistics.get(partColumnRefOperator);
    Assert.assertEquals(1, partitionColumnStats.getMinValue(), 0.001);
    Assert.assertEquals(2, partitionColumnStats.getMaxValue(), 0.001);
    Assert.assertEquals(0, partitionColumnStats.getNullsFraction(), 0.001);
    Assert.assertEquals(4, partitionColumnStats.getAverageRowSize(), 0.001);
    Assert.assertEquals(2, partitionColumnStats.getDistinctValuesCount(), 0.001);

    // Data column: statistics aggregated from the cached partition stats.
    ColumnStatistic dataColumnStats = columnStatistics.get(dataColumnRefOperator);
    Assert.assertEquals(0, dataColumnStats.getMinValue(), 0.001);
    Assert.assertEquals(0.03, dataColumnStats.getNullsFraction(), 0.001);
    Assert.assertEquals(4, dataColumnStats.getAverageRowSize(), 0.001);
    Assert.assertEquals(5, dataColumnStats.getDistinctValuesCount(), 0.001);
}
/** Returns the relative REST path of the config endpoint. */
public static String config() {
  return "v1/config";
}
@Test
public void testConfigPath() {
  // prefix does not affect the config route because config is merged into catalog properties
  assertThat(ResourcePaths.config()).isEqualTo("v1/config");
}
/**
 * Verifies that moving {@code source} to {@code target} is permitted before
 * attempting it: filename validity, delete permission on the source, write
 * permission on the target, and — for directories — create-directories
 * permission on the target's parent.
 *
 * @throws InvalidFilenameException if the target name fails validation
 * @throws BackgroundException      if a required permission is missing
 */
@Override
public void preflight(final Path source, final Path target) throws BackgroundException {
    if(!CteraTouchFeature.validate(target.getName())) {
        throw new InvalidFilenameException(MessageFormat.format(LocaleFactory.localizedString("Cannot rename {0}", "Error"), source.getName())).withFile(source);
    }
    // Moving away requires delete permission on the source.
    assumeRole(source, DELETEPERMISSION);
    // defaults to Acl.EMPTY (disabling role checking) if target does not exist
    assumeRole(target, WRITEPERMISSION);
    // no createfilespermission required for now
    if(source.isDirectory()) {
        assumeRole(target.getParent(), target.getName(), CREATEDIRECTORIESPERMISSION);
    }
}
@Test
public void testPreflightDirectoryAccessDeniedTargetNoCreatedirectoriesPermissionCustomProps() throws Exception {
    // Source directory is deletable, but the target's parent lacks the
    // create-directories permission -> preflight must fail with AccessDenied.
    final Path source = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    source.setAttributes(source.attributes().withAcl(new Acl(new Acl.CanonicalUser(), CteraAttributesFinderFeature.DELETEPERMISSION)));
    final Path target = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    // target's parent without createdirectories permission
    target.getParent().setAttributes(target.getParent().attributes().withAcl(new Acl(new Acl.CanonicalUser())));
    final AccessDeniedException accessDeniedException = assertThrows(AccessDeniedException.class, () -> new CteraMoveFeature(session).preflight(source, target));
    assertTrue(accessDeniedException.getDetail().contains(MessageFormat.format(LocaleFactory.localizedString("Cannot create folder {0}", "Error"), target.getName())));
}
/**
 * Validates that the list itself and every element in it are non-null.
 *
 * @param list the list to check
 * @param name name used in failure messages
 * @return the same list, for call chaining
 */
public static <T> List<T> notNullElements(List<T> list, String name) {
    notNull(list, name);
    int index = 0;
    for (T element : list) {
        notNull(element, MessageFormat.format("list [{0}] element [{1}]", name, index));
        index++;
    }
    return list;
}
@Test
public void notNullElementsNotNull() {
  // Empty lists and lists of non-null elements must both pass without throwing.
  Check.notNullElements(new ArrayList<String>(), "name");
  Check.notNullElements(Arrays.asList("a"), "name");
}
/**
 * Validates these credentials against the protocol's rules
 * (double dispatch back into the protocol implementation).
 */
public boolean validate(final Protocol protocol, final LoginOptions options) {
    return protocol.validate(this, options);
}
@Test
public void testLoginAnonymous1() {
    // Credentials built from the configured anonymous user/password must
    // validate for FTP with default login options.
    Credentials credentials = new Credentials(
        PreferencesFactory.get().getProperty("connection.login.anon.name"),
        PreferencesFactory.get().getProperty("connection.login.anon.pass"));
    assertTrue(credentials.validate(new TestProtocol(Scheme.ftp), new LoginOptions()));
}
/**
 * Reads a big-endian 32-bit int from four consecutive bytes of {@code buffer}
 * starting at {@code offset}.
 */
public static int readIntBE(byte[] buffer, int offset) {
    int result = 0;
    for (int i = 0; i < 4; i++) {
        result = (result << 8) | (buffer[offset + i] & 0xFF);
    }
    return result;
}
@Test
public void testReadInt() {
    // Boundary and representative values, including negatives and both int
    // extremes; the original array listed Integer.MAX_VALUE twice, which
    // added no coverage.
    int[] values = {
        0, 1, -1, Byte.MAX_VALUE, Short.MAX_VALUE, 2 * Short.MAX_VALUE,
        Integer.MAX_VALUE / 2, Integer.MIN_VALUE / 2, Integer.MAX_VALUE, Integer.MIN_VALUE
    };
    // ByteBuffer writes big-endian by default, matching readIntBE.
    ByteBuffer buffer = ByteBuffer.allocate(4 * values.length);
    for (int i = 0; i < values.length; ++i) {
        buffer.putInt(i * 4, values[i]);
        assertEquals(values[i], ByteUtils.readIntBE(buffer.array(), i * 4), "Written value should match read value.");
    }
}
/**
 * Creates a keyed bundle whose added elements are passed through a cloning
 * wrapper around the underlying factory's bundle.
 */
@Override
public <K, T> UncommittedBundle<T> createKeyedBundle(
    StructuralKey<K> key, PCollection<T> output) {
  return new CloningBundle<>(underlying.createKeyedBundle(key, output));
}
@Test
public void keyedBundleDecodeFailsAddFails() {
  // The cloning bundle round-trips each added element through its coder;
  // a coder whose decode always throws must surface as a UserCodeException
  // caused by a CoderException when an element is added.
  PCollection<Record> pc = p.apply(Create.empty(new RecordNoDecodeCoder()));
  UncommittedBundle<Record> bundle =
      factory.createKeyedBundle(StructuralKey.of("foo", StringUtf8Coder.of()), pc);
  thrown.expect(UserCodeException.class);
  thrown.expectCause(isA(CoderException.class));
  thrown.expectMessage("Decode not allowed");

  bundle.add(WindowedValue.valueInGlobalWindow(new Record()));
}
/**
 * Returns whether the given type is composite (a ROW or STRUCTURED type),
 * looking through any number of DISTINCT type wrappers.
 */
public static boolean isCompositeType(LogicalType logicalType) {
    // Unwrap DISTINCT types iteratively until the underlying source type is reached.
    LogicalType current = logicalType;
    while (current instanceof DistinctType) {
        current = ((DistinctType) current).getSourceType();
    }
    final LogicalTypeRoot typeRoot = current.getTypeRoot();
    return typeRoot == STRUCTURED_TYPE || typeRoot == ROW;
}
@Test
void testIsCompositeTypeStructuredType() {
    // A STRUCTURED type with two attributes must be classified as composite.
    StructuredType logicalType =
            StructuredType.newBuilder(ObjectIdentifier.of("catalog", "database", "type"))
                    .attributes(
                            Arrays.asList(
                                    new StructuredType.StructuredAttribute(
                                            "f0", DataTypes.INT().getLogicalType()),
                                    new StructuredType.StructuredAttribute(
                                            "f1", DataTypes.STRING().getLogicalType())))
                    .build();
    List<DataType> fieldDataTypes = Arrays.asList(DataTypes.INT(), DataTypes.STRING());
    FieldsDataType dataType = new FieldsDataType(logicalType, fieldDataTypes);
    assertThat(LogicalTypeChecks.isCompositeType(dataType.getLogicalType())).isTrue();
}
/**
 * Serializes the given value to an XML {@code <value-data>} fragment according
 * to this meta's storage type and data type. A null value yields an empty tag.
 *
 * @throws IOException      for unserializable data types or unknown storage types
 * @throws RuntimeException on data-type mismatch or XML encoding failure
 */
@Override
public String getDataXML( Object object ) throws IOException {
  StringBuilder xml = new StringBuilder();
  String string;
  if ( object != null ) {
    try {
      switch ( storageType ) {
        case STORAGE_TYPE_NORMAL:
          // Handle Content -- only when not NULL
          //
          switch ( getType() ) {
            case TYPE_STRING:
              string = (String) object;
              break;
            case TYPE_NUMBER:
              string = Double.toString( (Double) object );
              break;
            case TYPE_INTEGER:
              string = Long.toString( (Long) object );
              break;
            case TYPE_DATE:
              string = XMLHandler.date2string( (Date) object );
              break;
            case TYPE_BIGNUMBER:
              string = ( (BigDecimal) object ).toString();
              break;
            case TYPE_BOOLEAN:
              string = Boolean.toString( (Boolean) object );
              break;
            case TYPE_BINARY:
              string = XMLHandler.encodeBinaryData( (byte[]) object );
              break;
            case TYPE_TIMESTAMP:
              string = XMLHandler.timestamp2string( (Timestamp) object );
              break;
            case TYPE_INET:
              string = ( (InetAddress) object ).toString();
              break;
            default:
              throw new IOException( toString() + " : Unable to serialize data type to XML " + getType() );
          }
          break;
        case STORAGE_TYPE_BINARY_STRING:
          // Handle binary string content -- only when not NULL
          // In this case, we opt not to convert anything at all for speed.
          // That way, we can save on CPU power.
          // Since the streams can be compressed, volume shouldn't be an issue
          // at all.
          //
          // NOTE: this branch wraps the already-tagged binary-string value in
          // the data tag and returns immediately, skipping the shared epilogue.
          string = XMLHandler.addTagValue( "binary-string", (byte[]) object );
          xml.append( XMLHandler.openTag( XML_DATA_TAG ) ).append( string ).append( XMLHandler.closeTag( XML_DATA_TAG ) );
          return xml.toString();
        case STORAGE_TYPE_INDEXED:
          // Just an index
          string = XMLHandler.addTagValue( "index-value", (Integer) object );
          break;
        default:
          throw new IOException( toString() + " : Unknown storage type " + getStorageType() );
      }
    } catch ( ClassCastException e ) {
      throw new RuntimeException( toString() + " : There was a data type error: the data type of "
        + object.getClass().getName() + " object [" + object + "] does not correspond to value meta ["
        + toStringMeta() + "]", e );
    } catch ( Exception e ) {
      throw new RuntimeException( toString() + " : there was a value XML encoding error", e );
    }
  } else {
    // If the object is null: give an empty string
    //
    string = "";
  }
  xml.append( XMLHandler.addTagValue( XML_DATA_TAG, string ) );
  return xml.toString();
}
@Test
public void testGetDataXML() throws IOException {
  // Each supported data type must serialize to <value-data>...</value-data>
  // followed by the platform line separator (appended by XMLHandler.addTagValue).

  // BigNumber
  BigDecimal bigDecimal = BigDecimal.ONE;
  ValueMetaBase valueDoubleMetaBase = new ValueMetaBase( String.valueOf( bigDecimal ), ValueMetaInterface.TYPE_BIGNUMBER );
  assertEquals( "<value-data>" + Encode.forXml( String.valueOf( bigDecimal ) ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    valueDoubleMetaBase.getDataXML( bigDecimal ) );

  // Boolean
  boolean valueBoolean = Boolean.TRUE;
  ValueMetaBase valueBooleanMetaBase = new ValueMetaBase( String.valueOf( valueBoolean ), ValueMetaInterface.TYPE_BOOLEAN );
  assertEquals( "<value-data>" + Encode.forXml( String.valueOf( valueBoolean ) ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    valueBooleanMetaBase.getDataXML( valueBoolean ) );

  // Date -- rendered with the default date mask
  Date date = new Date( 0 );
  ValueMetaBase dateMetaBase = new ValueMetaBase( date.toString(), ValueMetaInterface.TYPE_DATE );
  SimpleDateFormat formaterData = new SimpleDateFormat( ValueMetaBase.DEFAULT_DATE_FORMAT_MASK );
  assertEquals( "<value-data>" + Encode.forXml( formaterData.format( date ) ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    dateMetaBase.getDataXML( date ) );

  // Internet address -- uses InetAddress.toString()
  InetAddress inetAddress = InetAddress.getByName( "127.0.0.1" );
  ValueMetaBase inetAddressMetaBase = new ValueMetaBase( inetAddress.toString(), ValueMetaInterface.TYPE_INET );
  assertEquals( "<value-data>" + Encode.forXml( inetAddress.toString() ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    inetAddressMetaBase.getDataXML( inetAddress ) );

  // Integer -- boundary value
  long value = Long.MAX_VALUE;
  ValueMetaBase integerMetaBase = new ValueMetaBase( String.valueOf( value ), ValueMetaInterface.TYPE_INTEGER );
  assertEquals( "<value-data>" + Encode.forXml( String.valueOf( value ) ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    integerMetaBase.getDataXML( value ) );

  // String
  String stringValue = "TEST_STRING";
  ValueMetaBase valueMetaBase = new ValueMetaString( stringValue );
  assertEquals( "<value-data>" + Encode.forXml( stringValue ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    valueMetaBase.getDataXML( stringValue )
  );

  // Timestamp -- rendered with the default timestamp mask
  Timestamp timestamp = new Timestamp( 0 );
  ValueMetaBase valueMetaBaseTimeStamp = new ValueMetaBase( timestamp.toString(), ValueMetaInterface.TYPE_TIMESTAMP );
  SimpleDateFormat formater = new SimpleDateFormat( ValueMetaBase.DEFAULT_TIMESTAMP_FORMAT_MASK );
  assertEquals( "<value-data>" + Encode.forXml( formater.format( timestamp ) ) + "</value-data>" + SystemUtils.LINE_SEPARATOR,
    valueMetaBaseTimeStamp.getDataXML( timestamp ) );

  // Binary-string storage: payload is wrapped in a nested <binary-string> tag,
  // and the trailing separator is Const.CR inside the outer tag.
  byte[] byteTestValues = { 0, 1, 2, 3 };
  ValueMetaBase valueMetaBaseByteArray = new ValueMetaBase( byteTestValues.toString(), ValueMetaInterface.TYPE_STRING );
  valueMetaBaseByteArray.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  assertEquals(
    "<value-data><binary-string>" + Encode.forXml( XMLHandler.encodeBinaryData( byteTestValues ) )
      + "</binary-string>" + Const.CR + "</value-data>",
    valueMetaBaseByteArray.getDataXML( byteTestValues ) );
}
public static Application fromServicesXml(String xml, Networking networking) { Path applicationDir = StandaloneContainerRunner.createApplicationPackage(xml); return new Application(applicationDir, networking, true); }
@Test void minimal_application_can_be_constructed() { try (Application application = Application.fromServicesXml("<container version=\"1.0\"/>", Networking.disable)) { } }
@Override public Publisher<Exchange> to(String uri, Object data) { String streamName = requestedUriToStream.computeIfAbsent(uri, camelUri -> { try { String uuid = context.getUuidGenerator().generateUuid(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from("reactive-streams:" + uuid) .to(camelUri); } }); return uuid; } catch (Exception e) { throw new IllegalStateException("Unable to create requested reactive stream from direct URI: " + uri, e); } }); return toStream(streamName, data); }
@Test public void testToFunctionWithExchange() throws Exception { context.start(); Set<String> values = Collections.synchronizedSet(new TreeSet<>()); CountDownLatch latch = new CountDownLatch(3); Function<Object, Publisher<Exchange>> fun = crs.to("bean:hello"); Flux.just(1, 2, 3) .flatMap(fun) .map(Exchange::getMessage) .map(e -> e.getBody(String.class)) .doOnNext(values::add) .doOnNext(res -> latch.countDown()) .subscribe(); assertTrue(latch.await(2, TimeUnit.SECONDS)); assertEquals(new TreeSet<>(Arrays.asList("Hello 1", "Hello 2", "Hello 3")), values); }
public static List<String> orderFields(List<String> fields, List<SortSpec> sorts) { if (!needsReorderingFields(fields, sorts)) { return fields; } final List<String> sortFields = sorts.stream() .filter(ValuesBucketOrdering::isGroupingSort) .map(SortSpec::field) .collect(Collectors.toList()); return fields.stream() .sorted(new FieldsSortingComparator(sortFields)) .collect(Collectors.toList()); }
@Test void pivotUsedForSortIsPulledToTop() { final List<SortSpec> pivotSorts = List.of(PivotSort.create("baz", SortSpec.Direction.Descending)); final List<String> orderedBuckets = ValuesBucketOrdering.orderFields(List.of("foo", "bar", "baz"), pivotSorts); assertThat(orderedBuckets).containsExactly("baz", "foo", "bar"); }
@SuppressWarnings("unused") // Required for automatic type inference public static <K> Builder0<K> forClass(final Class<K> type) { return new Builder0<>(); }
@Test public void shouldWorkWithSuppliers1() { // Given: handlerMap1 = HandlerMaps.forClass(BaseType.class).withArgType(String.class) .put(LeafTypeA.class, () -> handler1_1) .build(); // When: handlerMap1.get(LeafTypeA.class).handle("A", LEAF_A); // Then: verify(handler1_1).handle("A", LEAF_A); }
public static HazelcastInstance newHazelcastInstance(Config config) { if (config == null) { config = Config.load(); } return newHazelcastInstance( config, config.getInstanceName(), new DefaultNodeContext() ); }
@Test(expected = IllegalStateException.class) public void test_NewInstance_terminateInstance_afterNodeStart() throws Exception { NodeContext context = new TestNodeContext() { @Override public NodeExtension createNodeExtension(final Node node) { NodeExtension nodeExtension = super.createNodeExtension(node); doAnswer(invocation -> { node.hazelcastInstance.shutdown(); return null; }).when(nodeExtension).afterStart(); return nodeExtension; } }; Config config = new Config(); config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false); hazelcastInstance = HazelcastInstanceFactory.newHazelcastInstance(config, randomString(), context); }
@Override public String named() { return PluginEnum.WEB_SOCKET.getName(); }
@Test public void namedTest() { assertEquals(PluginEnum.WEB_SOCKET.getName(), webSocketPlugin.named()); }
public FEELFnResult<BigDecimal> invoke(@ParameterName("list") List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "the list cannot be null")); } if (list.isEmpty()) { return FEELFnResult.ofResult(null); // DMN spec, Table 75: ...or null if list is empty } BigDecimal sum = BigDecimal.ZERO; for ( Object element : list ) { if ( element instanceof BigDecimal ) { sum = sum.add( (BigDecimal) element ); } else if ( element instanceof Number ) { BigDecimal value = NumberEvalHelper.getBigDecimalOrNull(element ); if (value == null) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "an element in the list is not suitable for the sum")); } else { sum = sum.add( value ); } } else { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "an element in the list is not a number")); } } return FEELFnResult.ofResult( sum ); }
@Test void invokeListParamContainsUnsupportedType() { FunctionTestUtil.assertResultError(sumFunction.invoke(Arrays.asList(10, "test", 2)), InvalidParametersEvent.class); }
@Override public T getValue() { return transform(base.getValue()); }
@Test public void returnsATransformedValue() throws Exception { assertThat(gauge2.getValue()) .isEqualTo(3); }
@Override public Integer addAndGetRank(double score, V object) { return get(addAndGetRankAsync(score, object)); }
@Test public void testAddAndGetRank() { RScoredSortedSet<Integer> set = redisson.getScoredSortedSet("simple"); Integer res = set.addAndGetRank(0.3, 1); assertThat(res).isEqualTo(0); Integer res2 = set.addAndGetRank(0.4, 2); assertThat(res2).isEqualTo(1); Integer res3 = set.addAndGetRank(0.2, 3); assertThat(res3).isEqualTo(0); Assertions.assertTrue(set.contains(3)); }
<T extends PipelineOptions> T as(Class<T> iface) { checkNotNull(iface); checkArgument(iface.isInterface(), "Not an interface: %s", iface); T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { synchronized (this) { // double check existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { Registration<T> registration = PipelineOptionsFactory.CACHE .get() .validateWellFormed(iface, computedProperties.knownInterfaces); List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors(); Class<T> proxyClass = registration.getProxyClass(); existingOption = InstanceBuilder.ofType(proxyClass) .fromClass(proxyClass) .withArg(InvocationHandler.class, this) .build(); computedProperties = computedProperties.updated(iface, existingOption, propertyDescriptors); } } } return existingOption; }
@Test public void testDisplayDataIncludesExplicitlySetDefaults() { HasDefaults options = PipelineOptionsFactory.as(HasDefaults.class); String defaultValue = options.getFoo(); options.setFoo(defaultValue); DisplayData data = DisplayData.from(options); assertThat(data, hasDisplayItem("foo", defaultValue)); }