focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Returns a human-readable description of each deadlocked thread (name, lock, lock owner,
// truncated stack trace), or an empty set when the ThreadMXBean reports no deadlocks.
// The returned set is unmodifiable.
public Set<String> getDeadlockedThreads() { final long[] ids = threads.findDeadlockedThreads(); if (ids != null) { final Set<String> deadlocks = new HashSet<>(); for (ThreadInfo info : threads.getThreadInfo(ids, MAX_STACK_TRACE_DEPTH)) { final StringBuilder stackTrace = new StringBuilder(); for (StackTraceElement element : info.getStackTrace()) { stackTrace.append("\t at ") .append(element.toString()) .append(String.format("%n")); } deadlocks.add( String.format("%s locked on %s (owned by %s):%n%s", info.getThreadName(), info.getLockName(), info.getLockOwnerName(), stackTrace.toString() ) ); } return Collections.unmodifiableSet(deadlocks); } return Collections.emptySet(); }
// Smoke test: the default constructor must wire up a ThreadMXBean so the call never returns null.
@Test public void autoDiscoversTheThreadMXBean() { assertThat(new ThreadDeadlockDetector().getDeadlockedThreads()) .isNotNull(); }
// Migrates legacy alarm callbacks and alert conditions for every stream into the new
// notification/event-definition model. IDs already present in the two "completed" sets are
// skipped (but still recorded in the result). Per-item failures are logged and skipped so a
// single bad document does not abort the whole migration.
public MigrationResult run(Set<String> completedAlertConditions, Set<String> completedAlarmCallbacks) { final MigrationResult.Builder result = MigrationResult.builder(); streamsCollection.find().forEach(stream -> { final String streamId = stream.getObjectId("_id").toHexString(); final String streamTitle = stream.getString("title"); final FindIterable<Document> iterable = alarmCallbacksCollection.find(Filters.eq("stream_id", streamId)); final Set<NotificationDto> notifications = Streams.stream(iterable) .map(alarmCallback -> { final String callbackId = alarmCallback.getObjectId("_id").toHexString(); if (completedAlarmCallbacks.contains(callbackId)) { result.addCompletedAlarmCallback(callbackId); return dbNotificationService.get(callbackId).orElse(null); } try { final NotificationDto notificationDto = migrateAlarmCallback(alarmCallback); result.addCompletedAlarmCallback(callbackId); return notificationDto; } catch (Exception e) { LOG.error("Couldn't migrate legacy alarm callback on stream <{}/{}>: {}", streamTitle, streamId, alarmCallback, e); return null; } }) .filter(Objects::nonNull) .collect(Collectors.toSet()); if (!stream.containsKey("alert_conditions")) { return; } @SuppressWarnings("unchecked") final List<Document> list = (List<Document>) stream.get("alert_conditions"); list.forEach(alertCondition -> { final String conditionId = alertCondition.getString("id"); final String conditionType = alertCondition.getString("type"); if (completedAlertConditions.contains(conditionId)) { result.addCompletedAlertCondition(conditionId); return; } try { switch (conditionType) { case "message_count": migrateMessageCount(new Helper(stream, alertCondition, notifications)); result.addCompletedAlertCondition(conditionId); break; case "field_value": migrateFieldValue(new Helper(stream, alertCondition, notifications)); result.addCompletedAlertCondition(conditionId); break; case "field_content_value": migrateFieldContentValue(new Helper(stream, alertCondition, notifications)); 
result.addCompletedAlertCondition(conditionId); break; default: LOG.warn("Couldn't migrate unknown legacy alert condition type: {}", conditionType); } } catch (Exception e) { LOG.error("Couldn't migrate legacy alert condition on stream <{}/{}>: {}", streamTitle, streamId, alertCondition, e); } }); }); return result.build(); }
// Runs the migration with one alert condition and one alarm callback passed in as already
// completed; verifies the result reports both the pre-completed and the freshly migrated IDs,
// and that the create handlers are invoked only for items actually migrated in this run.
@Test
@MongoDBFixtures("legacy-alert-conditions.json")
public void runWithMigrationStatus() {
    final int migratedConditions = 9; // 9 of the 10 conditions: one is passed in as already completed
    final int migratedCallbacks = 3; // 3 of the 4 callbacks: one is passed in as already completed
    assertThat(migrator.run(Collections.singleton("00000000-0000-0000-0000-000000000002"), Collections.singleton("54e3deadbeefdeadbeef0001"))).satisfies(result -> {
        assertThat(result.completedAlertConditions()).containsOnly(
                "00000000-0000-0000-0000-000000000001", "00000000-0000-0000-0000-000000000002", "00000000-0000-0000-0000-000000000003", "00000000-0000-0000-0000-000000000004", "00000000-0000-0000-0000-000000000005", "00000000-0000-0000-0000-000000000006", "00000000-0000-0000-0000-000000000007", "00000000-0000-0000-0000-000000000008", "00000000-0000-0000-0000-000000000009", "00000000-0000-0000-0000-000000000010"
        );
        assertThat(result.completedAlarmCallbacks()).containsOnly(
                "54e3deadbeefdeadbeef0001", "54e3deadbeefdeadbeef0002", "54e3deadbeefdeadbeef0003", "54e3deadbeefdeadbeef0004"
        );
    });
    // Make sure we use the EventDefinitionHandler to create the event definitions
    verify(eventDefinitionHandler, times(migratedConditions)).create(any(EventDefinitionDto.class), any(Optional.class));
    // Make sure we use the NotificationResourceHandler to create the notifications
    verify(notificationResourceHandler, times(migratedCallbacks)).create(any(NotificationDto.class), any(Optional.class));
    assertThat(eventDefinitionService.streamAll().count()).isEqualTo(migratedConditions);
    assertThat(notificationService.streamAll().count()).isEqualTo(migratedCallbacks);
}
// Wraps the incoming context in the matching async-function adapter: iteration-aware contexts
// get the iteration wrapper, all others the plain wrapper. Rejects null contexts up front.
@Override public void setRuntimeContext(RuntimeContext runtimeContext) { Preconditions.checkNotNull(runtimeContext); if (runtimeContext instanceof IterationRuntimeContext) { super.setRuntimeContext( new RichAsyncFunctionIterationRuntimeContext( (IterationRuntimeContext) runtimeContext)); } else { super.setRuntimeContext(new RichAsyncFunctionRuntimeContext(runtimeContext)); } }
// Verifies that setRuntimeContext wraps an IterationRuntimeContext so superstep access is
// delegated while the aggregator methods throw UnsupportedOperationException.
@Test
void testIterationRuntimeContext() {
    RichAsyncFunction<Integer, Integer> function =
            new RichAsyncFunction<Integer, Integer>() {
                private static final long serialVersionUID = -2023923961609455894L;

                @Override
                public void asyncInvoke(Integer input, ResultFuture<Integer> resultFuture) throws Exception {
                    // no op
                }
            };
    int superstepNumber = 42;
    IterationRuntimeContext mockedIterationRuntimeContext = mock(IterationRuntimeContext.class);
    when(mockedIterationRuntimeContext.getSuperstepNumber()).thenReturn(superstepNumber);
    function.setRuntimeContext(mockedIterationRuntimeContext);
    IterationRuntimeContext iterationRuntimeContext = function.getIterationRuntimeContext();
    assertThat(iterationRuntimeContext.getSuperstepNumber()).isEqualTo(superstepNumber);
    assertThatThrownBy(() -> iterationRuntimeContext.getIterationAggregator("foobar"))
            .isInstanceOf(UnsupportedOperationException.class);
    assertThatThrownBy(() -> iterationRuntimeContext.getPreviousIterationAggregate("foobar"))
            .isInstanceOf(UnsupportedOperationException.class);
}
// No-arg constructor; creates an empty ACL (no users, no groups).
public AccessControlList() { }
// Exercises the "users groups" string constructor: users before the space, groups after,
// comma-separated lists, and trimming of stray whitespace on either side.
@Test public void testAccessControlList() throws Exception { AccessControlList acl; Collection<String> users; Collection<String> groups; acl = new AccessControlList("drwho tardis"); users = acl.getUsers(); assertThat(users.size()).isOne(); assertThat(users.iterator().next()).isEqualTo("drwho"); groups = acl.getGroups(); assertThat(groups.size()).isOne(); assertThat(groups.iterator().next()).isEqualTo("tardis"); acl = new AccessControlList("drwho"); users = acl.getUsers(); assertThat(users.size()).isOne(); assertThat(users.iterator().next()).isEqualTo("drwho"); groups = acl.getGroups(); assertThat(groups.size()).isZero(); acl = new AccessControlList("drwho "); users = acl.getUsers(); assertThat(users.size()).isOne(); assertThat(users.iterator().next()).isEqualTo("drwho"); groups = acl.getGroups(); assertThat(groups.size()).isZero(); acl = new AccessControlList(" tardis"); users = acl.getUsers(); assertThat(users.size()).isZero(); groups = acl.getGroups(); assertThat(groups.size()).isOne(); assertThat(groups.iterator().next()).isEqualTo("tardis"); Iterator<String> iter; acl = new AccessControlList("drwho,joe tardis, users"); users = acl.getUsers(); assertThat(users.size()).isEqualTo(2); iter = users.iterator(); assertThat(iter.next()).isEqualTo("drwho"); assertThat(iter.next()).isEqualTo("joe"); groups = acl.getGroups(); assertThat(groups.size()).isEqualTo(2); iter = groups.iterator(); assertThat(iter.next()).isEqualTo("tardis"); assertThat(iter.next()).isEqualTo("users"); }
// Converts a Java object to its Ruby representation: null -> Ruby nil, otherwise an exact
// class match in CONVERTER_MAP, falling back to fallbackConvert (which presumably handles
// subtype/interface matches — confirm against that method).
public static Object convert(final Object o) { if (o == null) { return RubyUtil.RUBY.getNil(); } final Class<?> cls = o.getClass(); final Valuefier.Converter converter = CONVERTER_MAP.get(cls); if (converter != null) { return converter.convert(o); } return fallbackConvert(o, cls); }
// Expects LocalDate to be valuefied into a RubyTimestamp — the exact mapping is established
// by the converter registrations elsewhere; this only pins the resulting class.
@Test public void testLocalDate() { LocalDate ld = LocalDate.now(); Object result = Valuefier.convert(ld); assertEquals(JrubyTimestampExtLibrary.RubyTimestamp.class, result.getClass()); }
// Dispatches authorization checks by statement type; statement types not listed here are
// allowed through without any topic-level validation.
@Override public void checkAuthorization( final KsqlSecurityContext securityContext, final MetaStore metaStore, final Statement statement ) { if (statement instanceof Query) { validateQuery(securityContext, metaStore, (Query)statement); } else if (statement instanceof InsertInto) { validateInsertInto(securityContext, metaStore, (InsertInto)statement); } else if (statement instanceof CreateAsSelect) { validateCreateAsSelect(securityContext, metaStore, (CreateAsSelect)statement); } else if (statement instanceof PrintTopic) { validatePrintTopic(securityContext, (PrintTopic)statement); } else if (statement instanceof CreateSource) { validateCreateSource(securityContext, (CreateSource)statement); } }
// CREATE STREAM over a topic lacking READ permission must be rejected with a
// KsqlTopicAuthorizationException naming the denied topic.
@Test
public void shouldThrowWhenCreateSourceWithoutReadPermissionsDenied() {
    // Given:
    givenTopicAccessDenied(KAFKA_TOPIC, AclOperation.READ);
    final Statement statement = givenStatement(String.format(
        "CREATE STREAM s1 WITH (kafka_topic='%s', value_format='JSON');", KAFKA_TOPIC)
    );

    // When:
    final Exception e = assertThrows(
        KsqlTopicAuthorizationException.class,
        () -> authorizationValidator.checkAuthorization(securityContext, metaStore, statement)
    );

    // Then:
    assertThat(e.getMessage(), containsString(String.format(
        "Authorization denied to Read on topic(s): [%s]", KAFKA_TOPIC
    )));
}
// Validates a widget type: non-empty name and descriptor; a missing tenant id defaults to
// the NULL tenant (system-level), and any non-NULL tenant must actually exist.
@Override protected void validateDataImpl(TenantId tenantId, WidgetTypeDetails widgetTypeDetails) { validateString("Widgets type name", widgetTypeDetails.getName()); if (widgetTypeDetails.getDescriptor() == null || widgetTypeDetails.getDescriptor().size() == 0) { throw new DataValidationException("Widgets type descriptor can't be empty!"); } if (widgetTypeDetails.getTenantId() == null) { widgetTypeDetails.setTenantId(TenantId.fromUUID(ModelConstants.NULL_UUID)); } if (!widgetTypeDetails.getTenantId().getId().equals(ModelConstants.NULL_UUID)) { if (!tenantService.tenantExists(widgetTypeDetails.getTenantId())) { throw new DataValidationException("Widget type is referencing to non-existent tenant!"); } } }
// Verifies the name field is passed through the shared validateString helper.
@Test void testValidateNameInvocation() { WidgetTypeDetails widgetTypeDetails = new WidgetTypeDetails(); widgetTypeDetails.setName("widget type gas"); widgetTypeDetails.setDescriptor(JacksonUtil.toJsonNode("{\"content\":\"empty\"}")); widgetTypeDetails.setTenantId(tenantId); validator.validateDataImpl(tenantId, widgetTypeDetails); verify(validator).validateString("Widgets type name", widgetTypeDetails.getName()); }
// Static factory for a fresh Builder instance.
public static Builder builder() { return new Builder(); }
// Two builder-produced instances with identical fields must be equal and hash-compatible,
// so a HashSet deduplicates them to a single element.
@Test public void testEqualsAndHashCode() { AuthPathData authPathData1 = AuthPathData.builder().appName("appName").enabled(true).path("path").build(); AuthPathData authPathData2 = AuthPathData.builder().appName("appName").enabled(true).path("path").build(); Set<AuthPathData> set = new HashSet<>(); set.add(authPathData1); set.add(authPathData2); assertThat(set, hasSize(1)); }
// Creates a PipelineOptions proxy implementing the given interface, with default values.
public static <T extends PipelineOptions> T as(Class<T> klass) { return new Builder().as(klass); }
// When an options interface inherits conflicting getter return types, `as` must fail with an
// IllegalArgumentException whose message lists every conflicting method signature.
@Test public void testMultipleReturnTypeConflictsThrows() throws Exception { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage( "[org.apache.beam.sdk.options." + "PipelineOptionsFactoryTest$MultiReturnTypeConflict]"); expectedException.expectMessage( "Methods with multiple definitions with different return types"); expectedException.expectMessage("Method [getObject] has multiple definitions"); expectedException.expectMessage( "public abstract java.lang.Object " + "org.apache.beam.sdk.options.PipelineOptionsFactoryTest$" + "MissingSetter.getObject()"); expectedException.expectMessage( "public abstract java.lang.String org.apache.beam.sdk.options." + "PipelineOptionsFactoryTest$MultiReturnTypeConflict.getObject()"); expectedException.expectMessage("Method [getOther] has multiple definitions"); expectedException.expectMessage( "public abstract java.lang.Object " + "org.apache.beam.sdk.options.PipelineOptionsFactoryTest$" + "MultiReturnTypeConflictBase.getOther()"); expectedException.expectMessage( "public abstract java.lang.Long org.apache.beam.sdk.options." + "PipelineOptionsFactoryTest$MultiReturnTypeConflict.getOther()"); PipelineOptionsFactory.as(MultiReturnTypeConflict.class); }
/**
 * Validates a reservation submission request: the reservation id must be present, the target
 * queue must map to a managed {@link Plan}, and the reservation definition must pass
 * validation against that plan.
 *
 * @param reservationSystem the reservation system to resolve the queue's plan from
 * @param request the submission request being validated
 * @param reservationId the id the reservation was created with; must not be null
 * @return the plan the reservation will be submitted to
 * @throws YarnException if the id is null, the queue is unmanaged, or the definition is invalid
 */
public Plan validateReservationSubmissionRequest( ReservationSystem reservationSystem, ReservationSubmissionRequest request, ReservationId reservationId) throws YarnException { String message; if (reservationId == null) { message = "Reservation id cannot be null. Please try again specifying " + "a valid reservation id by creating a new reservation id."; // fixed doubled space in the concatenated message throw RPCUtil.getRemoteException(message); } // Check if it is a managed queue String queue = request.getQueue(); Plan plan = getPlanFromQueue(reservationSystem, queue, AuditConstants.SUBMIT_RESERVATION_REQUEST); validateReservationDefinition(reservationId, request.getReservationDefinition(), plan, AuditConstants.SUBMIT_RESERVATION_REQUEST); return plan; }
// A negative recurrence expression ("-1234") must make validation fail with a
// YarnException whose message starts with "Negative Period : ", and no plan is returned.
@Test public void testSubmitReservationNegativeRecurrenceExpression() { ReservationSubmissionRequest request = createSimpleReservationSubmissionRequest(1, 1, 1, 5, 3, "-1234"); plan = null; try { plan = rrValidator.validateReservationSubmissionRequest(rSystem, request, ReservationSystemTestUtil.getNewReservationId()); Assert.fail(); } catch (YarnException e) { Assert.assertNull(plan); String message = e.getMessage(); Assert.assertTrue(message .startsWith("Negative Period : ")); LOG.info(message); } }
// Resolves a broker address for subscription. Lookup order: exact brokerId; if that missed and
// a slave was requested, brokerId + 1 (DLedger-style fallback to the next replica id); and if
// onlyThisBroker is false, any available broker from the table. Returns null when brokerName
// is null or nothing is found.
public FindBrokerResult findBrokerAddressInSubscribe( final String brokerName, final long brokerId, final boolean onlyThisBroker ) { if (brokerName == null) { return null; } String brokerAddr = null; boolean slave = false; boolean found = false; HashMap<Long/* brokerId */, String/* address */> map = this.brokerAddrTable.get(brokerName); if (map != null && !map.isEmpty()) { brokerAddr = map.get(brokerId); slave = brokerId != MixAll.MASTER_ID; found = brokerAddr != null; if (!found && slave) { brokerAddr = map.get(brokerId + 1); found = brokerAddr != null; } if (!found && !onlyThisBroker) { Entry<Long, String> entry = map.entrySet().iterator().next(); brokerAddr = entry.getValue(); slave = entry.getKey() != MixAll.MASTER_ID; found = brokerAddr != null; } } if (found) { return new FindBrokerResult(brokerAddr, slave, findBrokerVersion(brokerName, brokerAddr)); } return null; }
// Exercises slave lookup: a direct hit on brokerId 1, then the DLedger fallback where
// brokerId 1 is absent and brokerId + 1 (id 2) is resolved instead.
@Test
public void testFindBrokerAddressInSubscribe() {
    // dledger normal case
    String brokerName = "BrokerA";
    HashMap<Long, String> addrMap = new HashMap<>();
    addrMap.put(0L, "127.0.0.1:10911");
    addrMap.put(1L, "127.0.0.1:10912");
    addrMap.put(2L, "127.0.0.1:10913");
    brokerAddrTable.put(brokerName, addrMap);
    long brokerId = 1;
    FindBrokerResult brokerResult = mqClientInstance.findBrokerAddressInSubscribe(brokerName, brokerId, false);
    assertThat(brokerResult).isNotNull();
    assertThat(brokerResult.getBrokerAddr()).isEqualTo("127.0.0.1:10912");
    assertThat(brokerResult.isSlave()).isTrue();

    // dledger case, when node n0 was voted as the leader
    brokerName = "BrokerB";
    HashMap<Long, String> addrMapNew = new HashMap<>();
    addrMapNew.put(0L, "127.0.0.1:10911");
    addrMapNew.put(2L, "127.0.0.1:10912");
    addrMapNew.put(3L, "127.0.0.1:10913");
    brokerAddrTable.put(brokerName, addrMapNew);
    brokerResult = mqClientInstance.findBrokerAddressInSubscribe(brokerName, brokerId, false);
    assertThat(brokerResult).isNotNull();
    assertThat(brokerResult.getBrokerAddr()).isEqualTo("127.0.0.1:10912");
    assertThat(brokerResult.isSlave()).isTrue();
}
// UDF returning the current wall-clock time in epoch milliseconds.
@Udf(description = "Returns the current number of milliseconds for the system since " + "1970-01-01 00:00:00 UTC/GMT.") public long unixTimestamp() { return System.currentTimeMillis(); }
// NOTE(review): this calls a one-argument overload unixTimestamp(Timestamp), not the
// zero-argument method shown alongside — confirm that overload exists on the UDF class.
@Test
public void shouldReturnMilliseconds() {
    // When:
    final long result = udf.unixTimestamp(new Timestamp(100L));

    // Then:
    assertThat(result, is(100L));
}
// Opens the operator and creates a per-operator worker pool whose name embeds the operator's
// unique ID, so threads are attributable in dumps.
@Override public void open() throws Exception { super.open(); final String operatorID = getRuntimeContext().getOperatorUniqueID(); this.workerPool = ThreadPools.newWorkerPool("iceberg-worker-pool-" + operatorID, workerPoolSize); }
// Commits several checkpoints that carry no data files and verifies the max-committed
// checkpoint id still advances in the Iceberg snapshot, with no Flink manifests written.
@TestTemplate
public void testCommitTxnWithoutDataFiles() throws Exception {
    long checkpointId = 0;
    long timestamp = 0;
    JobID jobId = new JobID();
    OperatorID operatorId;
    try (OneInputStreamOperatorTestHarness<WriteResult, Void> harness = createStreamSink(jobId)) {
        harness.setup();
        harness.open();
        operatorId = harness.getOperator().getOperatorID();
        SimpleDataUtil.assertTableRows(table, Lists.newArrayList(), branch);
        assertSnapshotSize(0);
        assertMaxCommittedCheckpointId(jobId, operatorId, -1L);

        // It's better to advance the max-committed-checkpoint-id in iceberg snapshot, so that the
        // future flink job
        // failover won't fail.
        for (int i = 1; i <= 3; i++) {
            harness.snapshot(++checkpointId, ++timestamp);
            assertFlinkManifests(0);
            harness.notifyOfCompletedCheckpoint(checkpointId);
            assertFlinkManifests(0);
            assertSnapshotSize(i);
            assertMaxCommittedCheckpointId(jobId, operatorId, checkpointId);
        }
    }
}
// Generates the code (as a BlockStmt) that declares a KiePMMLApply variable for the given
// PMML Apply: first emits declarations for each nested expression (named
// "<variableName>_<index>"), then the builder call wiring name, function, default value,
// mapMissingTo, invalid-value treatment and the nested argument list.
// NOTE(review): the MISSING_VARIABLE_INITIALIZER_TEMPLATE error message formats `toReturn`
// (still empty at that point) rather than `applyBody` — confirm that is intended.
static BlockStmt getApplyVariableDeclaration(final String variableName, final Apply apply) { final MethodDeclaration methodDeclaration = APPLY_TEMPLATE.getMethodsByName(GETKIEPMMLAPPLY).get(0).clone(); final BlockStmt applyBody = methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration))); final VariableDeclarator variableDeclarator = getVariableDeclarator(applyBody, APPLY).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, APPLY, applyBody))); variableDeclarator.setName(variableName); final BlockStmt toReturn = new BlockStmt(); int counter = 0; final NodeList<Expression> arguments = new NodeList<>(); for (org.dmg.pmml.Expression expression : apply.getExpressions()) { String nestedVariableName = String.format(VARIABLE_NAME_TEMPLATE, variableName, counter); arguments.add(new NameExpr(nestedVariableName)); BlockStmt toAdd = getKiePMMLExpressionBlockStmt(nestedVariableName, expression); toAdd.getStatements().forEach(toReturn::addStatement); counter++; } final MethodCallExpr initializer = variableDeclarator.getInitializer() .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, APPLY, toReturn))) .asMethodCallExpr(); final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer); final StringLiteralExpr nameExpr = new StringLiteralExpr(variableName); final StringLiteralExpr functionExpr = new StringLiteralExpr(apply.getFunction()); builder.setArgument(0, nameExpr); builder.setArgument(2, functionExpr); getChainedMethodCallExprFrom("withDefaultValue", initializer).setArgument(0, getExpressionForObject(apply.getDefaultValue())); getChainedMethodCallExprFrom("withMapMissingTo", initializer).setArgument(0, getExpressionForObject(apply.getMapMissingTo())); final Expression invalidTreatmentExpr = apply.getInvalidValueTreatment() != null ? 
new StringLiteralExpr(apply.getInvalidValueTreatment().value()) : new NullLiteralExpr(); getChainedMethodCallExprFrom("withInvalidValueTreatmentMethod", initializer).setArgument(0, invalidTreatmentExpr); getChainedMethodCallExprFrom("asList", initializer).setArguments(arguments); applyBody.getStatements().forEach(toReturn::addStatement); return toReturn; }
// Generates the declaration for an Apply over two constants and compares it against a
// golden source file, then verifies the generated code actually compiles.
@Test void getApplyVariableDeclarationWithConstants() throws IOException { String variableName = "variableName"; Apply apply = new Apply(); apply.setFunction(function); String mapMissingTo = "mapMissingTo"; apply.setMapMissingTo(mapMissingTo); String defaultValue = "defaultValue"; apply.setDefaultValue(defaultValue); InvalidValueTreatmentMethod invalidValueTreatmentMethod = InvalidValueTreatmentMethod.AS_MISSING; apply.setInvalidValueTreatment(invalidValueTreatmentMethod); Constant constant1 = new Constant(); constant1.setValue(value1); Constant constant2 = new Constant(); constant2.setValue(value2); apply.addExpressions(constant1, constant2); BlockStmt retrieved = org.kie.pmml.compiler.commons.codegenfactories.KiePMMLApplyFactory.getApplyVariableDeclaration(variableName, apply); String text = getFileContent(TEST_01_SOURCE); Statement expected = JavaParserUtils.parseBlock(String.format(text, value1, value2, variableName, function, defaultValue, mapMissingTo, invalidValueTreatmentMethod.value())); assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue(); List<Class<?>> imports = Arrays.asList(KiePMMLConstant.class, KiePMMLApply.class, Collections.class, Arrays.class); commonValidateCompilationWithImports(retrieved, imports); }
/** Human-readable summary of this resource config: url, id and resourceType. */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder("ResourceConfig{");
    sb.append("url=").append(url);
    sb.append(", id='").append(id).append('\'');
    sb.append(", resourceType=").append(resourceType);
    sb.append('}');
    return sb.toString();
}
// Adding the same jar id twice (from two different filesystem paths) must be rejected.
@Test
public void when_addDuplicateJarWithPath_then_throwsException() throws Exception {
    // Given
    String resourceId = "jarfile";
    String path1 = createFile("path/to/" + resourceId).toString();
    String path2 = createFile("path/to/another/" + resourceId).toString();
    config.addJar(path1);

    // When
    assertThrows(IllegalArgumentException.class, () -> config.addJar(path2));
}
// Builds the project-status response from the quality-gate measure JSON: overall level,
// CAYC status, ignored conditions, per-condition details and periods. Falls back to a
// response without quality-gate details when no measure data is present.
public ProjectStatusResponse.ProjectStatus format() { if (!optionalMeasureData.isPresent()) { return newResponseWithoutQualityGateDetails(); } JsonObject json = JsonParser.parseString(optionalMeasureData.get()).getAsJsonObject(); ProjectStatusResponse.Status qualityGateStatus = measureLevelToQualityGateStatus(json.get("level").getAsString()); projectStatusBuilder.setStatus(qualityGateStatus); projectStatusBuilder.setCaycStatus(caycStatus.toString()); formatIgnoredConditions(json); formatConditions(json.getAsJsonArray("conditions")); formatPeriods(); return projectStatusBuilder.build(); }
// A condition with an unrecognized comparator ("UNKNOWN") must make format() fail with an
// IllegalStateException naming the comparator.
@Test public void fail_when_measure_op_is_unknown() { String measureData = "{\n" + " \"level\": \"ERROR\",\n" + " \"conditions\": [\n" + " {\n" + " \"metric\": \"new_coverage\",\n" + " \"op\": \"UNKNOWN\",\n" + " \"period\": 1,\n" + " \"warning\": \"80\",\n" + " \"error\": \"85\",\n" + " \"actual\": \"82.2985024398452\",\n" + " \"level\": \"ERROR\"\n" + " }\n" + " ]\n" + "}"; underTest = newQualityGateDetailsFormatter(measureData, new SnapshotDto()); assertThatThrownBy(() -> underTest.format()) .isInstanceOf(IllegalStateException.class) .hasMessageContaining("Unknown quality gate comparator 'UNKNOWN'"); }
// Sets the analysis uuid exactly once: rejects a second call (IllegalStateException via
// checkState) and a null value (NullPointerException). Returns this for chaining.
@Override public MutableAnalysisMetadataHolder setUuid(String s) { checkState(!uuid.isInitialized(), "Analysis uuid has already been set"); requireNonNull(s, "Analysis uuid can't be null"); this.uuid.setProperty(s); return this; }
// Null uuid must be rejected with an NPE carrying the documented message.
@Test public void setUuid_throws_NPE_is_parameter_is_null() { assertThatThrownBy(() -> underTest.setUuid(null)) .isInstanceOf(NullPointerException.class) .hasMessage("Analysis uuid can't be null"); }
// Read-only view: any attempt to write bytes is rejected unconditionally.
@Override public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { throw new ReadOnlyBufferException(); }
// Expects UnsupportedOperationException even though the implementation throws
// ReadOnlyBufferException — that class is a subclass of UnsupportedOperationException in the
// JDK, so the assertion matches.
@Test public void shouldRejectSetBytes3() { assertThrows(UnsupportedOperationException.class, new Executable() { @Override public void execute() throws IOException { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (byte[]) null, 0, 0); } }); }
// Serializes this message into the buffer by delegating to the shared WRITER instance.
@Override public void writeTo(ByteBuf byteBuf) throws LispWriterException { WRITER.writeTo(byteBuf, this); }
// Round-trip check: writing request1 and reading it back must yield an equal object.
@Test public void testSerialization() throws LispReaderException, LispWriterException, LispParseError { ByteBuf byteBuf = Unpooled.buffer(); InfoRequestWriter writer = new InfoRequestWriter(); writer.writeTo(byteBuf, request1); InfoRequestReader reader = new InfoRequestReader(); LispInfoRequest deserialized = reader.readFrom(byteBuf); new EqualsTester().addEqualityGroup(request1, deserialized).testEquals(); }
// Generates the Java source for a scorecard model from its template: clones the template
// compilation unit, customizes the constructor, and returns a single-entry map of
// fully-qualified class name -> generated source.
public static Map<String, String> getKiePMMLScorecardModelSourcesMap(final DroolsCompilationDTO<Scorecard> compilationDTO) { logger.trace("getKiePMMLScorecardModelSourcesMap {} {} {}", compilationDTO.getFields(), compilationDTO.getModel(), compilationDTO.getPackageName()); CompilationUnit cloneCU = getKiePMMLModelCompilationUnit(compilationDTO, KIE_PMML_SCORECARD_MODEL_TEMPLATE_JAVA, KIE_PMML_SCORECARD_MODEL_TEMPLATE); String className = compilationDTO.getSimpleClassName(); ClassOrInterfaceDeclaration modelTemplate = cloneCU.getClassByName(className) .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className)); setConstructor(compilationDTO, modelTemplate); Map<String, String> toReturn = new HashMap<>(); String fullClassName = compilationDTO.getPackageCanonicalClassName(); toReturn.put(fullClassName, cloneCU.toString()); return toReturn; }
// The sources map for a scorecard model must contain exactly one generated class.
@Test void getKiePMMLScorecardModelSourcesMap() { final DataDictionary dataDictionary = pmml.getDataDictionary(); final Map<String, KiePMMLOriginalTypeGeneratedType> fieldTypeMap = getFieldTypeMap(dataDictionary, pmml.getTransformationDictionary(), scorecardModel.getLocalTransformations()); final CommonCompilationDTO<Scorecard> compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, scorecardModel, new PMMLCompilationContextMock(), "FILENAME"); final DroolsCompilationDTO<Scorecard> droolsCompilationDTO = DroolsCompilationDTO.fromCompilationDTO(compilationDTO, fieldTypeMap); Map<String, String> retrieved = KiePMMLScorecardModelFactory.getKiePMMLScorecardModelSourcesMap(droolsCompilationDTO); assertThat(retrieved).isNotNull(); assertThat(retrieved).hasSize(1); }
// Creates a mutable copy of this B-tree and marks the current root loggable as expired,
// since the copy will supersede it on save.
@Override @NotNull public BTreeMutable getMutableCopy() { final BTreeMutable result = new BTreeMutable(this); result.addExpiredLoggable(rootLoggable); return result; }
// Appends 1000 entries via putRight (forcing right-side splits), then verifies the tree
// before save, after save, and after reopening from the saved root address.
@Test public void testPutRightSplitRight() { int s = 1000; tm = new BTreeEmpty(log, createTestSplittingPolicy(), true, 1).getMutableCopy(); for (int i = 0; i < s; i++) { getTreeMutable().putRight(kv(i, "v" + i)); } checkTree(getTreeMutable(), s).run(); long rootAddress = saveTree(); checkTree(getTreeMutable(), s).run(); reopen(); t = new BTree(log, rootAddress, true, 1); checkTree(getTree(), s).run(); }
/**
 * Returns {@code true} when the given host string cannot serve as a usable local host
 * address: it is null or empty, the literal "localhost" (case-insensitive), the any-host
 * address, or any loopback address starting with "127.".
 *
 * @param host host name or IP literal to check; may be null
 * @return true if the host is not a valid, routable local host value
 */
public static boolean isInvalidLocalHost(String host) {
    return host == null
            || host.isEmpty() // idiomatic replacement for length() == 0
            || host.equalsIgnoreCase(LOCALHOST_KEY)
            || host.equals(ANYHOST_VALUE)
            || host.startsWith("127.");
}
// Covers all invalid cases (null, empty, localhost, 0.0.0.0, 127.* loopbacks) plus one
// valid routable address.
@Test void testIsInvalidLocalHost() { assertTrue(NetUtils.isInvalidLocalHost(null)); assertTrue(NetUtils.isInvalidLocalHost("")); assertTrue(NetUtils.isInvalidLocalHost("localhost")); assertTrue(NetUtils.isInvalidLocalHost("0.0.0.0")); assertTrue(NetUtils.isInvalidLocalHost("127.1.2.3")); assertTrue(NetUtils.isInvalidLocalHost("127.0.0.1")); assertFalse(NetUtils.isInvalidLocalHost("128.0.0.1")); }
// Synchronous wrapper: blocks on the async union of this set with the named sets.
@Override public Collection<V> readUnion(String... names) { return get(readUnionAsync(names)); }
// Union of two scored sorted sets must contain each member exactly once, regardless of scores.
@Test public void testReadUnion() { RScoredSortedSet<String> set1 = redisson.getScoredSortedSet("simple1"); set1.add(1, "one"); set1.add(2, "two"); set1.add(4, "four"); RScoredSortedSet<String> set2 = redisson.getScoredSortedSet("simple2"); set2.add(1, "one"); set2.add(2, "two"); set2.add(3, "three"); RScoredSortedSet<String> out = redisson.getScoredSortedSet("simple1"); assertThat(out.readUnion(set1.getName(), set2.getName())).containsOnly("one", "two", "three", "four"); }
// Static factory for the List-view transform with default settings.
public static <T> AsList<T> asList() { return new AsList<>(null, false); }
// The List side-input view must be immutable inside a DoFn: clear/add/addAll/remove all
// throw UnsupportedOperationException, while iteration still works.
@Test
@Category(ValidatesRunner.class)
public void testListSideInputIsImmutable() {
    final PCollectionView<List<Integer>> view =
        pipeline.apply("CreateSideInput", Create.of(11)).apply(View.asList());
    PCollection<Integer> output =
        pipeline
            .apply("CreateMainInput", Create.of(29))
            .apply(
                "OutputSideInputs",
                ParDo.of(
                        new DoFn<Integer, Integer>() {
                            @ProcessElement
                            public void processElement(ProcessContext c) {
                                try {
                                    c.sideInput(view).clear();
                                    fail("Expected UnsupportedOperationException on clear()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                try {
                                    c.sideInput(view).add(4);
                                    fail("Expected UnsupportedOperationException on add()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                try {
                                    c.sideInput(view).addAll(new ArrayList<>());
                                    fail("Expected UnsupportedOperationException on addAll()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                try {
                                    c.sideInput(view).remove(0);
                                    fail("Expected UnsupportedOperationException on remove()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                for (Integer i : c.sideInput(view)) {
                                    c.output(i);
                                }
                            }
                        })
                    .withSideInputs(view));

    // Pass at least one value through to guarantee that DoFn executes.
    PAssert.that(output).containsInAnyOrder(11);
    pipeline.run();
}
// Enables/disables global ordering; rejects enabling it while multi-threading is active,
// since the two settings are mutually exclusive. Returns this for chaining.
public TopicConfig setGlobalOrderingEnabled(boolean globalOrderingEnabled) { if (this.multiThreadingEnabled && globalOrderingEnabled) { throw new IllegalArgumentException("Global ordering can not be enabled when multi-threading is used."); } this.globalOrderingEnabled = globalOrderingEnabled; return this; }
// Enabling global ordering must work on its own, and enabling multi-threading afterwards
// must be rejected without flipping the multi-threading flag.
@Test
public void testSetGlobalOrderingEnabled() {
    TopicConfig topicConfig = new TopicConfig().setGlobalOrderingEnabled(true);
    assertTrue(topicConfig.isGlobalOrderingEnabled());
    try {
        topicConfig.setMultiThreadingEnabled(true);
        // NOTE(review): assertTrue(message, false) is an anti-pattern — prefer fail(message).
        assertTrue("multi-threading must be disabled when global-ordering is enabled", false);
    } catch (IllegalArgumentException e) {
        // anticipated..
    }
    assertFalse(topicConfig.isMultiThreadingEnabled());
}
// Audit hook: for DML statements touching at least one sharding table, requires that the
// statement yields non-empty sharding conditions, otherwise raises
// DMLWithoutShardingKeyException. Non-DML statements are ignored.
@Override public void check(final SQLStatementContext sqlStatementContext, final List<Object> params, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database) { if (sqlStatementContext.getSqlStatement() instanceof DMLStatement) { ShardingRule rule = database.getRuleMetaData().getSingleRule(ShardingRule.class); if (((TableAvailable) sqlStatementContext).getTablesContext().getTableNames().stream().anyMatch(rule::isShardingTable)) { ShardingSpherePreconditions.checkNotEmpty( new ShardingConditionEngine(globalRuleMetaData, database, rule).createShardingConditions(sqlStatementContext, params), DMLWithoutShardingKeyException::new); } } }
// A DML statement on a sharding table with no derivable sharding conditions must be rejected.
@Test void assertEmptyShardingConditionsCheck() { when(sqlStatementContext.getSqlStatement()).thenReturn(mock(DMLStatement.class)); when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singletonList(rule))); when(rule.isShardingTable("t_order")).thenReturn(true); assertThrows(DMLWithoutShardingKeyException.class, () -> shardingAuditAlgorithm.check(sqlStatementContext, Collections.emptyList(), mock(RuleMetaData.class), database)); }
// gRPC handler: collects JobInfo protos for every known invocation and streams a single
// response; unexpected failures are logged and surfaced as an INTERNAL status.
@Override public void getJobs(GetJobsRequest request, StreamObserver<GetJobsResponse> responseObserver) { LOG.trace("{} {}", GetJobsRequest.class.getSimpleName(), request); try { List<JobInfo> result = new ArrayList<>(); for (JobInvocation invocation : invocations.values()) { result.add(invocation.toProto()); } GetJobsResponse response = GetJobsResponse.newBuilder().addAllJobInfo(result).build(); responseObserver.onNext(response); responseObserver.onCompleted(); } catch (Exception e) { LOG.error("Encountered Unexpected Exception", e); responseObserver.onError(Status.INTERNAL.withCause(e).asException()); } }
// After running one job, getJobs must report exactly that job with its id and name.
@Test public void testGetJobsIsSuccessful() throws Exception { prepareAndRunJob(); JobApi.GetJobsRequest request = JobApi.GetJobsRequest.newBuilder().build(); RecordingObserver<JobApi.GetJobsResponse> recorder = new RecordingObserver<>(); service.getJobs(request, recorder); assertThat(recorder.isSuccessful(), is(true)); assertThat(recorder.values, hasSize(1)); JobApi.GetJobsResponse response = recorder.values.get(0); List<JobApi.JobInfo> jobs = response.getJobInfoList(); assertThat(jobs, hasSize(1)); JobApi.JobInfo job = jobs.get(0); assertThat(job.getJobId(), is(TEST_JOB_ID)); assertThat(job.getJobName(), is(TEST_JOB_NAME)); }
// Static factory wrapping a Retry instance in an RxJava transformer.
public static <T> RetryTransformer<T> of(Retry retry) { return new RetryTransformer<>(retry); }
// An Observable whose first emission matches the retry-on-result predicate ("retry") must be
// resubscribed once and complete with the second value; metrics record one successful retry.
@Test public void retryOnResultUsingObservable() throws InterruptedException { RetryConfig config = RetryConfig.<String>custom() .retryOnResult("retry"::equals) .waitDuration(Duration.ofMillis(50)) .maxAttempts(3).build(); Retry retry = Retry.of("testName", config); given(helloWorldService.returnHelloWorld()) .willReturn("retry") .willReturn("success"); Observable.fromCallable(helloWorldService::returnHelloWorld) .compose(RetryTransformer.of(retry)) .test() .await() .assertValueCount(1) .assertValue("success") .assertComplete() .assertSubscribed(); then(helloWorldService).should(times(2)).returnHelloWorld(); Retry.Metrics metrics = retry.getMetrics(); assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero(); assertThat(metrics.getNumberOfSuccessfulCallsWithRetryAttempt()).isEqualTo(1); }
// Updates a product SPU and its SKUs atomically; any failure rolls back the whole change.
@Override
@Transactional(rollbackFor = Exception.class)
public void updateSpu(ProductSpuSaveReqVO updateReqVO) {
  // Verify the SPU exists
  validateSpuExists(updateReqVO.getId());
  // Validate category and brand
  validateCategory(updateReqVO.getCategoryId());
  brandService.validateProductBrand(updateReqVO.getBrandId());
  // Validate the SKUs against the declared spec type
  List<ProductSkuSaveReqVO> skuSaveReqList = updateReqVO.getSkus();
  productSkuService.validateSkuList(skuSaveReqList, updateReqVO.getSpecType());
  // Update the SPU (aggregate fields derived from its SKUs first)
  ProductSpuDO updateObj = BeanUtils.toBean(updateReqVO, ProductSpuDO.class);
  initSpuFromSkus(updateObj, skuSaveReqList);
  productSpuMapper.updateById(updateObj);
  // Batch-update the SKUs
  productSkuService.updateSkuList(updateObj.getId(), updateReqVO.getSkus());
}
// Updating a random (non-existent) SPU id must fail the existence validation.
@Test
public void testValidateSpuExists_exception() {
  ProductSpuSaveReqVO reqVO = randomPojo(ProductSpuSaveReqVO.class);
  // Invoke and expect the service exception
  Assertions.assertThrows(ServiceException.class, () -> productSpuService.updateSpu(reqVO));
}
// This implementation never relies on local files.
@Override
public boolean usesLocalFiles() {
  return false;
}
// usesLocalFiles() must report false for this metadata implementation.
@Test
void assertUsesLocalFiles() {
  assertFalse(metaData.usesLocalFiles());
}
public static void deleteDirectoryQuietly(File directory) { if (directory == null) { return; } // delete and do not report if it fails try { deleteDirectory(directory); } catch (Exception ignored) { } }
// deleteDirectoryQuietly must tolerate null, a missing directory, and an undeletable tree.
@Test
void testDeleteQuietly() throws Exception {
  // should ignore the call
  FileUtils.deleteDirectoryQuietly(null);
  File doesNotExist = TempDirUtils.newFolder(temporaryFolder, "abc");
  FileUtils.deleteDirectoryQuietly(doesNotExist);
  File cannotDeleteParent = TempDirUtils.newFolder(temporaryFolder);
  File cannotDeleteChild = new File(cannotDeleteParent, "child");
  try {
    assertThat(cannotDeleteChild.createNewFile()).isTrue();
    assertThat(cannotDeleteParent.setWritable(false)).isTrue();
    assertThat(cannotDeleteChild.setWritable(false)).isTrue();
    // Must not throw even though deletion will fail.
    FileUtils.deleteDirectoryQuietly(cannotDeleteParent);
  } finally {
    // Restore permissions so the temp-folder cleanup can succeed.
    //noinspection ResultOfMethodCallIgnored
    cannotDeleteParent.setWritable(true);
    //noinspection ResultOfMethodCallIgnored
    cannotDeleteChild.setWritable(true);
  }
}
/**
 * Registers the default configuration first, then scans for and registers Shenyu client beans.
 */
@Override
public void registerBeanDefinitions(final AnnotationMetadata metadata, final BeanDefinitionRegistry registry) {
  registerDefaultConfiguration(metadata, registry);
  registerShenyuClients(metadata, registry);
}
// Boots a minimal Feign/Spring context and checks that the generated Shenyu client proxy
// targets the expected name/url/type, and that calls go through the load-balancer delegate
// and the discovery client.
@Test
@DisabledForJreRange(min = JRE.JAVA_16)
public void registerBeanDefinitionsTest() throws IOException {
  AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
  ((DefaultListableBeanFactory) context.getBeanFactory()).setAllowBeanDefinitionOverriding(false);
  context.register(FeignAutoConfiguration.class);
  context.register(FeignClientsConfiguration.class);
  //context.register(HttpClientConfiguration.class);
  context.register(ShenyuTestConfig.class);
  context.register(HttpMessageConvertersAutoConfiguration.class);
  when(feignBlockingLoadBalancerClient.getDelegate()).thenReturn(delegate);
  context.registerBean(FeignBlockingLoadBalancerClient.class, () -> feignBlockingLoadBalancerClient);
  final RegisterConfig config = spy(RegisterConfig.class);
  when(config.getServerLists()).thenReturn("localhost:1234");
  final ShenyuDiscoveryClient shenyuDiscoveryClient = spy(new ShenyuDiscoveryClient(config));
  context.registerBean(ShenyuDiscoveryClient.class, () -> shenyuDiscoveryClient);
  context.refresh();
  final ShenyuClientsRegistrarTest.ShenyuApiClient apiClient = context.getBean(ShenyuClientsRegistrarTest.ShenyuApiClient.class);
  // Peek into the JDK proxy to validate the hard-coded Feign target.
  Object invocationHandler = ReflectionTestUtils.getField(apiClient, "h");
  assertNotNull(invocationHandler);
  Target.HardCodedTarget factoryBean = (Target.HardCodedTarget) ReflectionTestUtils.getField(invocationHandler, "target");
  assertNotNull(factoryBean);
  assertEquals(factoryBean.name(), "shenyuApiClient");
  assertEquals(factoryBean.url(), "http://shenyuApiClient/dev/api");
  assertEquals(factoryBean.type(), ShenyuApiClient.class);
  final Response respSpy = spy(Response.builder()
      .body("1", StandardCharsets.UTF_8)
      .status(HttpStatus.OK.value())
      .request(Request.create(Request.HttpMethod.POST, "/dev/null", Maps.newHashMap(), null, null, null))
      .build());
  when(delegate.execute(any(), any())).thenReturn(respSpy);
  // The stubbed response body cannot be decoded to the declared return type.
  assertThrowsExactly(DecodeException.class, () -> apiClient.del("id"));
  verify(delegate, times(1)).execute(any(), any());
  verify(shenyuDiscoveryClient, times(1)).getInstance(anyString());
}
/**
 * Exposes this plugin's runtime statistics: the timer-table size plus the summarized
 * task count and min/max/total/average task times.
 */
@Override
public PluginRuntime getPluginRuntime() {
  Summary summary = summarize();
  return new PluginRuntime(getId())
      .addInfo("timerCount", timerTable.length)
      .addInfo("taskCount", summary.getTaskCount())
      .addInfo("minTaskTime", summary.getMinTaskTimeMillis() + "ms")
      .addInfo("maxTaskTime", summary.getMaxTaskTimeMillis() + "ms")
      .addInfo("totalTaskTime", summary.getTotalTaskTimeMillis() + "ms")
      .addInfo("avgTaskTime", summary.getAvgTaskTimeMillis() + "ms");
}
// A freshly created plugin must still yield a non-null runtime snapshot.
@Test
public void testGetRuntime() {
  Assert.assertNotNull(new TaskTimeRecordPlugin().getPluginRuntime());
}
/**
 * Reads a manifest attribute from the given jar file.
 *
 * @param aJarFile the jar file to open
 * @param key      the manifest attribute name
 * @return the attribute value, or {@code null} if the jar cannot be read
 */
public static String getManifestKey(File aJarFile, String key) {
  try (JarFile jarFile = new JarFile(aJarFile)) {
    return getManifestKey(jarFile, key);
  } catch (IOException ioe) {
    // Unreadable jar: log and fall back to null rather than propagating.
    LOG.error("Exception while trying to read key {} from manifest of {}", key, aJarFile, ioe);
    return null;
  }
}
// Reads a known attribute from a fixture jar's manifest.
@Test
public void shouldGetManifestKey() throws Exception {
  String manifestKey = JarUtil.getManifestKey(new File(PATH_WITH_HASHES + "test-agent.jar"), "Go-Agent-Bootstrap-Class");
  assertThat(manifestKey, is("com.thoughtworks.go.HelloWorldStreamWriter"));
}
/**
 * Converts the given value to an {@code Integer}, quietly falling back to
 * {@code defaultValue} when conversion is not possible.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted integer, or {@code defaultValue}
 */
public static Integer toInt(Object value, Integer defaultValue) {
  return convertQuietly(Integer.class, value, defaultValue);
}
// Converting an enum constant yields its numeric value (BuildingType.CUO -> 1).
@Test
public void enumToIntTest() {
  final Integer integer = Convert.toInt(BuildingType.CUO);
  assertEquals(1, integer.intValue());
}
/**
 * Creates a schema instance for the given primitive type.
 *
 * @param type the primitive schema type
 * @return a new schema of the matching primitive kind
 * @throws AvroRuntimeException if the type is not one of the primitive kinds
 */
public static Schema create(Type type) {
  switch (type) {
  case STRING:
    return new StringSchema();
  case BYTES:
    return new BytesSchema();
  case INT:
    return new IntSchema();
  case LONG:
    return new LongSchema();
  case FLOAT:
    return new FloatSchema();
  case DOUBLE:
    return new DoubleSchema();
  case BOOLEAN:
    return new BooleanSchema();
  case NULL:
    return new NullSchema();
  default:
    // Complex types (RECORD, ARRAY, MAP, ...) must be built through their own factories.
    throw new AvroRuntimeException("Can't create a: " + type);
  }
}
// A float field default must round-trip through both the field API and GenericData.
@Test
void floatDefaultValue() {
  Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1.0f);
  assertTrue(field.hasDefaultValue());
  assertEquals(1.0f, field.defaultVal());
  assertEquals(1.0f, GenericData.get().getDefaultValue(field));
}
/**
 * Returns the ids of all index sets that reference the given profile.
 *
 * @param profileId the profile ObjectId in hex form
 * @return referencing index-set ids; empty when the id is malformed or unused
 */
public Set<String> usagesOfProfile(final String profileId) {
  // Malformed ObjectIds can never match anything; skip the query entirely.
  if (!ObjectId.isValid(profileId)) {
    return Set.of();
  }
  Set<String> usagesInIndexSet = new HashSet<>();
  indexSetsCollection
      .find(Filters.eq(FIELD_PROFILE_ID, profileId))
      .projection(Projections.include(INDEX_SET_ID))
      .map(document -> document.getObjectId(INDEX_SET_ID).toString())
      .into(usagesInIndexSet);
  return usagesInIndexSet;
}
// Usages are returned per profile id; unused or malformed ids yield an empty set.
@Test
public void testReturnsProperUsagesForSingleProfile() {
  assertEquals(Set.of("000000000000000000000001", "000000000000000000000011"), toTest.usagesOfProfile(PROFILE1_ID));
  assertEquals(Set.of("000000000000000000000002"), toTest.usagesOfProfile(PROFILE2_ID));
  assertEquals(Set.of(), toTest.usagesOfProfile(UNUSED_PROFILE_ID));
  assertEquals(Set.of(), toTest.usagesOfProfile(WRONG_PROFILE_ID));
}
/**
 * Registers a table provider for its table type, rejecting duplicates.
 * The provider's tables are initialized before the provider becomes visible.
 *
 * @throws IllegalArgumentException if a provider is already registered for the type
 */
@Override
public void registerProvider(TableProvider provider) {
  if (providers.containsKey(provider.getTableType())) {
    throw new IllegalArgumentException(
        "Provider is already registered for table type: " + provider.getTableType());
  }
  initTablesFromProvider(provider);
  this.providers.put(provider.getTableType(), provider);
}
// Registering a second provider makes both it and the pre-existing "text" provider visible,
// along with the mock provider's two tables.
@Test
public void testRegisterProvider() throws Exception {
  store.registerProvider(new MockTableProvider("mock", "hello", "world"));
  assertNotNull(store.getProviders());
  assertEquals(2, store.getProviders().size());
  assertEquals("text", store.getProviders().get("text").getTableType());
  assertEquals("mock", store.getProviders().get("mock").getTableType());
  assertEquals(2, store.getTables().size());
}
/**
 * Finds the query encryptor configured for the given column.
 *
 * @param columnName the column name to look up
 * @return the query encryptor, or empty when the column is not encrypted
 */
@HighFrequencyInvocation
public Optional<EncryptAlgorithm> findQueryEncryptor(final String columnName) {
    return isEncryptColumn(columnName)
            ? Optional.of(getEncryptColumn(columnName).getQueryEncryptor())
            : Optional.empty();
}
// An unknown column yields Optional.empty rather than an exception.
@Test
void assertFindQueryEncryptorWithNotEncryptColumn() {
  assertThat(encryptTable.findQueryEncryptor("invalidColumn"), is(Optional.empty()));
}
// Simple accessor for the configured SNMPv3 authentication protocol.
@Override
public String getAuthProtocol() {
  return authProtocol;
}
// The getter must return the protocol the device was constructed with.
@Test
public void testGetAuthProtocol() {
  assertEquals(authProtocol, defaultSnmpv3Device.getAuthProtocol());
}
/**
 * Serializes these parameters back into the command-line argument list understood by
 * batch task workers.
 *
 * <p>The four required key/value flags (cluster limit, cluster start delay, bench
 * timeout, start time) are always emitted; optional flags are appended only when they
 * differ from their defaults or are enabled.
 *
 * @return the argument list reproducing this configuration
 */
public List<String> toBatchTaskArgumentString() {
  List<String> res = new ArrayList<>(Arrays.asList(
      CLUSTER_LIMIT_FLAG, String.valueOf(mClusterLimit),
      CLUSTER_START_DELAY_FLAG, mClusterStartDelay,
      BENCH_TIMEOUT, mBenchTimeout,
      START_MS_FLAG, String.valueOf(mStartMs)));
  if (!mProfileAgent.isEmpty()) {
    res.add(PROFILE_AGENT);
    res.add(mProfileAgent);
  }
  if (!mId.equals(DEFAULT_TASK_ID)) {
    res.add(ID_FLAG);
    res.add(mId);
  }
  if (!mIndex.equals(DEFAULT_TASK_ID)) {
    res.add(INDEX_FLAG);
    res.add(mIndex);
  }
  // Iterating an empty list is a no-op, so no emptiness guard is needed here.
  for (String opt : mJavaOpts) {
    res.add(JAVA_OPT_FLAG);
    res.add(opt);
  }
  if (mCluster) {
    res.add(CLUSTER_FLAG);
  }
  if (mDistributed) {
    res.add(DISTRIBUTED_FLAG);
  }
  if (mInProcess) {
    res.add(IN_PROCESS_FLAG);
  }
  if (mHelp) {
    res.add(HELP_FLAG);
  }
  return res;
}
// With no CLI input, only the four required key/value flag pairs (8 tokens) are emitted.
@Test
public void parseParametersToArgumentEmpty() throws Exception {
  // with an empty input, the output should be default value of base parameters
  String[] inputArgs = new String[0];
  JCommander jc = new JCommander(this);
  jc.parse(inputArgs);
  List<String> outputArgs = mBaseParameter.toBatchTaskArgumentString();
  String[] defaultArgs = new String[]{
      // keys with values (4 pairs)
      "--cluster-limit", "0",
      "--cluster-start-delay", "10s",
      "--bench-timeout", "20m",
      "--start-ms", "-1",
  };
  assertEquals(outputArgs.size(), 8);
  // the two special parameters should not be parsed
  assertFalse(outputArgs.contains(PROFILE_AGENT));
  assertFalse(outputArgs.contains(ID_FLAG));
  validateOutput(Arrays.asList(defaultArgs), outputArgs, 4);
}
/**
 * Lists the children of the given directory on the local filesystem, notifying the
 * listener incrementally as entries are discovered.
 *
 * @throws BackgroundException if the directory does not exist or cannot be read
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
  final AttributedList<ch.cyberduck.core.Path> paths = new AttributedList<>();
  final java.nio.file.Path p = session.toPath(directory);
  if(!Files.exists(p)) {
    throw new LocalExceptionMappingService().map("Listing directory {0} failed",
        new NoSuchFileException(directory.getAbsolute()), directory);
  }
  try (DirectoryStream<java.nio.file.Path> stream = Files.newDirectoryStream(p)) {
    for(java.nio.file.Path n : stream) {
      // Root-like entries have no file name component; skip them.
      if(null == n.getFileName()) {
        continue;
      }
      try {
        final PathAttributes attributes = feature.toAttributes(n);
        final EnumSet<Path.Type> type = EnumSet.noneOf(Path.Type.class);
        if(Files.isDirectory(n)) {
          type.add(Path.Type.directory);
        }
        else {
          type.add(Path.Type.file);
        }
        final Path file = new Path(directory, n.getFileName().toString(), type, attributes);
        // Only include entries accepted by the post filter; report progress per chunk.
        if(this.post(n, file)) {
          paths.add(file);
          listener.chunk(directory, paths);
        }
      }
      catch(IOException e) {
        // Unreadable attributes: skip this entry but keep listing the rest.
        log.warn(String.format("Failure reading attributes for %s", n));
      }
    }
  }
  catch(IOException ex) {
    throw new LocalExceptionMappingService().map("Listing directory {0} failed", ex, directory);
  }
  return paths;
}
// On non-POSIX filesystems (Windows), listing a different drive must succeed.
@Test
public void testDifferentDrive() throws Exception {
  final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname()));
  if(!session.isPosixFilesystem()) {
    assertNotNull(session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()));
    assertTrue(session.isConnected());
    assertNotNull(session.getClient());
    session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path test = new Path("/D:/", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final AttributedList<Path> list = new LocalListService(session).list(test, new DisabledListProgressListener());
    assertNotSame(AttributedList.emptyList(), list);
  }
  session.close();
}
@SuppressWarnings("fallthrough") public static int murmur2(final byte[] data) { int length = data.length; int seed = 0x9747b28c; // 'm' and 'r' are mixing constants generated offline. // They're not really 'magic', they just happen to work well. final int m = 0x5bd1e995; final int r = 24; // Initialize the hash to a random value int h = seed ^ length; int length4 = length / 4; for (int i = 0; i < length4; i++) { final int i4 = i * 4; int k = (data[i4 + 0] & 0xff) + ((data[i4 + 1] & 0xff) << 8) + ((data[i4 + 2] & 0xff) << 16) + ((data[i4 + 3] & 0xff) << 24); k *= m; k ^= k >>> r; k *= m; h *= m; h ^= k; } // Handle the last few bytes of the input array switch (length % 4) { case 3: h ^= (data[(length & ~3) + 2] & 0xff) << 16; case 2: h ^= (data[(length & ~3) + 1] & 0xff) << 8; case 1: h ^= data[length & ~3] & 0xff; h *= m; } h ^= h >>> 13; h *= m; h ^= h >>> 15; return h; }
// Pins murmur2 output for known inputs to guard against accidental algorithm changes.
@Test
public void testMurmur2() {
  Map<byte[], Integer> cases = new java.util.HashMap<>();
  cases.put("21".getBytes(), -973932308);
  cases.put("foobar".getBytes(), -790332482);
  cases.put("a-little-bit-long-string".getBytes(), -985981536);
  cases.put("a-little-bit-longer-string".getBytes(), -1486304829);
  cases.put("lkjh234lh9fiuh90y23oiuhsafujhadof229phr9h19h89h8".getBytes(), -58897971);
  cases.put(new byte[] {'a', 'b', 'c'}, 479470107);
  for (Map.Entry<byte[], Integer> c : cases.entrySet()) {
    assertEquals(c.getValue().intValue(), murmur2(c.getKey()));
  }
}
/**
 * Routes a replica migration event to the wrapped listener: the two sentinel partition
 * ids signal process start/finish, any other id reports a per-replica result.
 */
@Override
public void onEvent(ReplicaMigrationEvent event) {
    final int partitionId = event.getPartitionId();
    if (partitionId == MIGRATION_STARTED_PARTITION_ID) {
        migrationListener.migrationStarted(event.getMigrationState());
    } else if (partitionId == MIGRATION_FINISHED_PARTITION_ID) {
        migrationListener.migrationFinished(event.getMigrationState());
    } else if (event.isSuccess()) {
        migrationListener.replicaMigrationCompleted(event);
    } else {
        migrationListener.replicaMigrationFailed(event);
    }
}
// A non-sentinel, unsuccessful event must invoke only replicaMigrationFailed.
@Test
public void test_migrationFailed() {
  MigrationState migrationSchedule = new MigrationStateImpl();
  ReplicaMigrationEvent event = new ReplicaMigrationEventImpl(migrationSchedule, 0, 0, null, null, false, 0L);
  adapter.onEvent(event);
  verify(listener, never()).migrationStarted(any(MigrationState.class));
  verify(listener, never()).migrationFinished(any(MigrationState.class));
  verify(listener, never()).replicaMigrationCompleted(any(ReplicaMigrationEvent.class));
  verify(listener).replicaMigrationFailed(event);
}
/**
 * Attaches the given sink to this global stream and returns a configurable handle
 * for the resulting sink transformation.
 */
@Override
public ProcessConfigurable<?> toSink(Sink<T> sink) {
  DataStreamV2SinkTransformation<T, T> sinkTransformation =
      StreamUtils.addSinkOperator(this, sink, getType());
  // Operator parallelism should always be 1 for global stream.
  // parallelismConfigured should be true to avoid overwritten by AdaptiveBatchScheduler.
  sinkTransformation.setParallelism(1, true);
  return StreamUtils.wrapWithConfigureHandle(
      new GlobalStreamImpl<>(environment, sinkTransformation));
}
// Adding a sink must register exactly one DataStreamV2SinkTransformation on the environment.
@Test
void testToSink() throws Exception {
  ExecutionEnvironmentImpl env = StreamTestUtils.getEnv();
  GlobalStreamImpl<Integer> stream =
      new GlobalStreamImpl<>(env, new TestingTransformation<>("t1", Types.INT, 1));
  stream.toSink(DataStreamV2SinkUtils.wrapSink(new DiscardingSink<>()));
  List<Transformation<?>> transformations = env.getTransformations();
  assertThat(transformations)
      .hasSize(1)
      .element(0)
      .isInstanceOf(DataStreamV2SinkTransformation.class);
}
/**
 * Sets the given property only when its current value is missing or blank,
 * leaving any existing non-blank value untouched.
 *
 * @param key   the property key
 * @param value the default value to install
 */
public void setDefault(String key, String value) {
    if (StringUtils.isBlank(properties.getProperty(key))) {
        properties.setProperty(key, value);
    }
}
// setDefault must not overwrite existing values, only fill in missing keys.
@Test
public void setDefault() {
  Properties p = new Properties();
  p.setProperty("foo", "foo_value");
  Props props = new Props(p);
  props.setDefault("foo", "foo_def");
  props.setDefault("bar", "bar_def");
  assertThat(props.value("foo")).isEqualTo("foo_value");
  assertThat(props.value("bar")).isEqualTo("bar_def");
  assertThat(props.value("other")).isNull();
}
/**
 * Parses the given string as a double-valued math argument.
 *
 * @param value the textual representation of the number
 * @return the parsed argument value
 * @throws RuntimeException if the string is not a valid double; the original
 *         {@code NumberFormatException} is kept as the cause
 */
public static TbMathArgumentValue fromString(String value) {
    try {
        return new TbMathArgumentValue(Double.parseDouble(value));
    } catch (NumberFormatException ne) {
        // Preserve the parse failure as the cause instead of discarding it.
        throw new RuntimeException("Can't convert value '" + value + "' to double!", ne);
    }
}
// A non-numeric string must raise a RuntimeException carrying a message.
@Test
public void test_fromString_then_failure() {
  var value = "Test";
  Throwable thrown = assertThrows(RuntimeException.class, () -> TbMathArgumentValue.fromString(value));
  Assertions.assertNotNull(thrown.getMessage());
}
/**
 * Renders the given AST back into SQL text, with trailing newlines stripped.
 *
 * @param root the AST node to format
 * @return the formatted SQL statement
 */
public static String formatSql(final AstNode root) {
  final StringBuilder builder = new StringBuilder();
  new Formatter(builder).process(root, 0);
  return StringUtils.stripEnd(builder.toString(), "\n");
}
// DROP STREAM without IF EXISTS / DELETE TOPIC formats to the bare statement.
@Test
public void shouldFormatDropStreamStatement() {
  // Given:
  final DropStream dropStream = new DropStream(SOMETHING, false, false);
  // When:
  final String formatted = SqlFormatter.formatSql(dropStream);
  // Then:
  assertThat(formatted, is("DROP STREAM SOMETHING"));
}
/**
 * Returns the current date from the system clock in the default time zone,
 * wrapped as a successful FEEL function result.
 */
public FEELFnResult<TemporalAccessor> invoke() {
  return FEELFnResult.ofResult( LocalDate.now() );
}
// today() must return the current local date.
// NOTE(review): could flake if run exactly at midnight — consider injecting a Clock.
@Test
void invoke() {
  FunctionTestUtil.assertResult(todayFunction.invoke(), LocalDate.now());
}
/**
 * Renders this owner token against a route unit: when the owner is the table itself,
 * substitute the routed actual table name; otherwise fall back to the plain rendering.
 */
@Override
public String toString(final RouteUnit routeUnit) {
  if (null != ownerName && !Strings.isNullOrEmpty(ownerName.getValue()) && tableName.getValue().equals(ownerName.getValue())) {
    Set<String> actualTableNames = routeUnit.getActualTableNames(tableName.getValue());
    // No routed name available: use the lower-cased logical table name instead.
    String actualTableName = actualTableNames.isEmpty() ? tableName.getValue().toLowerCase() : actualTableNames.iterator().next();
    return tableName.getQuoteCharacter().wrap(actualTableName) + ".";
  }
  return toString();
}
// When the owner differs from the table name, the token renders the owner verbatim.
@Test
void assertOwnerTokenWithNoRouteUnitAndOwnerNameNotEqualsTableName() {
  OwnerToken ownerToken = new OwnerToken(0, 1, new IdentifierValue("ud"), new IdentifierValue("t_user_detail"));
  assertThat(ownerToken.toString(), is("ud."));
  assertTokenGrid(ownerToken);
}
/**
 * Locates the file whose offset range contains the given offset.
 *
 * <p>First tries a direct index computation; if that misses (e.g. gaps in the file
 * list), falls back to a linear scan. Returns the first file when requested and the
 * offset cannot be matched, otherwise {@code null}.
 *
 * @param offset the absolute offset to locate
 * @param returnFirstIfNotFound whether to return the first file instead of null on a miss
 * @return the matching file, the first file, or {@code null}
 */
public AbstractFile findFileByOffset(final long offset, final boolean returnFirstIfNotFound) {
  this.readLock.lock();
  try {
    if (this.files.size() == 0) return null;
    final AbstractFile firstAbstractFile = getFirstFile();
    final AbstractFile lastAbstractFile = getLastFile();
    if (firstAbstractFile != null && lastAbstractFile != null) {
      // Offset outside the covered range: log and fall through to the not-found handling.
      if (offset < firstAbstractFile.getFileFromOffset()
          || offset >= lastAbstractFile.getFileFromOffset() + this.fileSize) {
        LOG.warn(
            "Offset not matched. Request offset: {}, firstOffset: {}, lastOffset: {}, fileSize: {}, fileNums: {}",
            offset,
            firstAbstractFile.getFileFromOffset(),
            lastAbstractFile.getFileFromOffset() + this.fileSize,
            this.fileSize,
            this.files.size());
      } else {
        // Locate the index
        final int index =
            (int) ((offset / this.fileSize) - (firstAbstractFile.getFileFromOffset() / this.fileSize));
        AbstractFile targetFile;
        targetFile = this.files.get(index);
        if (targetFile != null
            && offset >= targetFile.getFileFromOffset()
            && offset < targetFile.getFileFromOffset() + this.fileSize) {
          return targetFile;
        }
        // If pre not found , then traverse to find target file
        for (final AbstractFile abstractFile : this.files) {
          if (offset >= abstractFile.getFileFromOffset()
              && offset < abstractFile.getFileFromOffset() + this.fileSize) {
            return abstractFile;
          }
        }
      }
      if (returnFirstIfNotFound) {
        return firstAbstractFile;
      }
    }
  } catch (final Exception e) {
    LOG.error("Error on find abstractFile by offset :{}, file type:{}", offset, this.fileType.getFileName(), e);
  } finally {
    this.readLock.unlock();
  }
  return null;
}
// Offsets inside the first and second files must resolve to those files respectively.
@Test
public void testFindAbstractFileByOffset() {
  writeDataToSecondFile();
  {
    // Test find first file by offset 0
    final AbstractFile firstFile = this.fileManager.findFileByOffset(30, false);
    assertEquals(firstFile.getFileFromOffset(), 0);
    // Test find second file by offset 136
    final AbstractFile secondFile = this.fileManager.findFileByOffset(this.indexFileSize + 10, false);
    assertEquals(secondFile.getFileFromOffset(), this.indexFileSize);
  }
}
/**
 * Checks that the current user holds the permission required by each requested
 * repository operation. Operations without a permission mapping are deliberately
 * allowed (no default branch).
 *
 * @throws KettleException if a required permission is missing
 */
@Override
public void validateAction( RepositoryOperation... operations ) throws KettleException {
  for ( RepositoryOperation operation : operations ) {
    switch ( operation ) {
      case EXECUTE_TRANSFORMATION:
      case EXECUTE_JOB:
        checkOperationAllowed( EXECUTE_CONTENT_ACTION );
        break;
      case MODIFY_TRANSFORMATION:
      case MODIFY_JOB:
        checkOperationAllowed( CREATE_CONTENT_ACTION );
        break;
      case SCHEDULE_TRANSFORMATION:
      case SCHEDULE_JOB:
        checkOperationAllowed( SCHEDULE_CONTENT_ACTION );
        break;
      case MODIFY_DATABASE:
        checkOperationAllowed( MODIFY_DATABASE_ACTION );
        break;
      case SCHEDULER_EXECUTE:
        checkOperationAllowed( SCHEDULER_EXECUTE_ACTION );
        break;
    }
  }
}
// Granting CREATE_CONTENT permits MODIFY_TRANSFORMATION without an exception.
@Test
public void noExceptionThrown_WhenOperationIsAllowed_ExecuteOperation() throws Exception {
  setOperationPermissions( IAbsSecurityProvider.CREATE_CONTENT_ACTION, true );
  provider.validateAction( RepositoryOperation.MODIFY_TRANSFORMATION );
}
/**
 * Prints an empty line to the console.
 */
public static void log() {
  out.println();
}
// Exercises both the varargs overload and the (Object, params) template overload.
@Test
public void logTest2(){
  Console.log("a", "b", "c");
  Console.log((Object) "a", "b", "c");
}
/**
 * Creates a cached thread pool via the delegate factory and wraps it with
 * Micrometer metrics under this factory's name prefix.
 */
@Override
public ExecutorService newCachedThreadPool(ThreadFactory threadFactory) {
  ExecutorService executorService = threadPoolFactory.newCachedThreadPool(threadFactory);
  return ExecutorServiceMetrics.monitor(meterRegistry, executorService, name(prefix));
}
// The returned pool must be a metrics-wrapped TimedExecutorService registered with a timer.
@Test
public void testNewCachedThreadPool() {
  final ExecutorService executorService = instrumentedThreadPoolFactory.newCachedThreadPool(threadFactory);
  assertThat(executorService, is(notNullValue()));
  assertThat(executorService, is(instanceOf(TimedExecutorService.class)));
  Tags tags = Tags.of("name", "instrumented-delegate-1");
  inOrder.verify(registry, times(1)).timer("executor", tags);
}
/**
 * Compiles the given regular expression with default (no) flags.
 *
 * @param expression the regex to compile
 * @return the compiled pattern
 */
public static java.util.regex.Pattern compilePattern(String expression) {
  return compilePattern(expression, 0);
}
// A null expression must fail fast with a NullPointerException.
@Test
void testCompilePatternNull() {
  assertThrows(NullPointerException.class, () -> JMeterUtils.compilePattern(null));
}
/**
 * Delegates to the immutable underlying set, which throws
 * {@code UnsupportedOperationException}; the delegation exists for testability.
 */
@Override
public boolean retainAll(Collection<?> c) {
  // will throw UnsupportedOperationException; delegate anyway for testability
  return underlying().retainAll(c);
}
// retainAll must delegate to the underlying set and surface its unsupported-op behavior.
@Test
public void testDelegationOfUnsupportedFunctionRetainAll() {
  new PCollectionsHashSetWrapperDelegationChecker<>()
      .defineMockConfigurationForUnsupportedFunction(mock -> mock.retainAll(eq(Collections.emptyList())))
      .defineWrapperUnsupportedFunctionInvocation(wrapper -> wrapper.retainAll(Collections.emptyList()))
      .doUnsupportedFunctionDelegationCheck();
}
/**
 * Extracts the static routes from a parsed Junos route-information reply.
 *
 * @param cfg the XML configuration rooted above {@code route-information}
 * @return the static routes found across all route tables
 */
public static Collection<StaticRoute> parseRoutingTable(HierarchicalConfiguration cfg) {
  Collection<StaticRoute> staticRoutes = new HashSet<>();
  HierarchicalConfiguration routeInfo = cfg.configurationAt("route-information");
  List<HierarchicalConfiguration> routeTables = routeInfo.configurationsAt("route-table");
  for (HierarchicalConfiguration routeTable : routeTables) {
    List<HierarchicalConfiguration> routes = routeTable.configurationsAt("rt");
    for (HierarchicalConfiguration route : routes) {
      if (route != null) {
        List<HierarchicalConfiguration> rtEntries = route.configurationsAt("rt-entry");
        rtEntries.forEach(rtEntry -> {
          // Only keep entries whose protocol is marked Static.
          if (rtEntry.getString(PROTOCOL_NAME) != null
              && rtEntry.getString(PROTOCOL_NAME).contains("Static")) {
            parseStaticRoute(rtEntry, route.getString("rt-destination"), rtEntry.getString("metric"))
                .ifPresent(staticRoutes::add);
          }
        });
      }
    }
  }
  return staticRoutes;
}
// A Junos 18.4 reply fixture must yield exactly the expected default static route.
@Test
public void testStaticRoutesParsedFromJunos18() {
  HierarchicalConfiguration reply = XmlConfigParser.loadXml(
      getClass().getResourceAsStream("/Junos_get-route-information_response_18.4.xml"));
  final Collection<StaticRoute> expected = new HashSet<>();
  expected.add(new StaticRoute(Ip4Prefix.valueOf("0.0.0.0/0"), Ip4Address.valueOf("172.26.138.1"), false, 100));
  assertEquals(expected, JuniperUtils.parseRoutingTable(reply));
}
/**
 * Builds the access token from the Weibo response, additionally extracting the
 * mandatory {@code uid} field and wrapping both into a {@code WeiboToken}.
 *
 * @throws OAuthException if the response carries no usable uid
 */
@Override
protected OAuth2AccessToken createToken(String accessToken, String tokenType, Integer expiresIn,
    String refreshToken, String scope, JsonNode response, String rawResponse) {
  var token = super.createToken(accessToken, tokenType, expiresIn, refreshToken, scope, response, rawResponse);
  var uid = extractRequiredParameter(response, "uid", rawResponse).asText();
  if (uid == null || Pac4jConstants.EMPTY_STRING.equals(uid)) {
    throw new OAuthException(
        "There is no required UID in the response of the AssessToken endpoint.");
  }
  return new WeiboToken(token, uid);
}
// A response containing a uid must produce a WeiboToken exposing that uid.
@Test
public void createTokenHasUid() throws IOException {
  var accessToken = extractor.createToken("ACCESS_TOKEN", null, 123, null, null,
      mapper.readTree(responseOk), responseOk);
  Assert.assertEquals("ACCESS_TOKEN", accessToken.getAccessToken());
  assertTrue(accessToken instanceof WeiboToken);
  if (accessToken instanceof WeiboToken) {
    Assert.assertEquals("12341234", ((WeiboToken) accessToken).getUid());
  }
}
/**
 * Drops the partition metadata table and its two indexes, using the quoting style
 * required by the dialect (PostgreSQL vs GoogleSQL), and waits for the DDL to finish.
 *
 * @throws SpannerException on DDL failure or timeout (the underlying cause when available)
 */
public void deletePartitionMetadataTable() {
  List<String> ddl = new ArrayList<>();
  if (this.isPostgres()) {
    // PostgreSQL dialect requires double-quoted identifiers.
    ddl.add("DROP INDEX \"" + CREATED_AT_START_TIMESTAMP_INDEX + "\"");
    ddl.add("DROP INDEX \"" + WATERMARK_INDEX + "\"");
    ddl.add("DROP TABLE \"" + tableName + "\"");
  } else {
    ddl.add("DROP INDEX " + CREATED_AT_START_TIMESTAMP_INDEX);
    ddl.add("DROP INDEX " + WATERMARK_INDEX);
    ddl.add("DROP TABLE " + tableName);
  }
  OperationFuture<Void, UpdateDatabaseDdlMetadata> op =
      databaseAdminClient.updateDatabaseDdl(instanceId, databaseId, ddl, null);
  try {
    // Initiate the request which returns an OperationFuture.
    op.get(TIMEOUT_MINUTES, TimeUnit.MINUTES);
  } catch (ExecutionException | TimeoutException e) {
    // If the operation failed or timed out during execution, expose the cause.
    if (e.getCause() != null) {
      throw (SpannerException) e.getCause();
    } else {
      throw SpannerExceptionFactory.asSpannerException(e);
    }
  } catch (InterruptedException e) {
    // Throw when a thread is waiting, sleeping, or otherwise occupied,
    // and the thread is interrupted, either before or during the activity.
    throw SpannerExceptionFactory.propagateInterrupt(e);
  }
}
// An InterruptedException from the DDL future must surface as a CANCELLED SpannerException.
@Test
public void testDeletePartitionMetadataTableWithInterruptedException() throws Exception {
  when(op.get(10, TimeUnit.MINUTES)).thenThrow(new InterruptedException(INTERRUPTED));
  try {
    partitionMetadataAdminDao.deletePartitionMetadataTable();
    fail();
  } catch (SpannerException e) {
    assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
    assertTrue(e.getMessage().contains(INTERRUPTED));
  }
}
/**
 * Returns the indicator value at the given index, serving from the bounded result
 * cache where possible.
 *
 * <p>Handles three situations: no backing series (compute directly, no cache);
 * indexes already evicted from the series (serve the oldest retained result);
 * and the live end index (always recomputed, never cached).
 */
@Override
public synchronized T getValue(int index) {
  BarSeries series = getBarSeries();
  if (series == null) {
    // Series is null; the indicator doesn't need cache.
    // (e.g. simple computation of the value)
    // --> Calculating the value
    T result = calculate(index);
    if (log.isTraceEnabled()) {
      log.trace("{}({}): {}", this, index, result);
    }
    return result;
  }

  // Series is not null
  final int removedBarsCount = series.getRemovedBarsCount();
  final int maximumResultCount = series.getMaximumBarCount();

  T result;
  if (index < removedBarsCount) {
    // Result already removed from cache
    if (log.isTraceEnabled()) {
      log.trace("{}: result from bar {} already removed from cache, use {}-th instead",
          getClass().getSimpleName(), index, removedBarsCount);
    }
    increaseLengthTo(removedBarsCount, maximumResultCount);
    highestResultIndex = removedBarsCount;
    result = results.get(0);
    if (result == null) {
      // It should be "result = calculate(removedBarsCount);".
      // We use "result = calculate(0);" as a workaround
      // to fix issue #120 (https://github.com/mdeverdelhan/ta4j/issues/120).
      result = calculate(0);
      results.set(0, result);
    }
  } else {
    if (index == series.getEndIndex()) {
      // Don't cache result if last bar
      result = calculate(index);
    } else {
      increaseLengthTo(index, maximumResultCount);
      if (index > highestResultIndex) {
        // Result not calculated yet
        highestResultIndex = index;
        result = calculate(index);
        results.set(results.size() - 1, result);
      } else {
        // Result covered by current cache
        int resultInnerIndex = results.size() - 1 - (highestResultIndex - index);
        result = results.get(resultInnerIndex);
        if (result == null) {
          result = calculate(index);
          results.set(resultInnerIndex, result);
        }
      }
    }
  }
  if (log.isTraceEnabled()) {
    log.trace("{}({}): {}", this, index, result);
  }
  return result;
}
// The SMA over constant data must stay 1 even after older bars are evicted by
// shrinking the maximum bar count.
@Test
public void getValueWithOldResultsRemoval() {
  double[] data = new double[20];
  Arrays.fill(data, 1);
  BarSeries barSeries = new MockBarSeries(numFunction, data);
  SMAIndicator sma = new SMAIndicator(new ClosePriceIndicator(barSeries), 10);
  assertNumEquals(1, sma.getValue(5));
  assertNumEquals(1, sma.getValue(10));
  barSeries.setMaximumBarCount(12);
  assertNumEquals(1, sma.getValue(19));
}
// Returns a fresh iterator instance over this result set.
@Override
public Iterator<QueryableEntry> iterator() {
  return new It();
}
@Test // https://github.com/hazelcast/hazelcast/issues/1501 public void iteratingOver_noException() { Set<QueryableEntry> entries = generateEntries(100000); AndResultSet resultSet = new AndResultSet(entries, null, asList(Predicates.alwaysFalse())); Iterator it = resultSet.iterator(); boolean result = it.hasNext(); assertFalse(result); }
// Maps an arbitrary (possibly negative) index onto [0, NUMBER_OF_BUCKETS).
int normalizeIndex(int index) {
    return Math.floorMod(index, NUMBER_OF_BUCKETS);
}
// normalizeIndex must wrap both positive multiples and negative indexes into [0, 900).
@Test
public void normalizeIndex() {
  SlidingTimeWindowMovingAverages stwm = new SlidingTimeWindowMovingAverages();
  assertThat(stwm.normalizeIndex(0)).isEqualTo(0);
  assertThat(stwm.normalizeIndex(900)).isEqualTo(0);
  assertThat(stwm.normalizeIndex(9000)).isEqualTo(0);
  assertThat(stwm.normalizeIndex(-900)).isEqualTo(0);
  assertThat(stwm.normalizeIndex(1)).isEqualTo(1);
  assertThat(stwm.normalizeIndex(899)).isEqualTo(899);
  assertThat(stwm.normalizeIndex(-1)).isEqualTo(899);
  assertThat(stwm.normalizeIndex(-901)).isEqualTo(899);
}
/**
 * Builds a statement AST from the parse tree, resolving the statement's sources first.
 */
public Statement buildStatement(final ParserRuleContext parseTree) {
  return build(Optional.of(getSources(parseTree)), parseTree);
}
// A HEADER('key') column must be declared BYTES; any other type is rejected.
@Test
public void shouldRejectIncorrectlyTypedSingleHeaderColumns() {
  // Given:
  final SingleStatementContext stmt =
      givenQuery("CREATE STREAM INPUT (K BIGINT HEADER('abc')) WITH (kafka_topic='input',value_format='JSON');");
  // When:
  final KsqlException e = assertThrows(KsqlException.class, () -> {
    builder.buildStatement(stmt);
  });
  // Then:
  assertThat(e.getMessage(), is("Invalid type for HEADER('abc') column: expected BYTES, got BIGINT"));
}
/**
 * Returns whether the field reference resolves to an existing entry in the data,
 * even when the stored value is {@code null} (key presence is what matters).
 */
public static boolean includes(final ConvertedMap data, final FieldReference field) {
  final Object target = findParent(data, field);
  final String key = field.getKey();
  return target instanceof ConvertedMap && ((ConvertedMap) target).containsKey(key)
      || target instanceof ConvertedList && foundInList(key, (ConvertedList) target);
}
// A key mapped to null still counts as included (presence, not value, is checked).
@Test
public void testNilInclude() throws Exception {
  final ConvertedMap data = new ConvertedMap(1);
  data.put("nilfield", null);
  assertTrue(includes(data, "nilfield"));
}
/**
 * Parses a time string in JDBC escape format ({@code HH:mm:ss}) into a {@code java.sql.Time},
 * converting any parse failure into the shared runtime parse exception.
 */
static java.sql.Time parseSqlTime(final String value) {
  try {
    // JDK format in Time.valueOf is compatible with DATE_FORMAT
    return Time.valueOf(value);
  } catch (IllegalArgumentException e) {
    return throwRuntimeParseException(value, new ParseException(value, 0), SQL_TIME_FORMAT);
  }
}
// Times with leading zeros must survive a round-trip through toString/parseSqlTime.
@Test
public void testTimeWithLeadingZeros() throws Exception {
  // Given
  Time expectedTime = new Time(
      new SimpleDateFormat(SQL_TIME_FORMAT)
          .parse("01:02:03")
          .getTime()
  );
  // When
  Time actualTime = DateHelper.parseSqlTime(expectedTime.toString());
  // Then
  assertSqlTimesEqual(expectedTime, actualTime);
}
/**
 * Builds the full set of URLs for the given file: virtual-host style HTTP(S)
 * URLs, pre-signed URLs at several expiries, the s3:// provider URI and any
 * matching CloudFront distribution URLs.
 */
@Override
public DescriptiveUrlBag toUrl(final Path file) {
    final DescriptiveUrlBag list = new DescriptiveUrlBag();
    if(new HostPreferences(session.getHost()).getBoolean("s3.bucket.virtualhost.disable")) {
        // Virtual-host style addressing disabled; fall back to generic URLs.
        list.addAll(new DefaultUrlProvider(session.getHost()).toUrl(file));
    }
    else {
        list.add(this.toUrl(file, session.getHost().getProtocol().getScheme(), session.getHost().getPort()));
        list.add(this.toUrl(file, Scheme.http, 80));
        if(StringUtils.isNotBlank(session.getHost().getWebURL())) {
            // Only include when custom domain is configured
            list.addAll(new HostWebUrlProvider(session.getHost()).toUrl(file));
        }
    }
    if(file.isFile()) {
        // Pre-signed URLs require credentials to sign with.
        if(!session.getHost().getCredentials().isAnonymousLogin()) {
            // X-Amz-Expires must be less than a week (in seconds); that is, the given X-Amz-Expires must be less
            // than 604800 seconds
            // In one hour
            list.add(this.toSignedUrl(file, (int) TimeUnit.HOURS.toSeconds(1)));
            // Signed URL with expiry from the s3.url.expire.seconds preference.
            // TimeUnit.SECONDS.toSeconds is an identity conversion, kept for symmetry.
            list.add(this.toSignedUrl(file, (int) TimeUnit.SECONDS.toSeconds(
                new HostPreferences(session.getHost()).getInteger("s3.url.expire.seconds"))));
            // 1 Week
            list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(7)));
            switch(session.getSignatureVersion()) {
                case AWS2:
                    // 1 Month
                    list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(30)));
                    // 1 Year
                    list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(365)));
                    break;
                case AWS4HMACSHA256:
                    // No expiry beyond one week added for V4 signatures.
                    break;
            }
        }
    }
    // AWS services require specifying an Amazon S3 bucket using S3://bucket
    list.add(new DescriptiveUrl(URI.create(String.format("s3://%s%s",
            containerService.getContainer(file).getName(),
            file.isRoot() ? Path.DELIMITER : containerService.isContainer(file) ?
                    Path.DELIMITER : String.format("/%s", URIEncoder.encode(containerService.getKey(file))))),
            DescriptiveUrl.Type.provider,
            MessageFormat.format(LocaleFactory.localizedString("{0} URL"), "S3")));
    // Filter by matching container name
    final Optional<Set<Distribution>> filtered = distributions.entrySet().stream()
            .filter(entry -> new SimplePathPredicate(containerService.getContainer(file)).test(entry.getKey()))
            .map(Map.Entry::getValue).findFirst();
    if(filtered.isPresent()) {
        // Add CloudFront distributions
        for(Distribution distribution : filtered.get()) {
            list.addAll(new DistributionUrlProvider(distribution).toUrl(file));
        }
    }
    return list;
}
@Test
public void testHttpUriCustomPort() {
    // The custom port applies to the primary (https) URL only; the plain http
    // fallback stays on the default port 80.
    session.getHost().setPort(8443);
    final Iterator<DescriptiveUrl> http = new S3UrlProvider(session, Collections.emptyMap())
        .toUrl(new Path("/test-eu-west-1-cyberduck/key", EnumSet.of(Path.Type.file)))
        .filter(DescriptiveUrl.Type.http).iterator();
    assertEquals("https://test-eu-west-1-cyberduck.s3.amazonaws.com:8443/key", http.next().getUrl());
    assertEquals("http://test-eu-west-1-cyberduck.s3.amazonaws.com/key", http.next().getUrl());
}
/**
 * Evaluates the configured rules against the request's method and URI.
 *
 * The first matching rule decides the outcome; when no rule matches, the
 * default rule's response is used.
 */
@Override
protected Optional<ErrorResponse> filter(DiscFilterRequest request) {
    final String method = request.getMethod();
    final URI uri = request.getUri();
    for (Rule rule : rules) {
        if (!rule.matches(method, uri)) {
            continue;
        }
        log.log(Level.FINE, () -> String.format(
                "Request '%h' with method '%s' and uri '%s' matched rule '%s'",
                request, method, uri, rule.name));
        return responseFor(request, rule.name, rule.response);
    }
    return responseFor(request, "default", defaultResponse);
}
@Test
void includes_default_rule_response_headers_in_response_for_blocked_request() throws IOException {
    // Default rule blocks everything and attaches two custom response headers.
    RuleBasedFilterConfig config = new RuleBasedFilterConfig.Builder()
        .dryrun(false)
        .defaultRule(new DefaultRule.Builder()
            .action(DefaultRule.Action.Enum.BLOCK)
            .blockResponseHeaders(new DefaultRule.BlockResponseHeaders.Builder()
                .name("Response-Header-1").value("first-header"))
            .blockResponseHeaders(new DefaultRule.BlockResponseHeaders.Builder()
                .name("Response-Header-2").value("second-header")))
        .build();
    Metric metric = mock(Metric.class);
    RuleBasedRequestFilter filter = new RuleBasedRequestFilter(metric, config);
    MockResponseHandler responseHandler = new MockResponseHandler();
    filter.filter(request("GET", "http://myserver:80/"), responseHandler);
    // Blocked with 403 and an empty body, and both configured headers present.
    assertBlocked(responseHandler, metric, 403, "");
    Response response = responseHandler.getResponse();
    assertResponseHeader(response, "Response-Header-1", "first-header");
    assertResponseHeader(response, "Response-Header-2", "second-header");
}
/**
 * Saves the supplied Grok patterns according to the import strategy.
 *
 * ABORT_ON_CONFLICT fails when any supplied name already exists;
 * DROP_ALL_EXISTING removes every stored pattern before persisting the new
 * set; otherwise patterns with a matching name are updated in place by
 * reusing the existing document id.
 *
 * @throws ValidationException on duplicate names within the input, on name
 *         conflicts under ABORT_ON_CONFLICT, or when validation fails
 */
@Override
public List<GrokPattern> saveAll(Collection<GrokPattern> patterns, ImportStrategy importStrategy) throws ValidationException {
    final Map<String, GrokPattern> newPatternsByName;
    try {
        newPatternsByName = patterns.stream().collect(Collectors.toMap(GrokPattern::name, Function.identity()));
    } catch (IllegalStateException e) {
        // Collectors.toMap throws ISE on duplicate keys, i.e. duplicate pattern names.
        throw new ValidationException("The supplied Grok patterns contain conflicting names: " + e.getLocalizedMessage());
    }
    final Map<String, GrokPattern> existingPatternsByName =
            loadAll().stream().collect(Collectors.toMap(GrokPattern::name, Function.identity()));

    if (importStrategy == ABORT_ON_CONFLICT) {
        final Sets.SetView<String> conflictingNames = Sets.intersection(newPatternsByName.keySet(), existingPatternsByName.keySet());
        if (!conflictingNames.isEmpty()) {
            // Cap the number of names listed in the error message.
            final Iterable<String> limited = Iterables.limit(conflictingNames, MAX_DISPLAYED_CONFLICTS);
            throw new ValidationException("The following Grok patterns already exist: "
                    + StringUtils.join(limited, ", ")
                    + (conflictingNames.size() > MAX_DISPLAYED_CONFLICTS ? " (+ " + (conflictingNames.size() - MAX_DISPLAYED_CONFLICTS) + " more)" : "")
                    + ".");
        }
    }
    validateAllOrThrow(patterns, importStrategy);

    // BUG FIX: DROP_ALL_EXISTING previously behaved like a plain merge because the
    // existing patterns were never removed; the collection must only contain the
    // newly imported patterns afterwards.
    if (importStrategy == ImportStrategy.DROP_ALL_EXISTING) {
        deleteAll();
    }

    final List<GrokPattern> savedPatterns = patterns.stream()
            .map(newPattern -> {
                // Reuse the id of a same-named pattern so the save acts as an update.
                final GrokPattern existingPattern = existingPatternsByName.get(newPattern.name());
                if (existingPattern != null) {
                    return newPattern.toBuilder().id(existingPattern.id()).build();
                } else {
                    return newPattern;
                }
            })
            .map(dbCollection::save)
            .map(WriteResult::getSavedObject)
            .collect(Collectors.toList());

    // Notify the cluster so cached patterns are refreshed on all nodes.
    clusterBus.post(GrokPatternsUpdatedEvent.create(newPatternsByName.keySet()));

    return savedPatterns;
}
@Test
@MongoDBFixtures("MongoDbGrokPatternServiceTest.json")
public void saveAllWithDropAllExisting() throws ValidationException {
    // Fixture seeds three patterns; DROP_ALL_EXISTING must discard them so only
    // the newly imported pattern remains.
    assertThat(collection.countDocuments()).isEqualTo(3);

    final List<GrokPattern> grokPatterns = ImmutableList.of(GrokPattern.create("Test", "Pattern"));
    final List<GrokPattern> savedGrokPatterns = service.saveAll(grokPatterns, DROP_ALL_EXISTING);

    assertThat(savedGrokPatterns).hasSize(1);
    assertThat(collection.countDocuments()).isEqualTo(1);
}
public ModelMBeanInfo getMBeanInfo(Object defaultManagedBean, Object customManagedBean, String objectName) throws JMException { if ((defaultManagedBean == null && customManagedBean == null) || objectName == null) return null; // skip proxy classes if (defaultManagedBean != null && Proxy.isProxyClass(defaultManagedBean.getClass())) { LOGGER.trace("Skip creating ModelMBeanInfo due proxy class {}", defaultManagedBean.getClass()); return null; } // maps and lists to contain information about attributes and operations Map<String, ManagedAttributeInfo> attributes = new LinkedHashMap<>(); Set<ManagedOperationInfo> operations = new LinkedHashSet<>(); Set<ModelMBeanAttributeInfo> mBeanAttributes = new LinkedHashSet<>(); Set<ModelMBeanOperationInfo> mBeanOperations = new LinkedHashSet<>(); Set<ModelMBeanNotificationInfo> mBeanNotifications = new LinkedHashSet<>(); // extract details from default managed bean if (defaultManagedBean != null) { extractAttributesAndOperations(defaultManagedBean.getClass(), attributes, operations); extractMbeanAttributes(defaultManagedBean, attributes, mBeanAttributes, mBeanOperations); extractMbeanOperations(defaultManagedBean, operations, mBeanOperations); extractMbeanNotifications(defaultManagedBean, mBeanNotifications); } // extract details from custom managed bean if (customManagedBean != null) { extractAttributesAndOperations(customManagedBean.getClass(), attributes, operations); extractMbeanAttributes(customManagedBean, attributes, mBeanAttributes, mBeanOperations); extractMbeanOperations(customManagedBean, operations, mBeanOperations); extractMbeanNotifications(customManagedBean, mBeanNotifications); } // create the ModelMBeanInfo String name = getName(customManagedBean != null ? customManagedBean : defaultManagedBean, objectName); String description = getDescription(customManagedBean != null ? 
customManagedBean : defaultManagedBean, objectName); ModelMBeanAttributeInfo[] arrayAttributes = mBeanAttributes.toArray(new ModelMBeanAttributeInfo[mBeanAttributes.size()]); ModelMBeanOperationInfo[] arrayOperations = mBeanOperations.toArray(new ModelMBeanOperationInfo[mBeanOperations.size()]); ModelMBeanNotificationInfo[] arrayNotifications = mBeanNotifications.toArray(new ModelMBeanNotificationInfo[mBeanNotifications.size()]); ModelMBeanInfo info = new ModelMBeanInfoSupport(name, description, arrayAttributes, null, arrayOperations, arrayNotifications); LOGGER.trace("Created ModelMBeanInfo {}", info); return info; }
// A getter that declares a parameter is not a valid managed attribute; the
// assembler is expected to reject it with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testAttributeHavingParameter() throws JMException {
    mbeanInfoAssembler.getMBeanInfo(new BadAttributeGetterHavinParameter(), null, "someName");
}
/**
 * Deletes the contents of {@code directory} (files and subdirectories) while
 * keeping the directory itself. A non-existing directory is a no-op.
 *
 * @throws NullPointerException if {@code directory} is null
 * @throws IOException on deletion failure
 */
public static void cleanDirectory(File directory) throws IOException {
    requireNonNull(directory, DIRECTORY_CAN_NOT_BE_NULL);
    if (directory.exists()) {
        cleanDirectoryImpl(directory.toPath());
    }
}
@Test
public void cleanDirectory_removes_directories_and_files_in_target_directory_but_not_target_directory() throws IOException {
    // Build a small tree: target/file1.txt, target/subDir1/file2.txt, target/subDir1/subDir2.
    Path target = temporaryFolder.newFolder().toPath();
    Path childFile1 = Files.createFile(target.resolve("file1.txt"));
    Path childDir1 = Files.createDirectory(target.resolve("subDir1"));
    Path childFile2 = Files.createFile(childDir1.resolve("file2.txt"));
    Path childDir2 = Files.createDirectory(childDir1.resolve("subDir2"));

    assertThat(target).isDirectory();
    assertThat(childFile1).isRegularFile();
    assertThat(childDir1).isDirectory();
    assertThat(childFile2).isRegularFile();
    assertThat(childDir2).isDirectory();

    // on supporting FileSystem, target will change if directory is recreated
    Object targetKey = getFileKey(target);

    FileUtils2.cleanDirectory(target.toFile());

    // Contents gone, directory itself untouched (same file key => not recreated).
    assertThat(target).isDirectory();
    assertThat(childFile1).doesNotExist();
    assertThat(childDir1).doesNotExist();
    assertThat(childFile2).doesNotExist();
    assertThat(childDir2).doesNotExist();
    assertThat(getFileKey(target)).isEqualTo(targetKey);
}
/**
 * Applies {@code visitor} to every visitable predicate in the array.
 *
 * The input array is never mutated: a copy is allocated lazily, only when the
 * first predicate is actually transformed. When nothing changes, the original
 * array instance is returned.
 */
public static Predicate[] acceptVisitor(Predicate[] predicates, Visitor visitor, IndexRegistry indexes) {
    Predicate[] result = predicates;
    for (int i = 0; i < predicates.length; i++) {
        Predicate original = predicates[i];
        if (!(original instanceof VisitablePredicate visitable)) {
            continue;
        }
        Predicate transformed = visitable.accept(visitor, indexes);
        if (transformed == original) {
            continue;
        }
        if (result == predicates) {
            // First transformation: switch to a private copy before writing.
            result = createCopy(predicates);
        }
        result[i] = transformed;
    }
    return result;
}
@Test
public void acceptVisitor_whenThereIsChange_thenReturnNewArray() {
    Visitor mockVisitor = mock(Visitor.class);
    Predicate[] predicates = new Predicate[2];

    // p1 is returned unchanged by its visitor; p2 is replaced by 'transformed'.
    Predicate p1 = createMockVisitablePredicate();
    predicates[0] = p1;
    Predicate transformed = mock(Predicate.class);
    Predicate p2 = createMockVisitablePredicate(transformed);
    predicates[1] = p2;

    Predicate[] result = VisitorUtils.acceptVisitor(predicates, mockVisitor, mockIndexes);

    // A change must produce a fresh array containing the untouched p1 and the replacement.
    assertThat(result).isNotSameAs(predicates);
    assertThat(result).hasSize(2);
    assertThat(result).containsExactlyInAnyOrder(p1, transformed);
}
/**
 * Validates that {@code region} is a syntactically valid AWS region name.
 *
 * @throws InvalidConfigurationException when the region is null or does not
 *         match {@code AWS_REGION_PATTERN}
 */
static void validateRegion(String region) {
    if (region == null) {
        throw new InvalidConfigurationException("The provided region is null.");
    }
    if (AWS_REGION_PATTERN.matcher(region).matches()) {
        return;
    }
    throw new InvalidConfigurationException(
            "The provided region " + region + " is not a valid AWS region.");
}
@Test
public void validateValidRegion() {
    // Both standard and GovCloud region names must pass validation without throwing.
    RegionValidator.validateRegion("us-west-1");
    RegionValidator.validateRegion("us-gov-east-1");
}
/**
 * Performs a prefix scan across all underlying stores for this store name.
 *
 * Returns a peeking iterator that lazily chains the per-store iterators; a
 * store that throws {@link InvalidStateStoreException} mid-scan is reported
 * as a retriable "re-discover the store" error.
 */
@Override
public <PS extends Serializer<P>, P> KeyValueIterator<K, V> prefixScan(final P prefix, final PS prefixKeySerializer) {
    Objects.requireNonNull(prefix);
    Objects.requireNonNull(prefixKeySerializer);

    // Per-store iterator factory used by the composite iterator below.
    final NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>> nextIteratorFunction = store -> {
        try {
            return store.prefixScan(prefix, prefixKeySerializer);
        } catch (final InvalidStateStoreException e) {
            throw new InvalidStateStoreException(
                "State store is not available anymore and may have been migrated to another instance; "
                    + "please re-discover its location from the state metadata.");
        }
    };

    final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType);
    return new DelegatingPeekingKeyValueIterator<>(
        storeName,
        new CompositeKeyValueIterator<>(stores.iterator(), nextIteratorFunction));
}
@Test
public void shouldSupportPrefixScanAcrossMultipleKVStores() {
    // Spread matching keys across two underlying stores; the composite store
    // must merge results from both.
    final KeyValueStore<String, String> cache = newStoreInstance();
    stubProviderTwo.addStore(storeName, cache);

    stubOneUnderlying.put("a", "a");
    stubOneUnderlying.put("b", "b");
    stubOneUnderlying.put("z", "z");
    cache.put("aa", "c");
    cache.put("ab", "d");
    cache.put("x", "x");

    final List<KeyValue<String, String>> results = toList(theStore.prefixScan("a", new StringSerializer()));
    // Only keys starting with "a" come back; "b", "z" and "x" are excluded.
    assertArrayEquals(
        asList(
            new KeyValue<>("a", "a"),
            new KeyValue<>("aa", "c"),
            new KeyValue<>("ab", "d")
        ).toArray(),
        results.toArray());
}
/**
 * Imports albums first, then photos, into SmugMug.
 *
 * Each album/photo import is routed through the idempotent executor keyed by
 * the item's id, so re-runs skip work already completed. IOExceptions from
 * individual items are swallowed by the executor; an IOException escaping the
 * whole loop aborts the import with an error result.
 */
@Override
public ImportResult importItem(
    UUID jobId,
    IdempotentImportExecutor idempotentExecutor,
    TokenSecretAuthData authData,
    PhotosContainerResource data)
    throws Exception {
    // Make the data smugmug compatible
    data.transmogrify(transmogrificationConfig);

    try {
        SmugMugInterface smugMugInterface = getOrCreateSmugMugInterface(authData);
        // Albums must exist before their photos are uploaded.
        for (PhotoAlbum album : data.getAlbums()) {
            idempotentExecutor.executeAndSwallowIOExceptions(
                album.getId(), album.getName(), () -> importSingleAlbum(jobId, album, smugMugInterface));
        }
        for (PhotoModel photo : data.getPhotos()) {
            idempotentExecutor.executeAndSwallowIOExceptions(
                photo.getIdempotentId(),
                photo.getTitle(),
                () -> importSinglePhoto(jobId, idempotentExecutor, photo, smugMugInterface));
        }
    } catch (IOException e) {
        monitor.severe(() -> "Error importing", e);
        return new ImportResult(e);
    }
    return ImportResult.OK;
}
@Test
public void importStoresAlbumInJobStore() throws Exception {
    // setup test objects: one album and three photos that all target it; the
    // third photo is expected to overflow into a second ("... (1)") album.
    UUID jobId = UUID.randomUUID();

    PhotoAlbum photoAlbum1 = new PhotoAlbum("albumId1", "albumName1", "albumDescription1");

    PhotoModel photoModel1 =
        new PhotoModel(
            "PHOTO_TITLE", "FETCHABLE_URL", "PHOTO_DESCRIPTION", "MEDIA_TYPE", "photoId1",
            photoAlbum1.getId(), false);
    PhotoModel photoModel2 =
        new PhotoModel(
            "PHOTO_TITLE", "FETCHABLE_URL", "PHOTO_DESCRIPTION", "MEDIA_TYPE", "photoId2",
            photoAlbum1.getId(), false);
    PhotoModel photoModel3 =
        new PhotoModel(
            "PHOTO_TITLE", "FETCHABLE_URL", "PHOTO_DESCRIPTION", "MEDIA_TYPE", "photoId3",
            photoAlbum1.getId(), false);

    // Albums and photos are imported in two separate calls, mirroring the
    // importer's album-first contract.
    PhotosContainerResource photosContainerResource1 =
        new PhotosContainerResource(Collections.singletonList(photoAlbum1), ImmutableList.of());
    PhotosContainerResource photosContainerResource2 =
        new PhotosContainerResource(
            ImmutableList.of(), ImmutableList.of(photoModel1, photoModel2, photoModel3));

    SmugMugAlbum smugMugAlbum1 =
        new SmugMugAlbum(
            "date", photoAlbum1.getDescription(), photoAlbum1.getName(), "privacy", "albumUri1",
            "urlname", "weburi");
    String overflowAlbumName = smugMugAlbum1.getName() + " (1)";
    SmugMugAlbum smugMugAlbum2 =
        new SmugMugAlbum(
            "date", photoAlbum1.getDescription(), overflowAlbumName, "privacy", "albumUri2",
            "urlname", "weburi");

    SmugMugAlbumResponse mockAlbumResponse1 =
        new SmugMugAlbumResponse(smugMugAlbum1.getUri(), "Locator", "LocatorType", smugMugAlbum1);
    SmugMugAlbumResponse mockAlbumResponse2 =
        new SmugMugAlbumResponse(smugMugAlbum2.getUri(), "Locator", "LocatorType", smugMugAlbum2);
    when(smugMugInterface.createAlbum(eq(smugMugAlbum1.getName()))).thenReturn(mockAlbumResponse1);
    when(smugMugInterface.createAlbum(eq(smugMugAlbum2.getName()))).thenReturn(mockAlbumResponse2);

    SmugMugImageUploadResponse smugMugUploadImageResponse =
        new SmugMugImageUploadResponse(
            "imageUri", "albumImageUri",
            new ImageInfo("imageUri", "albumImageUri", "statusImageReplaceUri", "url"));
    when(smugMugInterface.uploadImage(any(), any(), any())).thenReturn(smugMugUploadImageResponse);
    when(smugMugInterface.getImageAsStream(any())).thenReturn(bufferedInputStream);

    // Run test
    SmugMugPhotosImporter importer =
        new SmugMugPhotosImporter(
            smugMugInterface,
            config,
            jobStore,
            new AppCredentials("key", "secret"),
            mock(ObjectMapper.class),
            monitor);
    ImportResult result =
        importer.importItem(
            jobId, EXECUTOR, new TokenSecretAuthData("token", "secret"), photosContainerResource1);
    result =
        importer.importItem(
            jobId, EXECUTOR, new TokenSecretAuthData("token", "secret"), photosContainerResource2);

    // Verify: both albums were created and every photo was fetched.
    ArgumentCaptor<String> photoUrlsCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> albumNamesCaptor = ArgumentCaptor.forClass(String.class);
    verify(smugMugInterface, atLeastOnce()).createAlbum(albumNamesCaptor.capture());
    verify(smugMugInterface, atLeastOnce()).getImageAsStream(photoUrlsCaptor.capture());

    List<String> capturedAlbumNames = albumNamesCaptor.getAllValues();
    assertTrue(capturedAlbumNames.contains(smugMugAlbum1.getName()));
    assertTrue(capturedAlbumNames.contains(smugMugAlbum2.getName()));

    List<String> capturedPhotoUrls = photoUrlsCaptor.getAllValues();
    assertTrue(capturedPhotoUrls.contains(photoModel1.getFetchableUrl()));
    assertTrue(capturedPhotoUrls.contains(photoModel2.getFetchableUrl()));
    assertTrue(capturedPhotoUrls.contains(photoModel3.getFetchableUrl()));

    // The idempotent executor caches the album URIs under the original id and
    // the derived overflow id.
    String overflowAlbumId = photoAlbum1.getId() + "-overflow-1";
    assertThat((String) EXECUTOR.getCachedValue(photoAlbum1.getId()))
        .isEqualTo(smugMugAlbum1.getUri());
    assertThat((String) EXECUTOR.getCachedValue(overflowAlbumId)).isEqualTo(smugMugAlbum2.getUri());

    // Temp data in the job store records photo counts and the overflow chain.
    SmugMugPhotoTempData tempData1 =
        new SmugMugPhotoTempData(
            photoAlbum1.getId(),
            smugMugAlbum1.getName(),
            smugMugAlbum1.getDescription(),
            smugMugAlbum1.getUri(),
            2,
            overflowAlbumId);
    SmugMugPhotoTempData tempData2 =
        new SmugMugPhotoTempData(
            overflowAlbumId,
            smugMugAlbum2.getName(),
            smugMugAlbum2.getDescription(),
            smugMugAlbum2.getUri(),
            1,
            null);
    assertThat(
            jobStore
                .findData(
                    jobId,
                    String.format(TEMP_DATA_FORMAT, photoAlbum1.getId()),
                    SmugMugPhotoTempData.class)
                .toString())
        .isEqualTo(tempData1.toString());
    assertThat(
            jobStore
                .findData(
                    jobId,
                    String.format(TEMP_DATA_FORMAT, overflowAlbumId),
                    SmugMugPhotoTempData.class)
                .toString())
        .isEqualTo(tempData2.toString());
}
/**
 * Returns the transformer used to convert data table entries to the target type.
 */
@Override
public TableEntryByTypeTransformer tableEntryByTypeTransformer() {
    return transformer;
}
@Test
void throws_for_multiple_empties_with_correct_method() throws Throwable {
    // Both placeholders replace to "", which would create two identical keys in
    // the transformed entry — that must be rejected, not silently merged.
    Map<String, String> fromValue = new LinkedHashMap<>();
    fromValue.put("[empty]", "a");
    fromValue.put("[blank]", "b");
    Method method = JavaDefaultDataTableEntryTransformerDefinitionTest.class.getMethod("correct_method", Map.class, Type.class);
    JavaDefaultDataTableEntryTransformerDefinition definition =
        new JavaDefaultDataTableEntryTransformerDefinition(
            method, lookup, false, new String[] { "[empty]", "[blank]" });

    IllegalArgumentException exception = assertThrows(
        IllegalArgumentException.class,
        () -> definition.tableEntryByTypeTransformer().transform(fromValue, String.class, cellTransformer));
    assertThat(exception.getMessage(), is(
        "After replacing [empty] and [blank] with empty strings the datatable entry contains duplicate keys: {[empty]=a, [blank]=b}"));
}
/**
 * Fetches theme colors from the remote app's own context and returns them as
 * overlay data; returns EMPTY when unsupported or on any failure.
 */
@Override
public OverlayData createOverlayData(ComponentName remoteApp) {
    // Accent extraction is gated on OS support; older platforms get EMPTY.
    if (!OS_SUPPORT_FOR_ACCENT) {
        return EMPTY;
    }

    try {
        final ActivityInfo activityInfo =
            mLocalContext
                .getPackageManager()
                .getActivityInfo(remoteApp, PackageManager.GET_META_DATA);
        // Context for the REMOTE package so its theme resources can be resolved.
        final Context context =
            mLocalContext.createPackageContext(remoteApp.getPackageName(), CONTEXT_IGNORE_SECURITY);
        context.setTheme(activityInfo.getThemeResource());
        fetchRemoteColors(mCurrentOverlayData, context);
        Logger.d(
            "OverlyDataCreatorForAndroid",
            "For component %s we fetched %s",
            remoteApp,
            mCurrentOverlayData);
        return mCurrentOverlayData;
    } catch (Exception e) {
        // Deliberately broad: any failure to read the remote theme falls back to EMPTY.
        Logger.w("OverlyDataCreatorForAndroid", e, "Failed to fetch colors for %s", remoteApp);
        return EMPTY;
    }
}
@Test
public void testDoesNotFailIfMissingAttributeInTheme() {
    // A theme missing some attributes must still produce usable (valid) overlay data.
    setupReturnedColors(R.style.MissingAttribute);
    final OverlayData overlayData = mUnderTest.createOverlayData(mComponentName);
    // primary and dark-primary are the defaults of the OS/SDK-level. I don't want to
    // verify their values since it may change.
    Assert.assertEquals(Color.parseColor("#ffff0000"), overlayData.getPrimaryTextColor());
    Assert.assertTrue(overlayData.isValid());
}
/**
 * Advice hook run after the target method: increments the counter metric
 * registered for this plugin type.
 */
@Override
public void afterMethod(final TargetAdviceObject target, final TargetAdviceMethod method, final Object[] args, final Object result, final String pluginType) {
    MetricsCollectorRegistry.<CounterMetricsCollector>get(config, pluginType).inc();
}
@Test
void assertCountRequests() {
    // One advice invocation must bump the FIXTURE counter from 0 to 1.
    TargetAdviceObjectFixture targetObject = new TargetAdviceObjectFixture();
    advice.afterMethod(targetObject, mock(TargetAdviceMethod.class), new Object[]{}, null, "FIXTURE");
    assertThat(MetricsCollectorRegistry.get(config, "FIXTURE").toString(), is("1"));
}
/**
 * Computes the weakly connected components of the graph via repeated BFS.
 *
 * Color protocol: 0 = unseen, 1 = queued, 2 = visited. Each outer iteration
 * seeds a new BFS from the first still-unseen node and collects everything it
 * reaches into one component. Returns an empty list when cancelled mid-run.
 *
 * @param indices node -> position in the color array; must cover every node
 */
public LinkedList<LinkedList<Node>> computeWeaklyConnectedComponents(Graph graph, HashMap<Node, Integer> indices) {
    int N = graph.getNodeCount();

    //Keep track of which nodes have been seen
    int[] color = new int[N];

    Progress.start(progress, N);
    int seenCount = 0;

    LinkedList<LinkedList<Node>> components = new LinkedList<>();
    while (seenCount < N) {
        //The search Q
        LinkedList<Node> Q = new LinkedList<>();
        //The component-list
        LinkedList<Node> component = new LinkedList<>();

        //Seed the search Q
        // NOTE(review): this rescans the node list for every component, making the
        // seed step O(N * components) overall.
        NodeIterable iter = graph.getNodes();
        for (Node next : iter) {
            if (color[indices.get(next)] == 0) {
                Q.add(next);
                iter.doBreak();
                break;
            }
        }

        //While there are more nodes to search
        while (!Q.isEmpty()) {
            if (isCanceled) {
                return new LinkedList<>();
            }
            //Get the next Node and add it to the component list
            Node u = Q.removeFirst();
            component.add(u);
            color[indices.get(u)] = 2;

            //Iterate over all of u's neighbors
            EdgeIterable edgeIter = graph.getEdges(u);

            //For each neighbor
            for (Edge edge : edgeIter) {
                Node reachable = graph.getOpposite(u, edge);
                int id = indices.get(reachable);
                //If this neighbor is unvisited
                if (color[id] == 0) {
                    //Mark it as used
                    color[id] = 1;
                    //Add it to the search Q
                    Q.addLast(reachable);
                }
            }
            // seenCount advances once per dequeued node, driving both the outer
            // loop's termination and the progress ticker.
            seenCount++;
            Progress.progress(progress, seenCount);
        }
        components.add(component);
    }
    return components;
}
@Test
public void testComputeOneNodeWeaklyConnectedComponents() {
    // Minimal case: a graph with a single isolated node yields exactly one component.
    GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
    UndirectedGraph graph = graphModel.getUndirectedGraph();
    Node n = graph.getNode("0");

    ConnectedComponents c = new ConnectedComponents();
    HashMap<Node, Integer> indices = new HashMap<>();
    indices.put(n, 0);
    LinkedList<LinkedList<Node>> components = c.computeWeaklyConnectedComponents(graph, indices);
    assertEquals(components.size(), 1);
}
/**
 * Writes query results back to the client.
 *
 * If the executor produced a query result set but the client channel has gone
 * inactive, a zero-row SELECT CommandComplete is written instead of streaming
 * rows; otherwise the simple-query processing path is taken.
 */
@Override
public void writeQueryData(final ChannelHandlerContext context,
                           final ProxyDatabaseConnectionManager databaseConnectionManager, final QueryCommandExecutor queryCommandExecutor, final int headerPackagesCount) throws SQLException {
    if (ResponseType.QUERY == queryCommandExecutor.getResponseType() && !context.channel().isActive()) {
        // Client disconnected mid-query: terminate with an empty completion marker.
        context.write(new PostgreSQLCommandCompletePacket(PostgreSQLCommand.SELECT.name(), 0L));
        return;
    }
    processSimpleQuery(context, databaseConnectionManager, queryCommandExecutor);
}
@Test
void assertWriteQueryDataWithUpdate() throws SQLException {
    PostgreSQLCommandExecuteEngine commandExecuteEngine = new PostgreSQLCommandExecuteEngine();
    // UPDATE responses skip the inactive-channel early return and go through
    // simple-query processing, which ends with a ReadyForQuery packet.
    when(queryCommandExecutor.getResponseType()).thenReturn(ResponseType.UPDATE);
    ProxyDatabaseConnectionManager databaseConnectionManager = mock(ProxyDatabaseConnectionManager.class, RETURNS_DEEP_STUBS);
    when(databaseConnectionManager.getConnectionSession()).thenReturn(connectionSession);
    commandExecuteEngine.writeQueryData(channelHandlerContext, databaseConnectionManager, queryCommandExecutor, 0);
    verify(channelHandlerContext).write(PostgreSQLReadyForQueryPacket.NOT_IN_TRANSACTION);
}
/**
 * Rebalance callback: inspects the assignor's error code and either fails the
 * rebalance, triggers shutdown, or transitions the thread to PARTITIONS_ASSIGNED.
 *
 * NOTE(review): assignmentErrorCode.get() is read once per branch rather than
 * hoisted into a local; a concurrent writer could make branches observe
 * different values — confirm whether that is intended.
 */
@Override
public void onPartitionsAssigned(final Collection<TopicPartition> partitions) {
    // NB: all task management is already handled by:
    // org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor.onAssignment
    if (assignmentErrorCode.get() == AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.code()) {
        log.error("Received error code {}. {}",
                  AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.codeName(),
                  AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.description());
        taskManager.handleRebalanceComplete();
        throw new MissingSourceTopicException("One or more source topics were missing during rebalance");
    } else if (assignmentErrorCode.get() == AssignorError.VERSION_PROBING.code()) {
        // Version probing is informational only; proceed as normal.
        log.info("Received version probing code {}", AssignorError.VERSION_PROBING);
    } else if (assignmentErrorCode.get() == AssignorError.ASSIGNMENT_ERROR.code()) {
        log.error("Received error code {}", AssignorError.ASSIGNMENT_ERROR);
        taskManager.handleRebalanceComplete();
        throw new TaskAssignmentException("Hit an unexpected exception during task assignment phase of rebalance");
    } else if (assignmentErrorCode.get() == AssignorError.SHUTDOWN_REQUESTED.code()) {
        log.error("A Kafka Streams client in this Kafka Streams application is requesting to shutdown the application");
        taskManager.handleRebalanceComplete();
        streamThread.shutdownToError();
        // Skip the state transition below; the thread is shutting down.
        return;
    } else if (assignmentErrorCode.get() != AssignorError.NONE.code()) {
        log.error("Received unknown error code {}", assignmentErrorCode.get());
        throw new TaskAssignmentException("Hit an unrecognized exception during rebalance");
    }

    streamThread.setState(State.PARTITIONS_ASSIGNED);
    streamThread.setPartitionAssignedTime(time.milliseconds());
    taskManager.handleRebalanceComplete();
}
@Test
public void shouldThrowMissingSourceTopicException() {
    // The listener must still complete the rebalance on the task manager before
    // propagating the missing-topic failure.
    assignmentErrorCode.set(AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.code());
    final MissingSourceTopicException exception = assertThrows(
        MissingSourceTopicException.class,
        () -> streamsRebalanceListener.onPartitionsAssigned(Collections.emptyList())
    );
    assertThat(exception.getMessage(), is("One or more source topics were missing during rebalance"));
    verify(taskManager).handleRebalanceComplete();
}
/**
 * Delegates iteration to the underlying storage, passing it the given
 * iterator callback, and returns the storage's result.
 */
public long iterate(Iterator<E> iterator) {
    return storage.iterate(iterator);
}
@Test
public void testIterate() {
    // Exercises iteration across the dense<->sparse storage transitions and a
    // range of boundary values; verifyIterate() is called after every mutation.
    SparseIntArray.Iterator<Integer> iterator = new SparseIntArray.Iterator<>();
    // test empty array
    verifyIterate(iterator);
    // try dense
    for (int i = 0; i < ARRAY_STORAGE_32_MAX_SPARSE_SIZE / 2; ++i) {
        set(i);
        verifyIterate(iterator);
    }
    // go sparse
    for (int i = 1000000; i < 1000000 + ARRAY_STORAGE_32_MAX_SPARSE_SIZE; ++i) {
        set(i);
        verifyIterate(iterator);
    }
    // clear everything we have added
    for (int i = 0; i < ARRAY_STORAGE_32_MAX_SPARSE_SIZE / 2; ++i) {
        clear(i);
        verifyIterate(iterator);
    }
    for (int i = 1000000; i < 1000000 + ARRAY_STORAGE_32_MAX_SPARSE_SIZE; ++i) {
        clear(i);
        verifyIterate(iterator);
    }
    // test empty again
    verifyIterate(iterator);
    // try gaps
    for (int i = 0; i < 1000; ++i) {
        set(i * i);
        verifyIterate(iterator);
    }
    // try larger gaps
    for (int i = (int) Math.sqrt(Integer.MAX_VALUE) - 1000; i < (int) Math.sqrt(Integer.MAX_VALUE); ++i) {
        set(i * i);
        verifyIterate(iterator);
    }
    // try some edge cases around 0, Short.MIN/MAX and Integer.MIN/MAX
    for (int i = -2; i <= 2; ++i) {
        set(i);
        verifyIterate(iterator);
    }
    for (int i = Short.MAX_VALUE - 2; i <= Short.MAX_VALUE + 2; ++i) {
        set(i);
        verifyIterate(iterator);
    }
    for (int i = Short.MIN_VALUE - 2; i <= Short.MIN_VALUE + 2; ++i) {
        set(i);
        verifyIterate(iterator);
    }
    // long loop counters so the bounds themselves don't overflow; the cast wraps
    // intentionally to hit Integer.MIN/MAX edge values.
    for (long i = (long) Integer.MAX_VALUE - 2; i <= (long) Integer.MAX_VALUE + 2; ++i) {
        set((int) i);
        verifyIterate(iterator);
    }
    for (long i = (long) Integer.MIN_VALUE - 2; i <= (long) Integer.MIN_VALUE + 2; ++i) {
        set((int) i);
        verifyIterate(iterator);
    }
}
/**
 * Returns the registry for the given URL, creating and caching it on first use.
 *
 * The URL is normalized (path/interface set, volatile params stripped) before
 * being turned into a cache key. Creation is serialized under the registry
 * lock with a double-check against both the destroyed state and the cache
 * (see dubbo issue #7265). When creation fails and check=true, the failure is
 * rethrown; with check=false it is only logged and null may be returned.
 */
@Override
public Registry getRegistry(URL url) {
    if (registryManager == null) {
        throw new IllegalStateException("Unable to fetch RegistryManager from ApplicationModel BeanFactory. "
            + "Please check if `setApplicationModel` has been override.");
    }

    // A destroyed application serves a no-op registry instead of creating new ones.
    Registry defaultNopRegistry = registryManager.getDefaultNopRegistryIfDestroyed();
    if (null != defaultNopRegistry) {
        return defaultNopRegistry;
    }

    url = URLBuilder.from(url)
        .setPath(RegistryService.class.getName())
        .addParameter(INTERFACE_KEY, RegistryService.class.getName())
        .removeParameter(TIMESTAMP_KEY)
        .removeAttribute(EXPORT_KEY)
        .removeAttribute(REFER_KEY)
        .build();
    String key = createRegistryCacheKey(url);
    Registry registry = null;
    boolean check = url.getParameter(CHECK_KEY, true) && url.getPort() != 0;
    // Lock the registry access process to ensure a single instance of the registry
    registryManager.getRegistryLock().lock();
    try {
        // double check
        // fix https://github.com/apache/dubbo/issues/7265.
        defaultNopRegistry = registryManager.getDefaultNopRegistryIfDestroyed();
        if (null != defaultNopRegistry) {
            return defaultNopRegistry;
        }
        registry = registryManager.getRegistry(key);
        if (registry != null) {
            return registry;
        }
        // create registry by spi/ioc
        registry = createRegistry(url);
        if (check && registry == null) {
            throw new IllegalStateException("Can not create registry " + url);
        }

        if (registry != null) {
            registryManager.putRegistry(key, registry);
        }
    } catch (Exception e) {
        if (check) {
            throw new RuntimeException("Can not create registry " + url, e);
        } else {
            // 1-11 Failed to obtain or create registry (service) object.
            LOGGER.warn(REGISTRY_FAILED_CREATE_INSTANCE, "", "", "Failed to obtain or create registry ", e);
        }
    } finally {
        // Release the lock
        registryManager.getRegistryLock().unlock();
    }

    return registry;
}
@Test
void testDestroyAllRegistries() {
    // Two registries created through the factory (tracked by the manager) and one
    // created directly (untracked once destroyed).
    Registry registry1 = registryFactory.getRegistry(URL.valueOf("dubbo://" + NetUtils.getLocalHost() + ":8888?group=xxx"));
    Registry registry2 = registryFactory.getRegistry(URL.valueOf("dubbo://" + NetUtils.getLocalHost() + ":9999?group=yyy"));
    Registry registry3 = new AbstractRegistry(URL.valueOf("dubbo://" + NetUtils.getLocalHost() + ":2020?group=yyy")) {
        @Override
        public boolean isAvailable() {
            return true;
        }
    };
    RegistryManager registryManager = ApplicationModel.defaultModel().getBeanFactory().getBean(RegistryManager.class);
    Collection<Registry> registries = registryManager.getRegistries();
    Assertions.assertTrue(registries.contains(registry1));
    Assertions.assertTrue(registries.contains(registry2));

    // Destroying an individual registry removes it from the manager.
    registry3.destroy();
    registries = registryManager.getRegistries();
    Assertions.assertFalse(registries.contains(registry3));

    // destroyAll clears every remaining registry.
    registryManager.destroyAll();
    registries = registryManager.getRegistries();
    Assertions.assertFalse(registries.contains(registry1));
    Assertions.assertFalse(registries.contains(registry2));
}
/**
 * Loads the notification preference for the given user from their preference
 * ConfigMap.
 *
 * Falls back to a fresh (default) {@code UserNotificationPreference} when the
 * ConfigMap is absent, has no data, or the stored JSON entry is blank.
 */
@Override
public Mono<UserNotificationPreference> getByUser(String username) {
    var configName = buildUserPreferenceConfigMapName(username);
    return client.fetch(ConfigMap.class, configName)
        .map(config -> {
            var data = config.getData();
            var json = data == null ? null : data.get(NOTIFICATION_PREFERENCE);
            return StringUtils.isNotBlank(json)
                ? JsonUtils.jsonToObject(json, UserNotificationPreference.class)
                : new UserNotificationPreference();
        })
        .defaultIfEmpty(new UserNotificationPreference());
}
@Test
void getByUser() {
    // ConfigMap stores the preference as a JSON string under the "notification" key.
    var configMap = new ConfigMap();
    configMap.setData(Map.of("notification",
        "{\"reasonTypeNotifier\":{\"comment\":{\"notifiers\":[\"test-notifier\"]}}}"));
    when(client.fetch(ConfigMap.class, "user-preferences-guqing"))
        .thenReturn(Mono.just(configMap));

    userNotificationPreferenceService.getByUser("guqing")
        .as(StepVerifier::create)
        .consumeNextWith(preference -> {
            // The JSON round-trips into the typed preference structure.
            assertThat(preference.getReasonTypeNotifier()).isNotNull();
            assertThat(preference.getReasonTypeNotifier().get("comment")).isNotNull();
            assertThat(preference.getReasonTypeNotifier().get("comment").getNotifiers())
                .containsExactly("test-notifier");
        })
        .verifyComplete();

    verify(client).fetch(ConfigMap.class, "user-preferences-guqing");
}
/**
 * Validates that the given value is strictly positive.
 *
 * @param n the value to check
 * @param name the argument name used in the failure message
 * @return {@code n} unchanged when it is greater than zero
 * @throws IllegalArgumentException if {@code n} is zero or negative
 */
public static long checkPositive(long n, String name) {
    if (n > 0) {
        return n;
    }
    throw new IllegalArgumentException(name + ": " + n + " (expected: > 0)");
}
@Test
public void checkPositiveMustPassIfArgumentIsGreaterThanZero() {
    // A positive argument must be returned unchanged.
    final long value = 1;
    assertThat(RangeUtil.checkPositive(value, "var"), is(equalTo(value)));
}
/**
 * Closes the underlying log reader, then the DistributedLog manager that produced it.
 *
 * @throws IOException if closing either resource fails. NOTE(review): if the reader's
 *     close throws, the manager is left unclosed — TODO confirm whether both should be
 *     closed unconditionally.
 */
@Override
public void close() throws IOException {
    reader.close();
    dlm.close();
}
@Test
public void testClose() throws Exception {
    DistributedLogManager dlm = mock(DistributedLogManager.class);
    LogReader reader = mock(LogReader.class);
    when(dlm.getInputStream(any(DLSN.class))).thenReturn(reader);
    // Construction opens a reader positioned at the initial DLSN.
    DLInputStream in = new DLInputStream(dlm);
    verify(dlm, times(1)).getInputStream(eq(DLSN.InitialDLSN));
    // close() must close both the reader and its owning manager exactly once.
    in.close();
    verify(dlm, times(1)).close();
    verify(reader, times(1)).close();
}
/**
 * Completes this promise exceptionally with the given throwable, notifying all
 * registered consumers.
 *
 * @param value the failure cause; must not be null
 * @throws NullPointerException if {@code value} is null
 * @throws IllegalStateException if the promise was already completed
 */
public void completeExceptionally(Throwable value) {
    checkNotNull(value);

    // A promise can only be completed once; EMPTY is the "not yet completed" sentinel.
    if (this.value != EMPTY) {
        throw new IllegalStateException("Promise is already completed");
    }
    this.value = value;
    this.exceptional = true;
    // Notify callbacks; a throwing consumer must not prevent the remaining ones from
    // running, so its failure is logged and swallowed.
    for (BiConsumer<E, Throwable> consumer : consumers) {
        try {
            consumer.accept(null, value);
        } catch (Exception e) {
            eventloop.logger.warning(e);
        }
    }

    // Optionally recycle this promise once completion has been delivered.
    if (releaseOnComplete) {
        release();
    }
}
@Test(expected = NullPointerException.class)
public void test_completeExceptionallyWhenNull() {
    // A null throwable must be rejected up front.
    final Promise<String> subject = new Promise<>(reactor.eventloop);
    subject.completeExceptionally(null);
}
/**
 * Applies a flow-rule install/uninstall operation for an Intent: computes the
 * add/modify/remove rule sets from the uninstall and install intent data, submits
 * them as one FlowRuleOperations batch, and reports success/failure back to the
 * install coordinator.
 */
@Override
public void apply(IntentOperationContext<FlowRuleIntent> context) {
    Optional<IntentData> toUninstall = context.toUninstall();
    Optional<IntentData> toInstall = context.toInstall();

    // When replacing an installed intent that requires non-disruptive reallocation,
    // hand off to the dedicated reallocation path instead of the add/remove batch.
    if (toInstall.isPresent() && toUninstall.isPresent()) {
        Intent intentToInstall = toInstall.get().intent();
        if (requireNonDisruptive(intentToInstall) && INSTALLED.equals(toUninstall.get().state())) {
            reallocate(context);
            return;
        }
    }

    if (!toInstall.isPresent() && !toUninstall.isPresent()) {
        // Nothing to do.
        intentInstallCoordinator.intentInstallSuccess(context);
        return;
    }

    List<FlowRuleIntent> uninstallIntents = context.intentsToUninstall();
    List<FlowRuleIntent> installIntents = context.intentsToInstall();

    List<FlowRule> flowRulesToUninstall;
    List<FlowRule> flowRulesToInstall;

    if (toUninstall.isPresent()) {
        // Remove tracked resource from both Intent and installable Intents.
        trackIntentResources(toUninstall.get(), uninstallIntents, REMOVE);

        // Retrieves all flow rules from all flow rule Intents; only rules that are
        // actually present in the flow rule store need removing.
        flowRulesToUninstall = uninstallIntents.stream()
                .map(FlowRuleIntent::flowRules)
                .flatMap(Collection::stream)
                .filter(flowRule -> flowRuleService.getFlowEntry(flowRule) != null)
                .collect(Collectors.toList());
    } else {
        // No flow rules to be uninstalled.
        flowRulesToUninstall = Collections.emptyList();
    }

    if (toInstall.isPresent()) {
        // Track resource from both Intent and installable Intents.
        trackIntentResources(toInstall.get(), installIntents, ADD);

        // Retrieves all flow rules from all flow rule Intents.
        flowRulesToInstall = installIntents.stream()
                .map(FlowRuleIntent::flowRules)
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
    } else {
        // No flow rules to be installed.
        flowRulesToInstall = Collections.emptyList();
    }

    List<FlowRule> flowRuleToModify;
    List<FlowRule> dontTouch;

    // If both uninstall/install list contained equal (=match conditions are equal) FlowRules,
    // omit it from remove list, since it will/should be overwritten by install
    flowRuleToModify = flowRulesToInstall.stream()
            .filter(flowRule -> flowRulesToUninstall.stream().anyMatch(flowRule::equals))
            .collect(Collectors.toList());

    // If both contained exactMatch-ing FlowRules, remove from both list,
    // since it will result in no-op.
    dontTouch = flowRulesToInstall.stream()
            .filter(flowRule -> flowRulesToUninstall.stream().anyMatch(flowRule::exactMatch))
            .collect(Collectors.toList());

    flowRulesToUninstall.removeAll(flowRuleToModify);
    flowRulesToUninstall.removeAll(dontTouch);
    flowRulesToInstall.removeAll(flowRuleToModify);
    flowRulesToInstall.removeAll(dontTouch);
    flowRuleToModify.removeAll(dontTouch);

    if (flowRulesToInstall.isEmpty() && flowRulesToUninstall.isEmpty() && flowRuleToModify.isEmpty()) {
        // There is no flow rules to install/uninstall
        intentInstallCoordinator.intentInstallSuccess(context);
        return;
    }

    FlowRuleOperations.Builder builder = FlowRuleOperations.builder();
    // Add flows
    flowRulesToInstall.forEach(builder::add);
    // Modify flows
    flowRuleToModify.forEach(builder::modify);
    // Remove flows
    flowRulesToUninstall.forEach(builder::remove);

    // Completion callback: relay the batch outcome to the install coordinator.
    FlowRuleOperationsContext flowRuleOperationsContext = new FlowRuleOperationsContext() {
        @Override
        public void onSuccess(FlowRuleOperations ops) {
            intentInstallCoordinator.intentInstallSuccess(context);
        }

        @Override
        public void onError(FlowRuleOperations ops) {
            intentInstallCoordinator.intentInstallFailed(context);
        }
    };

    FlowRuleOperations operations = builder.build(flowRuleOperationsContext);
    log.debug("applying intent {} -> {} with {} rules: {}",
              toUninstall.map(x -> x.key().toString()).orElse("<empty>"),
              toInstall.map(x -> x.key().toString()).orElse("<empty>"),
              operations.stages().stream().mapToLong(Set::size).sum(),
              operations.stages());
    flowRuleService.apply(operations);
}
@Test
public void testUninstallAndInstall() {
    // One compiled intent replaces another: expect old rules removed, new rules added.
    List<Intent> intentsToInstall = createAnotherFlowRuleIntents();
    List<Intent> intentsToUninstall = createFlowRuleIntents();

    IntentData toInstall = new IntentData(createP2PIntent(), IntentState.INSTALLING, new WallClockTimestamp());
    toInstall = IntentData.compiled(toInstall, intentsToInstall);
    IntentData toUninstall = new IntentData(createP2PIntent(), IntentState.INSTALLED, new WallClockTimestamp());
    toUninstall = IntentData.compiled(toUninstall, intentsToUninstall);

    IntentOperationContext<FlowRuleIntent> operationContext;
    IntentInstallationContext context = new IntentInstallationContext(toUninstall, toInstall);
    operationContext = new IntentOperationContext(intentsToUninstall, intentsToInstall, context);

    // The uninstall intents' rules must already exist in the fake flow rule service
    // for the installer to consider them removable.
    flowRuleService.load(operationContext.intentsToUninstall());
    installer.apply(operationContext);

    IntentOperationContext successContext = intentInstallCoordinator.successContext;
    assertEquals(successContext, operationContext);

    // All rules from the uninstalled intents should have been removed...
    Set<FlowRule> expectedFlowRules = intentsToUninstall.stream()
            .map(intent -> (FlowRuleIntent) intent)
            .map(FlowRuleIntent::flowRules)
            .flatMap(Collection::stream)
            .collect(Collectors.toSet());
    assertEquals(expectedFlowRules, flowRuleService.flowRulesRemove);

    // ...and all rules from the installed intents added.
    expectedFlowRules = intentsToInstall.stream()
            .map(intent -> (FlowRuleIntent) intent)
            .map(FlowRuleIntent::flowRules)
            .flatMap(Collection::stream)
            .collect(Collectors.toSet());
    assertEquals(expectedFlowRules, flowRuleService.flowRulesAdd);
}
/**
 * Creates a new {@link CookieBuilder} bound to the given request.
 * The builder presumably derives request-dependent cookie attributes (e.g. the
 * secure flag from request headers) — see CookieBuilder itself for details.
 *
 * @param request the current HTTP request
 * @return a fresh builder for constructing a cookie
 */
public static CookieBuilder newCookieBuilder(HttpRequest request) {
    return new CookieBuilder(request);
}
@Test
public void create_not_secured_cookie_when_header_is_not_http() {
    // A non-https forwarding header must yield an insecure cookie.
    when(request.getHeader(HTTPS_HEADER)).thenReturn("http");

    Cookie result = newCookieBuilder(request)
        .setName("name")
        .setValue("value")
        .setHttpOnly(true)
        .setExpiry(10)
        .build();

    assertThat(result.isSecure()).isFalse();
}
/**
 * Server-side context filter: copies the incoming invocation's attachments into the
 * server RpcContext (dropping internal keys), records invoker/invocation, remote
 * application name and the remaining timeout, then delegates to the next invoker.
 */
@Override
public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
    Map<String, Object> attachments = invocation.getObjectAttachments();
    if (attachments != null) {
        // Strip keys that must not leak across hops (UNLOADING_KEYS).
        Map<String, Object> newAttach = new HashMap<>(attachments.size());
        for (Map.Entry<String, Object> entry : attachments.entrySet()) {
            String key = entry.getKey();
            if (!UNLOADING_KEYS.contains(key)) {
                newAttach.put(key, entry.getValue());
            }
        }
        attachments = newAttach;
    }

    RpcContext.getServiceContext().setInvoker(invoker).setInvocation(invocation);

    RpcContext context = RpcContext.getServerAttachment();
    // .setAttachments(attachments) // merged from dubbox
    if (context.getLocalAddress() == null) {
        context.setLocalAddress(invoker.getUrl().getHost(), invoker.getUrl().getPort());
    }

    // Prefer the attachment on the invocation; fall back to the server context's value.
    String remoteApplication = invocation.getAttachment(REMOTE_APPLICATION_KEY);
    if (StringUtils.isNotEmpty(remoteApplication)) {
        RpcContext.getServiceContext().setRemoteApplicationName(remoteApplication);
    } else {
        RpcContext.getServiceContext().setRemoteApplicationName(context.getAttachment(REMOTE_APPLICATION_KEY));
    }

    long timeout = RpcUtils.getTimeout(invocation, -1);
    if (timeout != -1) {
        // pass to next hop
        RpcContext.getServerAttachment()
                .setObjectAttachment(
                        TIME_COUNTDOWN_KEY, TimeoutCountDown.newCountDown(timeout, TimeUnit.MILLISECONDS));
    }

    // merged from dubbox
    // we may already add some attachments into RpcContext before this filter (e.g. in rest protocol)
    if (CollectionUtils.isNotEmptyMap(attachments)) {
        if (context.getObjectAttachments().size() > 0) {
            context.getObjectAttachments().putAll(attachments);
        } else {
            context.setObjectAttachments(attachments);
        }
    }

    if (invocation instanceof RpcInvocation) {
        RpcInvocation rpcInvocation = (RpcInvocation) invocation;
        rpcInvocation.setInvoker(invoker);
    }

    try {
        // Disable auto-clearing for the duration of the call so async continuations
        // can still see the context; re-enable (and clean up) afterwards.
        context.clearAfterEachInvoke(false);
        return invoker.invoke(invocation);
    } finally {
        context.clearAfterEachInvoke(true);
        if (context.isAsyncStarted()) {
            removeContext();
        }
    }
}
@Test
void testWithAttachments() {
    URL url = URL.valueOf("test://test:11/test?group=dubbo&version=1.1");
    Invoker<DemoService> invoker = new MyInvoker<DemoService>(url);
    Invocation invocation = new MockInvocation();
    // Invoking through the filter should bind the invoker into the service context.
    Result result = contextFilter.invoke(invoker, invocation);
    assertNotNull(RpcContext.getServiceContext().getInvoker());
}
/**
 * Derives DWRF table encryption properties from raw Hive table properties.
 * Returns empty when neither whole-table nor per-column encryption is configured.
 *
 * @throws PrestoException if an encryption key is present but the algorithm or
 *     provider property is missing
 */
public static Optional<DwrfTableEncryptionProperties> fromHiveTableProperties(Map<String, String> properties)
{
    String encryptTable = properties.get(ENCRYPT_TABLE_KEY);
    String encryptColumns = properties.get(ENCRYPT_COLUMNS_KEY);

    if (encryptTable != null || encryptColumns != null) {
        // Algorithm and provider are mandatory whenever any encryption key is set.
        if (!properties.containsKey(DWRF_ENCRYPTION_ALGORITHM_KEY) || !properties.containsKey(DWRF_ENCRYPTION_PROVIDER_KEY)) {
            throw new PrestoException(
                    HIVE_INVALID_ENCRYPTION_METADATA,
                    format("Both %s and %s need to be set for DWRF encryption", DWRF_ENCRYPTION_ALGORITHM_KEY, DWRF_ENCRYPTION_PROVIDER_KEY));
        }
        // Whole-table encryption takes precedence when both keys are present —
        // TODO confirm that is the intended behavior.
        if (encryptTable != null) {
            return Optional.of(forTable(encryptTable, properties.get(DWRF_ENCRYPTION_ALGORITHM_KEY), properties.get(DWRF_ENCRYPTION_PROVIDER_KEY)));
        }
        return Optional.of(forPerColumn(
                fromHiveProperty(encryptColumns),
                properties.get(DWRF_ENCRYPTION_ALGORITHM_KEY),
                properties.get(DWRF_ENCRYPTION_PROVIDER_KEY)));
    }
    return Optional.empty();
}
@Test
public void testFromHiveTablePropertiesColumnPresent() {
    // Per-column encryption plus the mandatory algorithm/provider keys.
    Map<String, String> hiveProperties = ImmutableMap.of(
            ENCRYPT_COLUMNS_KEY, "key1:col1",
            DWRF_ENCRYPTION_ALGORITHM_KEY, "test_algo",
            DWRF_ENCRYPTION_PROVIDER_KEY, "test_prov");
    Optional<DwrfTableEncryptionProperties> encryptionProperties = fromHiveTableProperties(hiveProperties);
    assertTrue(encryptionProperties.isPresent());
    // Round-trip: converting back must reproduce the original property map.
    assertEquals(encryptionProperties.get().toHiveProperties(), hiveProperties);
}
/**
 * Decides whether a method's parameters/return value must be wrapped for Triple
 * transport. Generic ($invoke/$invokeAsync) and $echo calls are always wrapped;
 * otherwise wrapping is needed exactly when plain-Java (non-protobuf) types are
 * involved. Also validates the parameter layout and throws IllegalStateException
 * for unsupported combinations (multiple streams, multiple protobuf params,
 * mixed protobuf/plain params on streaming methods, etc.).
 */
static boolean needWrap(MethodDescriptor methodDescriptor, Class<?>[] parameterClasses, Class<?> returnClass) {
    String methodName = methodDescriptor.getMethodName();
    // generic call must be wrapped
    if (CommonConstants.$INVOKE.equals(methodName) || CommonConstants.$INVOKE_ASYNC.equals(methodName)) {
        return true;
    }
    // echo must be wrapped
    if ($ECHO.equals(methodName)) {
        return true;
    }
    boolean returnClassProtobuf = isProtobufClass(returnClass);
    // No-arg method ("Response foo()"): wrap unless the return type is protobuf.
    if (parameterClasses.length == 0) {
        return !returnClassProtobuf;
    }
    int protobufParameterCount = 0;
    int javaParameterCount = 0;
    int streamParameterCount = 0;
    boolean secondParameterStream = false;
    // count normal and protobuf param
    for (int i = 0; i < parameterClasses.length; i++) {
        Class<?> parameterClass = parameterClasses[i];
        if (isProtobufClass(parameterClass)) {
            protobufParameterCount++;
        } else {
            if (isStreamType(parameterClass)) {
                if (i == 1) {
                    secondParameterStream = true;
                }
                streamParameterCount++;
            } else {
                javaParameterCount++;
            }
        }
    }
    // more than one stream param
    if (streamParameterCount > 1) {
        throw new IllegalStateException("method params error: more than one Stream params. method=" + methodName);
    }
    // protobuf only support one param
    if (protobufParameterCount >= 2) {
        throw new IllegalStateException("method params error: more than one protobuf params. method=" + methodName);
    }
    // server stream support one normal param and one stream param
    if (streamParameterCount == 1) {
        if (javaParameterCount + protobufParameterCount > 1) {
            throw new IllegalStateException(
                    "method params error: server stream does not support more than one normal param."
                            + " method=" + methodName);
        }
        // server stream: void foo(Request, StreamObserver<Response>)
        if (!secondParameterStream) {
            throw new IllegalStateException(
                    "method params error: server stream's second param must be StreamObserver."
                            + " method=" + methodName);
        }
    }
    if (methodDescriptor.getRpcType() != MethodDescriptor.RpcType.UNARY) {
        if (MethodDescriptor.RpcType.SERVER_STREAM == methodDescriptor.getRpcType()) {
            if (!secondParameterStream) {
                throw new IllegalStateException(
                        "method params error:server stream's second param must be StreamObserver."
                                + " method=" + methodName);
            }
        }
        // param type must be consistent
        if (returnClassProtobuf) {
            if (javaParameterCount > 0) {
                throw new IllegalStateException(
                        "method params error: both normal and protobuf param found. method=" + methodName);
            }
        } else {
            if (protobufParameterCount > 0) {
                throw new IllegalStateException("method params error method=" + methodName);
            }
        }
    } else {
        if (streamParameterCount > 0) {
            throw new IllegalStateException(
                    "method params error: unary method should not contain any StreamObserver."
                            + " method=" + methodName);
        }
        // Pure protobuf request/response: no wrapping needed.
        if (protobufParameterCount > 0 && returnClassProtobuf) {
            return false;
        }
        // handler reactor or rxjava only consider gen by proto
        if (isMono(returnClass) || isRx(returnClass)) {
            return false;
        }
        // Pure plain-Java request/response: wrapping needed.
        if (protobufParameterCount <= 0 && !returnClassProtobuf) {
            return true;
        }
        // handle grpc stub only consider gen by proto
        if (GRPC_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName()) && protobufParameterCount == 1) {
            return false;
        }
        // handle dubbo generated method: inspect the CompletableFuture's type argument.
        if (TRI_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName())) {
            Class<?> actualReturnClass = (Class<?>)
                    ((ParameterizedType) methodDescriptor.getMethod().getGenericReturnType())
                            .getActualTypeArguments()[0];
            boolean actualReturnClassProtobuf = isProtobufClass(actualReturnClass);
            if (actualReturnClassProtobuf && protobufParameterCount == 1) {
                return false;
            }
            if (!actualReturnClassProtobuf && protobufParameterCount == 0) {
                return true;
            }
        }
        // todo remove this in future
        boolean ignore = checkNeedIgnore(returnClass);
        if (ignore) {
            return protobufParameterCount != 1;
        }
        throw new IllegalStateException("method params error method=" + methodName);
    }
    // Streaming path falls through here: wrap if any plain-Java param was seen.
    return javaParameterCount > 0;
}
@Test
void testIsNeedWrap() throws NoSuchMethodException {
    // A no-arg method without protobuf types must be wrapped.
    Method method = DescriptorService.class.getMethod("noParameterMethod");
    MethodDescriptor descriptor = new ReflectionMethodDescriptor(method);
    Assertions.assertTrue(needWrap(descriptor));
    // A method taking a protobuf message needs no wrapping.
    method = DescriptorService.class.getMethod("sayHello", HelloReply.class);
    descriptor = new ReflectionMethodDescriptor(method);
    Assertions.assertFalse(needWrap(descriptor));
}
/**
 * Builds a ConfigurationProperty from user input. A property may carry either a
 * plain value or an encrypted value, never both; secure plain values are encrypted
 * before being stored, and encrypted input on an unsecured property is an error.
 *
 * @param key the configuration key name
 * @param value plain-text value, may be null/blank
 * @param encryptedValue already-encrypted value, may be null/blank
 * @param isSecure whether the property is declared secure. NOTE(review): this is a
 *     Boolean — passing null would NPE on unboxing; presumably callers always pass
 *     a non-null value — TODO confirm.
 * @return the property, possibly carrying validation errors
 */
public ConfigurationProperty create(String key, String value, String encryptedValue, Boolean isSecure) {
    ConfigurationProperty configurationProperty = new ConfigurationProperty();
    configurationProperty.setConfigurationKey(new ConfigurationKey(key));

    // Supplying both forms is ambiguous: record errors on both fields but still
    // populate them so the invalid input can be rendered back to the user.
    if (isNotBlank(value) && isNotBlank(encryptedValue)) {
        configurationProperty.addError("configurationValue", "You may only specify `value` or `encrypted_value`, not both!");
        configurationProperty.addError("encryptedValue", "You may only specify `value` or `encrypted_value`, not both!");
        configurationProperty.setConfigurationValue(new ConfigurationValue(value));
        configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encryptedValue));
        return configurationProperty;
    }

    if (isSecure) {
        if (isNotBlank(encryptedValue)) {
            configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encryptedValue));
        }
        if (isNotBlank(value)) {
            // Secure plain-text input is encrypted before being stored.
            configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encrypt(value)));
        }
    } else {
        if (isNotBlank(encryptedValue)) {
            configurationProperty.addError("encryptedValue", "encrypted_value cannot be specified to a unsecured property.");
            configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encryptedValue));
        }
        if (value != null) {
            configurationProperty.setConfigurationValue(new ConfigurationValue(value));
        }
    }

    // NOTE(review): this re-wraps the already-set encrypted value in a fresh instance;
    // it appears redundant with the assignments above — TODO confirm it can be removed.
    if (isNotBlank(configurationProperty.getEncryptedValue())) {
        configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(configurationProperty.getEncryptedValue()));
    }
    return configurationProperty;
}
@Test
public void shouldCreatePropertyInAbsenceOfPlainAndEncryptedTextInputForSecureProperty() throws Exception {
    Property key = new Property("key");
    key.with(Property.SECURE, true);

    // Neither a plain nor an encrypted value supplied: the property is created
    // with no value at all and no validation errors.
    ConfigurationProperty property = new ConfigurationPropertyBuilder().create("key", null, null, true);

    assertThat(property.errors().size(), is(0));
    assertThat(property.getConfigKeyName(), is("key"));
    assertNull(property.getEncryptedConfigurationValue());
    assertNull(property.getConfigurationValue());
}
/**
 * Returns basic information about the Timeline Reader web service as JSON.
 *
 * @param req the incoming request (injected; unused here)
 * @param res the response, passed to init() for common response setup
 * @return a TimelineAbout describing this API
 */
@GET
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8)
public TimelineAbout about(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res) {
    init(res);
    return TimelineUtils.createTimelineAbout("Timeline Reader API");
}
@Test
void testAbout() throws Exception {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/");
    Client client = createClient();
    try {
        // The root endpoint should describe the Timeline Reader API.
        ClientResponse resp = getResponse(client, uri);
        TimelineAbout about = resp.getEntity(TimelineAbout.class);
        assertNotNull(about);
        assertEquals("Timeline Reader API", about.getAbout());
    } finally {
        // Always release the Jersey client's resources.
        client.destroy();
    }
}
/**
 * Extracts the entry identifier (low 16 bits) from a full Android resource id,
 * discarding the package and type components.
 *
 * @param resId the full 32-bit resource id
 * @return the 16-bit entry identifier
 */
public static int getEntryIdentifier(int resId) {
    return resId & 0xFFFF;
}
@Test
public void testGetEntryIdentifier() {
    // Only the low 16 bits survive; package/type identifiers are masked away.
    assertThat(ResourceIds.getEntryIdentifier(0x7F781234)).isEqualTo(0x1234);
    assertThat(ResourceIds.getEntryIdentifier(0x01019876)).isEqualTo(0x9876);
}
/**
 * Decodes an HTTP response body into a JSONObject, JSONArray or String.
 * 404 and 204 responses short-circuit to an empty container (or null for String);
 * empty or absent bodies decode to null.
 *
 * @throws DecodeException for unsupported target types or malformed JSON
 * @throws IOException if reading the body fails
 */
@Override
public Object decode(Response response, Type type) throws IOException, DecodeException {
    // 404/204 carry no usable body: return an empty JSON container instead of parsing.
    if (response.status() == 404 || response.status() == 204)
        if (JSONObject.class.isAssignableFrom((Class<?>) type))
            return new JSONObject();
        else if (JSONArray.class.isAssignableFrom((Class<?>) type))
            return new JSONArray();
        else if (String.class.equals(type))
            return null;
        else
            throw new DecodeException(
                response.status(),
                format("%s is not a type supported by this decoder.", type),
                response.request());
    if (response.body() == null)
        return null;
    // Ensure mark/reset support so we can probe for an empty body without consuming it.
    try (Reader reader = response.body().asReader(response.charset())) {
        Reader bodyReader = (reader.markSupported()) ? reader : new BufferedReader(reader);
        bodyReader.mark(1);
        if (bodyReader.read() == -1) {
            return null; // Empty body
        }
        bodyReader.reset();
        return decodeBody(response, type, bodyReader);
    } catch (JSONException jsonException) {
        // org.json wraps I/O failures in JSONException; unwrap so callers see the IOException.
        if (jsonException.getCause() != null && jsonException.getCause() instanceof IOException) {
            throw (IOException) jsonException.getCause();
        }
        throw new DecodeException(response.status(), jsonException.getMessage(), response.request(), jsonException);
    }
}
@Test
void nullBodyDecodesToEmpty() throws IOException {
    // A 204 response has no body; the decoder must map it to an empty JSONObject.
    Response response = Response.builder()
        .status(204)
        .reason("OK")
        .headers(Collections.emptyMap())
        .request(request)
        .build();
    assertThat(((JSONObject) new JsonDecoder().decode(response, JSONObject.class)).isEmpty())
        .isTrue();
}
/**
 * Demo entry point: prints the contents of both multiton variants — an eagerly
 * initialized map-backed multiton (Nazgul) and an enum-based one (NazgulEnum) —
 * showing each named instance is a stable singleton per key.
 *
 * @param args command line args (unused)
 */
public static void main(String[] args) {
    // eagerly initialized multiton
    LOGGER.info("Printing out eagerly initialized multiton contents");
    LOGGER.info("KHAMUL={}", Nazgul.getInstance(NazgulName.KHAMUL));
    LOGGER.info("MURAZOR={}", Nazgul.getInstance(NazgulName.MURAZOR));
    LOGGER.info("DWAR={}", Nazgul.getInstance(NazgulName.DWAR));
    LOGGER.info("JI_INDUR={}", Nazgul.getInstance(NazgulName.JI_INDUR));
    LOGGER.info("AKHORAHIL={}", Nazgul.getInstance(NazgulName.AKHORAHIL));
    LOGGER.info("HOARMURATH={}", Nazgul.getInstance(NazgulName.HOARMURATH));
    LOGGER.info("ADUNAPHEL={}", Nazgul.getInstance(NazgulName.ADUNAPHEL));
    LOGGER.info("REN={}", Nazgul.getInstance(NazgulName.REN));
    LOGGER.info("UVATHA={}", Nazgul.getInstance(NazgulName.UVATHA));

    // enum multiton
    LOGGER.info("Printing out enum-based multiton contents");
    LOGGER.info("KHAMUL={}", NazgulEnum.KHAMUL);
    LOGGER.info("MURAZOR={}", NazgulEnum.MURAZOR);
    LOGGER.info("DWAR={}", NazgulEnum.DWAR);
    LOGGER.info("JI_INDUR={}", NazgulEnum.JI_INDUR);
    LOGGER.info("AKHORAHIL={}", NazgulEnum.AKHORAHIL);
    LOGGER.info("HOARMURATH={}", NazgulEnum.HOARMURATH);
    LOGGER.info("ADUNAPHEL={}", NazgulEnum.ADUNAPHEL);
    LOGGER.info("REN={}", NazgulEnum.REN);
    LOGGER.info("UVATHA={}", NazgulEnum.UVATHA);
}
@Test
void shouldExecuteApplicationWithoutException() {
    // Smoke test: running the demo entry point must not throw.
    assertDoesNotThrow(() -> App.main(new String[0]));
}
/**
 * Translates the pipeline with the debug visitor instead of executing it, and
 * returns a result carrying the textual pseudo-Spark representation.
 */
@Override
public SparkPipelineResult run(Pipeline pipeline) {
    boolean isStreaming =
        options.isStreaming() || options.as(TestSparkPipelineOptions.class).isForceStreaming();

    // Default to using the primitive versions of Read.Bounded and Read.Unbounded.
    // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
    // performance issue.
    if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
        SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
    }

    JavaSparkContext jsc =
        SparkContextFactory.getSparkContext(pipeline.getOptions().as(SparkPipelineOptions.class));
    JavaStreamingContext jssc =
        new JavaStreamingContext(jsc, new org.apache.spark.streaming.Duration(1000));

    SparkRunner.initAccumulators(options, jsc);

    TransformTranslator.Translator translator = new TransformTranslator.Translator();

    SparkNativePipelineVisitor visitor;
    if (isStreaming) {
        // Streaming pipelines go through the streaming transform translator.
        SparkPipelineTranslator streamingTranslator =
            new StreamingTransformTranslator.Translator(translator);
        EvaluationContext ctxt = new EvaluationContext(jsc, pipeline, options, jssc);
        visitor = new SparkNativePipelineVisitor(streamingTranslator, ctxt);
    } else {
        EvaluationContext ctxt = new EvaluationContext(jsc, pipeline, options, jssc);
        visitor = new SparkNativePipelineVisitor(translator, ctxt);
    }

    // Walk the pipeline to build the debug representation; nothing is actually run.
    pipeline.traverseTopologically(visitor);

    SparkContextFactory.stopSparkContext(jsc);

    String debugString = visitor.getDebugString();
    LOG.info("Translated Native Spark pipeline:\n{}", debugString);
    return new DebugSparkPipelineResult(debugString);
}
@Test
public void debugBatchPipelineWithContextfulTransform() {
    PipelineOptions options = contextRule.configure(PipelineOptionsFactory.create());
    options.setRunner(SparkRunnerDebugger.class);
    Pipeline pipeline = Pipeline.create(options);

    // Side input consumed by the contextful MapElements below.
    final PCollectionView<Integer> view = pipeline.apply("Dummy", Create.of(0)).apply(View.asSingleton());

    pipeline
        .apply(Create.of(Arrays.asList(0)))
        .setCoder(VarIntCoder.of())
        .apply(
            MapElements.into(new TypeDescriptor<Integer>() {})
                .via(fn((element, c) -> element, requiresSideInputs(view))));

    SparkRunnerDebugger.DebugSparkPipelineResult result =
        (SparkRunnerDebugger.DebugSparkPipelineResult) pipeline.run();

    // The debug runner renders the translated pipeline as pseudo Spark API calls.
    final String expectedPipeline =
        "sparkContext.<impulse>()\n"
            + "_.mapPartitions(new org.apache.beam.sdk.transforms.Create$Values$2())\n"
            + "_.aggregate(..., new org.apache.beam.sdk.transforms.View$SingletonCombineFn(), ...)\n"
            + "_.<createPCollectionView>\n"
            + "sparkContext.<impulse>()\n"
            + "_.mapPartitions(new org.apache.beam.sdk.transforms.Create$Values$2())\n"
            + "_.mapPartitions(new org.apache.beam.sdk.transforms.Contextful())";

    assertThat(
        "Debug pipeline did not equal expected",
        result.getDebugString(),
        Matchers.equalTo(expectedPipeline));
}
/**
 * Synchronously parses an XML document from the given stream by delegating to
 * the asynchronous reader and blocking on its result.
 *
 * @param stream the XML input; must not be null
 * @return the parsed document
 * @throws ExecutionException if the asynchronous parse failed
 * @throws InterruptedException if interrupted while waiting for the result
 */
public static Document readDocument(@Nonnull final InputStream stream) throws ExecutionException, InterruptedException {
    return readDocumentAsync(stream).get();
}
@Test public void testReader() throws Exception { // Setup test fixture. final Reader input = new StringReader("<foo><bar>test</bar></foo>"); // Execute system under test. final Document output = SAXReaderUtil.readDocument(input); // Verify result. assertNotNull(output); assertEquals("foo", output.getRootElement().getName()); assertNotNull(output.getRootElement().element("bar")); assertEquals("test", output.getRootElement().elementText("bar")); }
/**
 * Returns this node's information, serialized as UTF-8 JSON or XML depending on
 * content negotiation.
 *
 * @return the node info payload
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public NodeInfo get() {
    return getNodeInfo();
}
@Test
public void testNode() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("node")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    // The endpoint must respond with UTF-8 encoded JSON.
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    JSONObject json = response.getEntity(JSONObject.class);
    verifyNodeInfo(json);
}
/**
 * Tracks item charges by matching game/spam chat messages against the known
 * check/use/break message patterns for each charged item, then updating the
 * stored charge counts (and firing notifications on breakage). The else-if
 * chain's order matters: break messages are handled before check/use messages
 * where both could match.
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
	if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM)
	{
		String message = Text.removeTags(event.getMessage());
		// One matcher per recognized charge-related message format.
		Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message);
		Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message);
		Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message);
		Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message);
		Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message);
		Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message);
		Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message);
		Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message);
		Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message);
		Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message);
		Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message);
		Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message);
		Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message);
		Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message);
		Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message);
		Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message);
		Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message);
		Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message);
		Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message);
		Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message);

		if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE))
		{
			notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered");
		}
		else if (dodgyBreakMatcher.find())
		{
			notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust.");
			updateDodgyNecklaceCharges(MAX_DODGY_CHARGES);
		}
		else if (dodgyCheckMatcher.find())
		{
			updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1)));
		}
		else if (dodgyProtectMatcher.find())
		{
			updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1)));
		}
		else if (amuletOfChemistryCheckMatcher.find())
		{
			updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1)));
		}
		else if (amuletOfChemistryUsedMatcher.find())
		{
			// The used message spells out "one" instead of the digit 1.
			final String match = amuletOfChemistryUsedMatcher.group(1);
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateAmuletOfChemistryCharges(charges);
		}
		else if (amuletOfChemistryBreakMatcher.find())
		{
			notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust.");
			updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES);
		}
		else if (amuletOfBountyCheckMatcher.find())
		{
			updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1)));
		}
		else if (amuletOfBountyUsedMatcher.find())
		{
			updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1)));
		}
		else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT))
		{
			updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES);
		}
		else if (message.contains(BINDING_BREAK_TEXT))
		{
			notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT);

			// This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
			updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1);
		}
		else if (bindingNecklaceUsedMatcher.find())
		{
			// Only decrement when the necklace is actually equipped.
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
			if (equipment.contains(ItemID.BINDING_NECKLACE))
			{
				updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1);
			}
		}
		else if (bindingNecklaceCheckMatcher.find())
		{
			// The check message spells out "one" instead of the digit 1.
			final String match = bindingNecklaceCheckMatcher.group(1);

			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}

			updateBindingNecklaceCharges(charges);
		}
		else if (ringOfForgingCheckMatcher.find())
		{
			final String match = ringOfForgingCheckMatcher.group(1);

			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}

			updateRingOfForgingCharges(charges);
		}
		else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY))
		{
			final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);

			// Determine if the player smelted with a Ring of Forging equipped.
			if (equipment == null)
			{
				return;
			}
			if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1))
			{
				int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES);
				updateRingOfForgingCharges(charges);
			}
		}
		else if (message.equals(RING_OF_FORGING_BREAK_TEXT))
		{
			notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted.");

			// This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
			updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1);
		}
		else if (chronicleAddMatcher.find())
		{
			final String match = chronicleAddMatcher.group(1);

			if (match.equals("one"))
			{
				setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
			}
			else
			{
				setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match));
			}
		}
		else if (chronicleUseAndCheckMatcher.find())
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1)));
		}
		else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
		}
		else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0);
		}
		else if (message.equals(CHRONICLE_FULL_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000);
		}
		else if (slaughterActivateMatcher.find())
		{
			// A null capture group means the bracelet broke; otherwise it holds the new count.
			final String found = slaughterActivateMatcher.group(1);
			if (found == null)
			{
				updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES);
				notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT);
			}
			else
			{
				updateBraceletOfSlaughterCharges(Integer.parseInt(found));
			}
		}
		else if (slaughterCheckMatcher.find())
		{
			updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1)));
		}
		else if (expeditiousActivateMatcher.find())
		{
			// Same null-capture convention as the slaughter bracelet above.
			final String found = expeditiousActivateMatcher.group(1);
			if (found == null)
			{
				updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES);
				notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT);
			}
			else
			{
				updateExpeditiousBraceletCharges(Integer.parseInt(found));
			}
		}
		else if (expeditiousCheckMatcher.find())
		{
			updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1)));
		}
		else if (bloodEssenceCheckMatcher.find())
		{
			updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1)));
		}
		else if (bloodEssenceExtractMatcher.find())
		{
			updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1)));
		}
		else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT))
		{
			updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES);
		}
		else if (braceletOfClayCheckMatcher.find())
		{
			updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1)));
		}
		else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN))
		{
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);

			// Determine if the player mined with a Bracelet of Clay equipped.
			if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY))
			{
				final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);

				// Charge is not used if only 1 inventory slot is available when mining in Prifddinas
				boolean ignore = inventory != null
					&& inventory.count() == 27
					&& message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN);

				if (!ignore)
				{
					int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES);
					updateBraceletOfClayCharges(charges);
				}
			}
		}
		else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT))
		{
			notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust");
			updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES);
		}
	}
}
@Test
public void testDodgyProtect() {
    // A dodgy-necklace "protect" message should sync the stored charge count to 9.
    final ChatMessage protectMessage =
        new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", PROTECT, "", 0);

    itemChargePlugin.onChatMessage(protectMessage);

    verify(configManager).setRSProfileConfiguration(
        ItemChargeConfig.GROUP, ItemChargeConfig.KEY_DODGY_NECKLACE, 9);
}