focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Accessor for the query component held by this object.
public Query query() { return query; }
// End-to-end exercise of Query: parse/empty construction, lastEntries (last value per
// name, null for value-less params), remove by name and by name set, set (replace) vs
// add (append, multi-valued), entries() grouping all values per name in first-seen
// order, toString rendering ("query '...'" / "no query"), and NPE on null names/values.
@Test void testQuery() { Query query = Query.parse("foo=bar&baz", Name::of); Map<String, String> expected = new LinkedHashMap<>(); expected.put("foo", "bar"); expected.put("baz", null); assertEquals(expected, query.lastEntries()); expected.remove("baz"); assertEquals(expected, query.remove("baz").lastEntries()); expected.put("baz", null); expected.remove("foo"); assertEquals(expected, query.remove("foo").lastEntries()); assertEquals(expected, Query.empty(Name::of).set("baz").lastEntries()); assertEquals("query 'foo=bar&baz=bax&quu=fez&moo'", query.set("baz", "bax").set(Map.of("quu", "fez")).set("moo").toString()); Query bloated = query.add("baz", "bax").add(Map.of("quu", List.of("fez", "pop"))).add("moo").add("moo").add("foo", "bar"); List<String> foos = new ArrayList<>(); foos.add("bar"); foos.add("bar"); List<String> bazs = new ArrayList<>(); bazs.add(null); bazs.add("bax"); List<String> quus = new ArrayList<>(); quus.add("fez"); quus.add("pop"); List<String> moos = new ArrayList<>(); moos.add(null); moos.add(null); assertEquals(List.of(Map.entry("foo", foos), Map.entry("baz", bazs), Map.entry("quu", quus), Map.entry("moo", moos)), new ArrayList<>(bloated.entries().entrySet())); Map<String, String> last = new LinkedHashMap<>(); last.put("foo", "bar"); last.put("baz", "bax"); last.put("quu", "pop"); last.put("moo", null); assertEquals(new ArrayList<>(last.entrySet()), new ArrayList<>(bloated.lastEntries().entrySet())); assertEquals("query 'foo=bar&baz&baz=bax&quu=fez&quu=pop&moo&moo&foo=bar'", bloated.toString()); assertEquals("query 'foo=bar&quu=fez&quu=pop&moo&moo&foo=bar'", bloated.remove("baz").toString()); assertEquals("query 'baz&baz=bax&quu=fez&quu=pop&moo&moo'", bloated.remove("foo").toString()); assertEquals("query 'foo=bar&baz&baz=bax&quu=fez&quu=pop&foo=bar&moo'", bloated.set("moo").toString()); assertEquals("no query", bloated.remove(last.keySet()).toString()); assertThrows(NullPointerException.class, () -> query.remove((String) null)); 
assertThrows(NullPointerException.class, () -> query.add((String) null)); assertThrows(NullPointerException.class, () -> query.add(null, "hax")); assertThrows(NullPointerException.class, () -> query.add("hax", null)); Map<String, String> names = new LinkedHashMap<>(); names.put(null, "hax"); assertThrows(NullPointerException.class, () -> query.set(names)); }
// Performs the job on the current worker thread: marks the steward thread occupied,
// restores the job's saved MDC logging context, then runs the job. Failures map to:
//  - job deleted while processing: nothing left to update;
//  - background server stopped: mark failed and re-interrupt the thread;
//  - job method not found: mark failed with "Job method not found";
//  - anything else: mark failed with a generic message.
// The finally block always notifies the steward the thread is idle and clears the MDC.
@Override public void run() { try { backgroundJobServer.getJobSteward().notifyThreadOccupied(); MDCMapper.loadMDCContextFromJob(job); performJob(); } catch (Exception e) { if (isJobDeletedWhileProcessing(e)) { // nothing to do anymore as Job is deleted return; } else if (isJobServerStopped(e)) { updateJobStateToFailedAndRunJobFilters("Job processing was stopped as background job server has stopped", e); Thread.currentThread().interrupt(); } else if (isJobNotFoundException(e)) { updateJobStateToFailedAndRunJobFilters("Job method not found", e); } else { updateJobStateToFailedAndRunJobFilters("An exception occurred during the performance of the job", e); } } finally { backgroundJobServer.getJobSteward().notifyThreadIdle(); MDC.clear(); } }
// Verifies that MDC context saved onto the job is present (alongside jobrunr.jobId /
// jobrunr.jobName) in the success debug log, and that the performer clears the MDC after.
@Test void mdcIsAlsoAvailableDuringLoggingOfJobSuccess() throws Exception { // GIVEN Job job = anEnqueuedJob().build(); MDC.put("testKey", "testValue"); MDCMapper.saveMDCContextToJob(job); BackgroundJobRunner runner = mock(BackgroundJobRunner.class); when(backgroundJobServer.getBackgroundJobRunner(job)).thenReturn(runner); BackgroundJobPerformer backgroundJobPerformer = new BackgroundJobPerformer(backgroundJobServer, job); ListAppender logger = LoggerAssert.initFor(backgroundJobPerformer); // WHEN backgroundJobPerformer.run(); // THEN assertThat(logger) .hasDebugMessageContaining( "Job(id=" + job.getId() + ", jobName='" + job.getJobName() + "') processing succeeded", Map.of( "jobrunr.jobId", job.getId().toString(), "jobrunr.jobName", job.getJobName(), "testKey", "testValue" )); assertThat(MDC.getCopyOfContextMap()).isNullOrEmpty(); // backgroundJobPerformer clears MDC Context }
// Builds the per-error tally for this response. A push-telemetry response carries a
// single top-level error code, so the map holds at most one entry.
@Override public Map<Errors, Integer> errorCounts() {
    HashMap<Errors, Integer> tally = new HashMap<>();
    Errors topLevelError = Errors.forCode(data.errorCode());
    updateErrorCounts(tally, topLevelError);
    return tally;
}
// Verifies errorCounts() reports exactly one entry for the response's error code.
// Fix: the previous version set the error code twice; the first value
// (CLUSTER_AUTHORIZATION_FAILED) was a dead assignment, immediately overwritten.
@Test
public void testErrorCountsReturnsOneError() {
    PushTelemetryResponseData data = new PushTelemetryResponseData()
        .setErrorCode(Errors.INVALID_CONFIG.code());
    PushTelemetryResponse response = new PushTelemetryResponse(data);
    assertEquals(Collections.singletonMap(Errors.INVALID_CONFIG, 1), response.errorCounts());
}
// Runs `df -k -P -T <journal path>` and returns its raw output; package-visible and
// overridable so tests can stub the shell call.
@VisibleForTesting CommandReturn getRawDiskInfo() throws IOException { return ShellUtils.execCommandWithOutput("df", "-k", "-P", "-T", mJournalPath); }
// Verifies that an IOException from the raw `df` call propagates out of getDiskInfo().
@Test public void testFailedInfo() throws IOException { JournalSpaceMonitor monitor = Mockito.spy( new JournalSpaceMonitor(Paths.get(".").toAbsolutePath().toString(), 10)); doThrow(new IOException("couldnt run")).when(monitor).getRawDiskInfo(); assertThrows(IOException.class, monitor::getDiskInfo); }
// Convenience overload delegating to execCommand(cmd, timeout) with -1
// (presumably "no timeout" — confirm against the two-arg overload's contract).
public static String execCommand(String... cmd) throws IOException { return execCommand(cmd, -1); }
// Reads 100000 bytes from /dev/zero via `head -c` and checks the captured output length
// matches exactly (i.e. execCommand returns the full, untruncated stdout).
@Test public void testHeadDevZero() throws Exception { final int length = 100000; String output = Shell.execCommand("head", "-c", Integer.toString(length), "/dev/zero"); assertEquals(length, output.length()); }
public FEELFnResult<List<Object>> invoke(@ParameterName( "list" ) Object list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } // spec requires us to return a new list final List<Object> result = new ArrayList<>(); if ( list instanceof Collection ) { for (Object o : (Collection) list) { if ( !result.contains( o ) ) { result.add(o); } } } else { result.add( list ); } return FEELFnResult.ofResult( result ); }
// A non-collection argument is wrapped into a singleton result list.
@Test void invokeParamNotCollection() { FunctionTestUtil.assertResultList( distinctValuesFunction.invoke(BigDecimal.valueOf(10.1)), Collections.singletonList(BigDecimal.valueOf(10.1))); }
// Lazily resolves and caches the hippo4j home directory: the cached value wins,
// then the HIPPO4J_HOME_KEY system property, then <user.home>/hippo4j as fallback.
public static String getHippo4jHome() {
    if (StringUtil.isNotBlank(hippo4jHomePath)) {
        return hippo4jHomePath;
    }
    String resolved = System.getProperty(HIPPO4J_HOME_KEY);
    if (StringUtil.isBlank(resolved)) {
        resolved = Paths.get(System.getProperty("user.home"), "hippo4j").toString();
    }
    hippo4jHomePath = resolved;
    return hippo4jHomePath;
}
// Sanity check: the resolved home path is never blank (property or user-home fallback).
@Test public void getHippo4jHomeTest() { String hippo4jHome = EnvUtil.getHippo4jHome(); Assert.isTrue(StringUtil.isNotBlank(hippo4jHome)); }
// Decides whether two same-direction, initial (not-yet-iterated) pointers overlap —
// or are adjacent with at least one inclusive end — so they can be merged.
// Precondition (asserted): left.from <= right.from under the given comparator.
// null ends stand for +/- infinity, which trivially overlap.
public static boolean overlapsOrdered(IndexIterationPointer left, IndexIterationPointer right, Comparator comparator) { assert left.isDescending() == right.isDescending() : "Cannot compare pointer with different directions"; assert left.lastEntryKeyData == null && right.lastEntryKeyData == null : "Can merge only initial pointers"; // fast path for the same instance if (left == right) { return true; } assert comparator.compare(left.from, right.from) <= 0 : "Pointers must be ordered"; // if one of the ends is +/-inf respectively -> overlap if (left.to == null || right.from == null) { return true; } // if given end is equal the ranges overlap (or at least are adjacent) // if at least one of the ranges is inclusive boolean eqOverlaps = left.isToInclusive() || right.isFromInclusive(); // Check non-inf values, do not need to check the other way around because pointers are ordered // Thanks to order we do not have to check `right.to`, we only need to check // if `right.from` belongs to `left` pointer range. // we must take into account inclusiveness, so we do not merge < X and > X ranges int rfCmpLt = comparator.compare(right.from, left.to); return eqOverlaps ? rfCmpLt <= 0 : rfCmpLt < 0; }
// IS NULL pointers overlap themselves and are adjacent to IS NOT NULL, but do not
// overlap a concrete singleton range — in both ascending and descending direction.
@Test void overlapsIsNull() { assertTrue(overlapsOrdered(IS_NULL, IS_NULL, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR), "IS NULL should overlap with itself"); assertTrue(overlapsOrdered(IS_NULL_DESC, IS_NULL_DESC, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR), "IS NULL should overlap with itself"); assertFalse(overlapsOrdered(IS_NULL, pointer(singleton(5)), OrderedIndexStore.SPECIAL_AWARE_COMPARATOR), "IS NULL should not overlap with singleton"); assertFalse(overlapsOrdered(IS_NULL_DESC, pointer(singleton(5), true), OrderedIndexStore.SPECIAL_AWARE_COMPARATOR), "IS NULL should not overlap with singleton"); assertTrue(overlapsOrdered(IS_NULL, IS_NOT_NULL, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR), "IS NULL should overlap with NOT NULL (they are adjacent)"); assertTrue(overlapsOrdered(IS_NULL_DESC, IS_NOT_NULL_DESC, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR), "IS NULL should overlap with NOT NULL (they are adjacent)"); }
// Loads an additional servlet from its NAR archive: builds a NarClassLoader over the
// archive, reads the servlet definition, instantiates the declared class via its
// no-arg constructor, and verifies it implements AdditionalServlet. A blank class
// name is an IOException; any instantiation failure is rethrown via rethrowIOException
// (the trailing `return null` is unreachable in practice — rethrowIOException throws).
public AdditionalServletWithClassLoader load( AdditionalServletMetadata metadata, String narExtractionDirectory) throws IOException { final File narFile = metadata.getArchivePath().toAbsolutePath().toFile(); NarClassLoader ncl = NarClassLoaderBuilder.builder() .narFile(narFile) .parentClassLoader(AdditionalServlet.class.getClassLoader()) .extractionDirectory(narExtractionDirectory) .build(); AdditionalServletDefinition def = getAdditionalServletDefinition(ncl); if (StringUtils.isBlank(def.getAdditionalServletClass())) { throw new IOException("Additional servlets `" + def.getName() + "` does NOT provide an " + "additional servlets implementation"); } try { Class additionalServletClass = ncl.loadClass(def.getAdditionalServletClass()); Object additionalServlet = additionalServletClass.getDeclaredConstructor().newInstance(); if (!(additionalServlet instanceof AdditionalServlet)) { throw new IOException("Class " + def.getAdditionalServletClass() + " does not implement additional servlet interface"); } AdditionalServlet servlet = (AdditionalServlet) additionalServlet; return new AdditionalServletWithClassLoader(servlet, ncl); } catch (Throwable t) { rethrowIOException(t); return null; } }
// A definition pointing at a class that does not implement AdditionalServlet
// (Runnable here) must fail the load with IOException. NarClassLoaderBuilder is
// statically mocked so no real NAR file is needed.
@Test(expectedExceptions = IOException.class) public void testLoadEventListenerWithWrongListenerClass() throws Exception { AdditionalServletDefinition def = new AdditionalServletDefinition(); def.setAdditionalServletClass(Runnable.class.getName()); def.setDescription("test-proxy-listener"); String archivePath = "/path/to/proxy/listener/nar"; AdditionalServletMetadata metadata = new AdditionalServletMetadata(); metadata.setDefinition(def); metadata.setArchivePath(Paths.get(archivePath)); NarClassLoader mockLoader = mock(NarClassLoader.class); when(mockLoader.getServiceDefinition(eq(AdditionalServletUtils.ADDITIONAL_SERVLET_FILE))) .thenReturn(ObjectMapperFactory.getYamlMapper().writer().writeValueAsString(def)); Class listenerClass = Runnable.class; when(mockLoader.loadClass(eq(Runnable.class.getName()))) .thenReturn(listenerClass); final NarClassLoaderBuilder mockedBuilder = mock(NarClassLoaderBuilder.class, RETURNS_SELF); when(mockedBuilder.build()).thenReturn(mockLoader); try (MockedStatic<NarClassLoaderBuilder> builder = Mockito.mockStatic(NarClassLoaderBuilder.class)) { builder.when(() -> NarClassLoaderBuilder.builder()).thenReturn(mockedBuilder); AdditionalServletUtils.load(metadata, ""); } }
// Rounds v to the given decimal scale using HALF_UP (delegates to the three-arg overload).
public static BigDecimal round(double v, int scale) { return round(v, scale, RoundingMode.HALF_UP); }
// Exercises roundStr on both double and String inputs: round-down, round-up,
// HALF_EVEN banker's rounding, and zero-padding to the requested scale.
@Test public void roundTest() { // 四舍 final String round1 = NumberUtil.roundStr(2.674, 2); final String round2 = NumberUtil.roundStr("2.674", 2); assertEquals("2.67", round1); assertEquals("2.67", round2); // 五入 final String round3 = NumberUtil.roundStr(2.675, 2); final String round4 = NumberUtil.roundStr("2.675", 2); assertEquals("2.68", round3); assertEquals("2.68", round4); // 四舍六入五成双 final String round31 = NumberUtil.roundStr(4.245, 2, RoundingMode.HALF_EVEN); final String round41 = NumberUtil.roundStr("4.2451", 2, RoundingMode.HALF_EVEN); assertEquals("4.24", round31); assertEquals("4.25", round41); // 补0 final String round5 = NumberUtil.roundStr(2.6005, 2); final String round6 = NumberUtil.roundStr("2.6005", 2); assertEquals("2.60", round5); assertEquals("2.60", round6); // 补0 final String round7 = NumberUtil.roundStr(2.600, 2); final String round8 = NumberUtil.roundStr("2.600", 2); assertEquals("2.60", round7); assertEquals("2.60", round8); }
// Entry point of the optimizer: rewrites the plan tree with a fresh Rewriter.
// variableAllocator is unused by this implementation.
@Override public PlanNode optimize( PlanNode maxSubplan, ConnectorSession session, VariableAllocator variableAllocator, PlanNodeIdAllocator idAllocator) { return rewriteWith(new Rewriter(session, idAllocator), maxSubplan); }
// A fully pushdown-able arithmetic filter "(c1 + c2) - c2" over a JDBC scan should be
// rewritten into a table layout carrying the equivalent JdbcExpression.
@Test public void testJdbcComputePushdownAll() { String table = "test_table"; String schema = "test_schema"; String expression = "(c1 + c2) - c2"; TypeProvider typeProvider = TypeProvider.copyOf(ImmutableMap.of("c1", BIGINT, "c2", BIGINT)); RowExpression rowExpression = sqlToRowExpressionTranslator.translateAndOptimize(expression(expression), typeProvider); Set<ColumnHandle> columns = Stream.of("c1", "c2").map(TestJdbcComputePushdown::integerJdbcColumnHandle).collect(Collectors.toSet()); PlanNode original = filter(jdbcTableScan(schema, table, BIGINT, "c1", "c2"), rowExpression); JdbcTableHandle jdbcTableHandle = new JdbcTableHandle(CONNECTOR_ID, new SchemaTableName(schema, table), CATALOG_NAME, schema, table); ConnectorSession session = new TestingConnectorSession(ImmutableList.of()); JdbcTableLayoutHandle jdbcTableLayoutHandle = new JdbcTableLayoutHandle(session.getSqlFunctionProperties(), jdbcTableHandle, TupleDomain.none(), Optional.of(new JdbcExpression("(('c1' + 'c2') - 'c2')"))); PlanNode actual = this.jdbcComputePushdown.optimize(original, session, null, ID_ALLOCATOR); assertPlanMatch( actual, PlanMatchPattern.filter(expression, JdbcTableScanMatcher.jdbcTableScanPattern(jdbcTableLayoutHandle, columns))); }
// Resolves the lowercase service name: for a web service with known software, the
// software name wins (e.g. "wordpress"); otherwise the raw service name is used.
public static String getServiceName(NetworkService networkService) {
    boolean preferSoftwareName = isWebService(networkService) && networkService.hasSoftware();
    String name = preferSoftwareName
        ? networkService.getSoftware().getName()
        : networkService.getServiceName();
    return Ascii.toLowerCase(name);
}
// A web service ("http") carrying software info reports the lowercased software name.
@Test public void getServiceName_whenWebServiceWithSoftware_returnsServiceName() { assertThat( NetworkServiceUtils.getServiceName( NetworkService.newBuilder() .setNetworkEndpoint(forIpAndPort("127.0.0.1", 22)) .setServiceName("http") .setSoftware(Software.newBuilder().setName("WordPress")) .build())) .isEqualTo("wordpress"); }
// Returns the distinct values of `left` (first-occurrence order) that do not appear in
// `right`; null if either input is null. Single pass: `seen` enforces distinctness,
// `exclusions` performs the O(1) membership test against `right`.
@Udf public <T> List<T> except( @UdfParameter(description = "Array of values") final List<T> left, @UdfParameter(description = "Array of exceptions") final List<T> right) {
    if (left == null || right == null) {
        return null;
    }
    final Set<T> exclusions = new HashSet<>(right);
    final Set<T> seen = new HashSet<>();
    final List<T> result = new ArrayList<>();
    for (final T value : left) {
        if (!exclusions.contains(value) && seen.add(value)) {
            result.add(value);
        }
    }
    return result;
}
// When every distinct left value also appears in right, the result is empty
// ("extra" values in right are simply ignored).
@Test public void shouldReturnEmptyArrayIfAllExcepted() { final List<String> input1 = Arrays.asList("foo", " ", "foo", "bar"); final List<String> input2 = Arrays.asList("bar", " ", "foo", "extra"); final List<String> result = udf.except(input1, input2); assertThat(result.isEmpty(), is(true)); }
// Returns the fully assembled URL, computing and caching it on first access.
public String getFullUrl() {
    if (StrUtils.isNotEmpty(fullUrl)) {
        return fullUrl;
    }
    // Cache the assembled value so later calls skip createFullUrl().
    fullUrl = createFullUrl();
    return fullUrl;
}
// URLs without a protocol get "http://" prepended, and params merge correctly whether
// the url ends plainly, with a dangling '?', or already carries a query string.
@Test public void testFullUrlWithoutProtocol() { HttpRequest req0 = new HttpRequest.Builder() .url("localhost:8086/write") .params(MapUtils.of("k1", singletonList("v1"))) .get() .build(); Assert.assertEquals("http://localhost:8086/write?k1=v1", req0.getFullUrl()); HttpRequest req1 = new HttpRequest.Builder() .url("localhost:8086/write?") .params(MapUtils.of("k1", singletonList("v1"))) .get() .build(); Assert.assertEquals("http://localhost:8086/write?k1=v1", req1.getFullUrl()); HttpRequest req2 = new HttpRequest.Builder() .url("localhost:8086/write?k1=v1") .params(MapUtils.of("k2", singletonList("v2"))) .post("abcd") .build(); Assert.assertEquals("http://localhost:8086/write?k1=v1&k2=v2", req2.getFullUrl()); }
// A LIKE predicate can use an index only when an ordered index matches the attribute
// AND the pattern qualifies as an index prefix (e.g. no leading wildcard).
@Override public boolean isIndexed(QueryContext queryContext) {
    Index matched = queryContext.matchIndex(attributeName, QueryContext.IndexMatchHint.PREFER_ORDERED);
    if (matched == null || !matched.isOrdered()) {
        return false;
    }
    return expressionCanBeUsedAsIndexPrefix();
}
// Even with a matching sorted index, a pattern containing the '_' wildcard cannot be
// used as an index prefix, so the predicate must report not-indexed.
@Test public void likePredicateIsNotIndexed_whenUnderscoreWildcardIsUsed() { QueryContext queryContext = mock(QueryContext.class); when(queryContext.matchIndex("this", QueryContext.IndexMatchHint.PREFER_ORDERED)).thenReturn(createIndex(IndexType.SORTED)); assertFalse(new LikePredicate("this", "string_").isIndexed(queryContext)); }
// Adapts a Netty Future<Void> into a Mono<Void>. An already-completed future maps
// straight to Mono.empty()/Mono.error(); otherwise a deferred mono attaches a
// listener on subscription.
public static <F extends Future<Void>> Mono<Void> from(F future) {
    Objects.requireNonNull(future, "future");
    if (!future.isDone()) {
        return new ImmediateFutureMono<>(future);
    }
    return future.isSuccess()
        ? Mono.empty()
        : Mono.error(FutureSubscription.wrapError(future.cause()));
}
// Races subscribe / cancel / setSuccess 1000 times: no listener may be leaked on the
// promise, and the subscriber must always have seen ON_SUBSCRIBE first.
@Test void raceTestImmediateFutureMonoWithSuccess() { for (int i = 0; i < 1000; i++) { final TestSubscriber subscriber = new TestSubscriber(); final ImmediateEventExecutor eventExecutor = ImmediateEventExecutor.INSTANCE; final Promise<Void> promise = eventExecutor.newPromise(); RaceTestUtils.race(() -> FutureMono.from(promise) .subscribe(subscriber), subscriber::cancel, () -> promise.setSuccess(null)); assertThat(resolveListeners(promise)).isNullOrEmpty(); assertThat(subscriber.operations).first() .isEqualTo(TestSubscriber.Operation.ON_SUBSCRIBE); } }
// Accessor for the email field. NOTE(review): the accompanying test expects "" (not
// null) from a default-constructed instance — the field's initializer is not visible here.
public String getEmail() { return email; }
// A default-constructed GsonEmail must yield an empty (non-null) email string.
@Test public void should_have_no_arg_constructor() { assertThat(new GsonEmail().getEmail()).isEmpty(); }
// Creates a PipelineOptions proxy implementing the given interface, via a fresh Builder.
public static <T extends PipelineOptions> T as(Class<T> klass) { return new Builder().as(klass); }
// Interfaces whose setters carry @Default must be rejected; the error message names
// every offending property and its declaring interface.
@Test public void testMultipleSettersAnnotatedWithDefault() throws Exception { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("Found setters marked with @Default:"); expectedException.expectMessage( "property [other] should not be marked with @Default on [" + "org.apache.beam.sdk.options.PipelineOptionsFactoryTest$MultiSetterWithDefault]"); expectedException.expectMessage( "property [value] should not be marked with @Default on [" + "org.apache.beam.sdk.options.PipelineOptionsFactoryTest$SetterWithDefault]"); PipelineOptionsFactory.as(MultiSetterWithDefault.class); }
// Transcodes `str` between the given encodings using the encoder registered for the
// concatenated (lowercased) encoding pair; null in -> null out; unknown pair -> error.
@Udf(description = "Returns a new string encoded using the outputEncoding ")
public String encode(
    @UdfParameter(
        description = "The source string. If null, then function returns null.") final String str,
    @UdfParameter(
        description = "The input encoding."
            + " If null, then function returns null.") final String inputEncoding,
    @UdfParameter(
        description = "The output encoding."
            + " If null, then function returns null.") final String outputEncoding) {
  if (str == null || inputEncoding == null || outputEncoding == null) {
    return null;
  }
  // Fix: use a locale-independent lowercase for the map lookup. The default-locale
  // toLowerCase() mangles e.g. "HEX" under the Turkish locale ('I' -> dotless 'ı'),
  // which would miss the registered encoder.
  final String encodedString = inputEncoding.toLowerCase(java.util.Locale.ROOT)
      + outputEncoding.toLowerCase(java.util.Locale.ROOT);
  final Encode.Encoder encoder = ENCODER_MAP.get(encodedString);
  if (encoder == null) {
    throw new KsqlFunctionException("Supported input and output encodings are: "
        + "hex, utf8, ascii and base64");
  }
  return encoder.apply(str);
}
// utf8 -> base64 round-trips for ASCII, punctuation, and multi-byte UTF-8 text
// (Greek and umlauts), matching standard Base64 output.
@Test public void shouldEncodeUtf8ToBase64() { assertThat(udf.encode("Example!", "utf8", "base64"), is("RXhhbXBsZSE=")); assertThat(udf.encode("Plant trees", "utf8", "base64"), is("UGxhbnQgdHJlZXM=")); assertThat(udf.encode("1 + 1 = 1", "utf8", "base64"), is("MSArIDEgPSAx")); assertThat(udf.encode("Ελλάδα", "utf8", "base64"), is("zpXOu867zqzOtM6x")); assertThat(udf.encode("Übermensch", "utf8", "base64"), is("w5xiZXJtZW5zY2g=")); }
// Maps an introspection response to authorities: the configured base authorities plus
// one OAUTH_SCOPE_<scope> authority per entry of the space-delimited "scope" field
// (only when "scope" is present and a JSON primitive).
@Override public List<GrantedAuthority> getAuthorities(JsonObject introspectionResponse) { List<GrantedAuthority> auth = new ArrayList<>(getAuthorities()); if (introspectionResponse.has("scope") && introspectionResponse.get("scope").isJsonPrimitive()) { String scopeString = introspectionResponse.get("scope").getAsString(); Set<String> scopes = OAuth2Utils.parseParameterList(scopeString); for (String scope : scopes) { auth.add(new SimpleGrantedAuthority("OAUTH_SCOPE_" + scope)); } } return auth; }
// Without a "scope" field only the base authorities (ROLE_API) are returned —
// checked as set equality via mutual containsAll.
@Test public void testGetAuthoritiesJsonObject_withoutScopes() { List<GrantedAuthority> expected = new ArrayList<>(); expected.add(new SimpleGrantedAuthority("ROLE_API")); List<GrantedAuthority> authorities = granter.getAuthorities(introspectionResponse); assertTrue(authorities.containsAll(expected)); assertTrue(expected.containsAll(authorities)); }
/**
 * Returns the fully-qualified name of the given class.
 *
 * <p>Fix: parameter typed {@code Class<?>} instead of the raw {@code Class} —
 * erasure-identical, source-compatible for all callers, and removes the raw-type warning.
 *
 * @param cls the class to query, not null
 * @return the value of {@link Class#getName()}
 * @throws NullPointerException if {@code cls} is null
 */
public static String getName(Class<?> cls) {
    Objects.requireNonNull(cls, "cls");
    return cls.getName();
}
// getName/getCanonicalName/getSimpleName agree for Integer, both from an instance
// and from the Class object.
@Test void testGetName() { final String name = "java.lang.Integer"; Integer val = 1; assertEquals(name, ClassUtils.getName(val)); assertEquals(name, ClassUtils.getName(Integer.class)); assertEquals(name, ClassUtils.getCanonicalName(val)); assertEquals(name, ClassUtils.getCanonicalName(Integer.class)); assertEquals("Integer", ClassUtils.getSimpleName(val)); assertEquals("Integer", ClassUtils.getSimpleName(Integer.class)); }
// Resolves the Java type of a literal for the given type name: looks up the registered
// analyzer, validates the literal (analyzer may reject it), and returns the analyzer's
// literal type — or null when no analyzer is registered or validation yields none.
// Fix: local declared as Class<?> instead of the raw Class type.
public static Class<?> getLiteral(String className, String literal) {
    LiteralAnalyzer analyzer = ANALYZERS.get( className );
    Class<?> result = null;
    if ( analyzer != null ) {
        analyzer.validate( literal );
        result = analyzer.getLiteral();
    }
    return result;
}
// Exercises float/double literal parsing against JLS §3.10.2 boundary values:
// largest/smallest finite literals (decimal and hex-float forms) are accepted,
// while overflowing (infinite) and underflowing (zero) literals resolve to null.
@Test public void testFloatingPoingLiteralFromJLS() { // The largest positive finite literal of type float is 3.4028235e38f. assertThat( getLiteral( float.class.getCanonicalName(), "3.4028235e38f" ) ).isNotNull(); // The smallest positive finite non-zero literal of type float is 1.40e-45f. assertThat( getLiteral( float.class.getCanonicalName(), "1.40e-45f" ) ).isNotNull(); // The largest positive finite literal of type double is 1.7976931348623157e308. assertThat( getLiteral( double.class.getCanonicalName(), "1.7976931348623157e308" ) ).isNotNull(); // The smallest positive finite non-zero literal of type double is 4.9e-324 assertThat( getLiteral( double.class.getCanonicalName(), "4.9e-324" ) ).isNotNull(); // some floats assertThat( getLiteral( float.class.getCanonicalName(), "3.1e1F" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), "2.f" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), ".3f" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), "0f" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), "3.14f" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), "6.022137e+23f" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), "-3.14f" ) ).isNotNull(); // some doubles assertThat( getLiteral( double.class.getCanonicalName(), "1e1" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "1e+1" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "2." 
) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), ".3" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "0.0" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "3.14" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "-3.14" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "1e-9D" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "1e137" ) ).isNotNull(); // too large (infinitve) assertThat( getLiteral( float.class.getCanonicalName(), "3.4028235e38f" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "1.7976931348623157e308" ) ).isNotNull(); // too large (infinitve) assertThat( getLiteral( float.class.getCanonicalName(), "3.4028235e39f" ) ).isNull(); assertThat( getLiteral( double.class.getCanonicalName(), "1.7976931348623159e308" ) ).isNull(); // small assertThat( getLiteral( float.class.getCanonicalName(), "1.40e-45f" ) ).isNotNull(); assertThat( getLiteral( float.class.getCanonicalName(), "0x1.0p-149" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "4.9e-324" ) ).isNotNull(); assertThat( getLiteral( double.class.getCanonicalName(), "0x0.001P-1062d" ) ).isNotNull(); // too small assertThat( getLiteral( float.class.getCanonicalName(), "1.40e-46f" ) ).isNull(); assertThat( getLiteral( float.class.getCanonicalName(), "0x1.0p-150" ) ).isNull(); assertThat( getLiteral( double.class.getCanonicalName(), "4.9e-325" ) ).isNull(); assertThat( getLiteral( double.class.getCanonicalName(), "0x0.001p-1063d" ) ).isNull(); }
// Factory for a string-based intent Key scoped to the given application.
public static Key of(String key, ApplicationId appId) { return new StringKey(key, appId); }
// The Key base class and both private concrete key classes (long- and string-based,
// reached via the factory methods) must all be immutable.
@Test public void keysAreImmutable() { assertThatClassIsImmutableBaseClass(Key.class); // Will be a long based key, class is private so cannot be // accessed directly Key longKey = Key.of(0xabcdefL, NetTestTools.APP_ID); assertThatClassIsImmutable(longKey.getClass()); // Will be a String based key, class is private so cannot be // accessed directly. Key stringKey = Key.of("some key", NetTestTools.APP_ID); assertThatClassIsImmutable(stringKey.getClass()); }
// Resolves the metrics type from an endpoint URI remainder: the text before the first
// ':' names the type (absent -> default type); an unresolvable name is an error.
MetricsType getMetricsType(String remaining) {
    String name = StringHelper.before(remaining, ":");
    MetricsType resolved = (name == null) ? DEFAULT_METRICS_TYPE : MetricsType.getByName(name);
    if (resolved == null) {
        throw new RuntimeCamelException("Unknown metrics type \"" + name + "\"");
    }
    return resolved;
}
// Every enum constant's toString prefix ("<type>:metrics-name") resolves back to itself.
@Test public void testGetMetricsType() { for (MetricsType type : EnumSet.allOf(MetricsType.class)) { assertThat(component.getMetricsType(type.toString() + ":metrics-name"), is(type)); } }
// Compresses a URL into the Eddystone-URL byte format: one leading byte encodes the
// protocol (with/without "www."), the hostname/subdomains are written literally, a
// recognized TLD (optionally with its trailing slash) collapses to a single encoded
// byte, and any remaining slash/path bytes are appended verbatim. The output buffer is
// sized by the input string (compressed form is never longer) and trimmed at the end.
// Throws MalformedURLException for null input or input not matching EDDYSTONE_URL_REGEX.
public static byte[] compress(String urlString) throws MalformedURLException { byte[] compressedBytes = null; if (urlString != null) { // Figure the compressed bytes can't be longer than the original string. byte[] byteBuffer = new byte[urlString.length()]; int byteBufferIndex = 0; Arrays.fill(byteBuffer, (byte) 0x00); Pattern urlPattern = Pattern.compile(EDDYSTONE_URL_REGEX); Matcher urlMatcher = urlPattern.matcher(urlString); if (urlMatcher.matches()) { // www. String wwwdot = urlMatcher.group(EDDYSTONE_URL_WWW_GROUP); boolean haswww = (wwwdot != null); // Protocol. String rawProtocol = urlMatcher.group(EDDYSTONE_URL_PROTOCOL_GROUP); String protocol = rawProtocol.toLowerCase(); if (protocol.equalsIgnoreCase(URL_PROTOCOL_HTTP)) { byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTP_WWW : EDDYSTONE_URL_PROTOCOL_HTTP); } else { byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTPS_WWW : EDDYSTONE_URL_PROTOCOL_HTTPS); } byteBufferIndex++; // Fully-qualified domain name (FQDN). This includes the hostname and any other components after the dots // but BEFORE the first single slash in the URL. byte[] hostnameBytes = urlMatcher.group(EDDYSTONE_URL_FQDN_GROUP).getBytes(); String rawHostname = new String(hostnameBytes); String hostname = rawHostname.toLowerCase(); String[] domains = hostname.split(Pattern.quote(".")); boolean consumedSlash = false; if (domains != null) { // Write the hostname/subdomains prior to the last one. If there's only one (e. g. http://localhost) // then that's the only thing to write out. byte[] periodBytes = {'.'}; int writableDomainsCount = (domains.length == 1 ? 1 : domains.length - 1); for (int domainIndex = 0; domainIndex < writableDomainsCount; domainIndex++) { // Write out leading period, if necessary. 
if (domainIndex > 0) { System.arraycopy(periodBytes, 0, byteBuffer, byteBufferIndex, periodBytes.length); byteBufferIndex += periodBytes.length; } byte[] domainBytes = domains[domainIndex].getBytes(); int domainLength = domainBytes.length; System.arraycopy(domainBytes, 0, byteBuffer, byteBufferIndex, domainLength); byteBufferIndex += domainLength; } // Is the TLD one that we can encode? if (domains.length > 1) { String tld = "." + domains[domains.length - 1]; String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP); String encodableTLDCandidate = (slash == null ? tld : tld + slash); byte encodedTLDByte = encodedByteForTopLevelDomain(encodableTLDCandidate); if (encodedTLDByte != TLD_NOT_ENCODABLE) { byteBuffer[byteBufferIndex++] = encodedTLDByte; consumedSlash = (slash != null); } else { byte[] tldBytes = tld.getBytes(); int tldLength = tldBytes.length; System.arraycopy(tldBytes, 0, byteBuffer, byteBufferIndex, tldLength); byteBufferIndex += tldLength; } } } // Optional slash. if (! consumedSlash) { String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP); if (slash != null) { int slashLength = slash.length(); System.arraycopy(slash.getBytes(), 0, byteBuffer, byteBufferIndex, slashLength); byteBufferIndex += slashLength; } } // Path. String path = urlMatcher.group(EDDYSTONE_URL_PATH_GROUP); if (path != null) { int pathLength = path.length(); System.arraycopy(path.getBytes(), 0, byteBuffer, byteBufferIndex, pathLength); byteBufferIndex += pathLength; } // Copy the result. compressedBytes = new byte[byteBufferIndex]; System.arraycopy(byteBuffer, 0, compressedBytes, 0, compressedBytes.length); } else { throw new MalformedURLException(); } } else { throw new MalformedURLException(); } return compressedBytes; }
// Uppercase protocol and "WWW." still compress correctly: 0x01 = https+www prefix,
// lowercased hostname bytes, and 0x07 for the encoded ".com" TLD.
@Test public void testCompressHttpsAndWWWInCaps() throws MalformedURLException { String testURL = "HTTPS://WWW.radiusnetworks.com"; byte[] expectedBytes = {0x01, 'r', 'a', 'd', 'i', 'u', 's', 'n', 'e', 't', 'w', 'o', 'r', 'k', 's', 0x07}; assertTrue(Arrays.equals(expectedBytes, UrlBeaconUrlCompressor.compress(testURL))); }
// Completes the atomic write: flushes, fsyncs the underlying FileOutputStream channel,
// closes the stream, then promotes tmpFile over origFile. On Windows, renameTo will
// not replace an existing file, so the fallback deletes origFile and retries with the
// native rename (failures become IOException). If the close path failed, the FD is
// closed if still open and the temp file is best-effort deleted (warning on failure).
@Override public void close() throws IOException { boolean triedToClose = false, success = false; try { flush(); ((FileOutputStream)out).getChannel().force(true); triedToClose = true; super.close(); success = true; } finally { if (success) { boolean renamed = tmpFile.renameTo(origFile); if (!renamed) { // On windows, renameTo does not replace. if (origFile.exists()) { try { Files.delete(origFile.toPath()); } catch (IOException e) { throw new IOException("Could not delete original file " + origFile, e); } } try { NativeIO.renameTo(tmpFile, origFile); } catch (NativeIOException e) { throw new IOException("Could not rename temporary file " + tmpFile + " to " + origFile + " due to failure in native rename. " + e.toString()); } } } else { if (!triedToClose) { // If we failed when flushing, try to close it to not leak an FD IOUtils.closeStream(out); } // close wasn't successful, try to delete the tmp file if (!tmpFile.delete()) { LOG.warn("Unable to delete tmp file " + tmpFile); } } } }
// Windows-only: making the target directory read-only forces the native-rename
// fallback to fail, and close() must surface that as an IOException.
@Test public void testFailToRename() throws IOException { assumeWindows(); OutputStream fos = null; try { fos = new AtomicFileOutputStream(DST_FILE); fos.write(TEST_STRING.getBytes()); FileUtil.setWritable(TEST_DIR, false); exception.expect(IOException.class); exception.expectMessage("failure in native rename"); try { fos.close(); } finally { fos = null; } } finally { IOUtils.cleanupWithLogger(null, fos); FileUtil.setWritable(TEST_DIR, true); } }
public void mirrorKeys() { /* how to mirror? width = 55 [0..15] [20..35] [40..55] phase 1: multiple by -1 [0] [-20] [-40] phase 2: add keyboard width [55] [35] [15] phase 3: subtracting the key's width [40] [20] [0] cool? */ final int keyboardWidth = getMinWidth(); for (Key k : getKeys()) { var newX = -1 * k.x; // phase 1 newX += keyboardWidth; // phase 2 newX -= k.width; // phase 3 k.x = newX; } }
// After mirrorKeys(), a one-row popup keyboard keeps its key order but each key's X
// position is reflected across the row (first key ends up at the far right slot).
@Test public void testKeyboardPopupSupportsMirrorOneRow() throws Exception { String popupCharacters = "qwert"; AnyPopupKeyboard keyboard = new AnyPopupKeyboard( new DefaultAddOn(getApplicationContext(), getApplicationContext()), getApplicationContext(), popupCharacters, SIMPLE_KeyboardDimens, "POP_KEYBOARD"); int vGap = (int) SIMPLE_KeyboardDimens.getRowVerticalGap(); int hGap = (int) SIMPLE_KeyboardDimens.getKeyHorizontalGap(); final int keyWidth = (int) (SIMPLE_KeyboardDimens.getKeyboardMaxWidth() - SIMPLE_KeyboardDimens.getKeyHorizontalGap() * popupCharacters.length()) / 10; assertKeyValues(keyboard, 'q', vGap, 0); assertKeyValues(keyboard, 'w', vGap, keyWidth); assertKeyValues(keyboard, 'e', vGap, hGap + 2 * keyWidth); assertKeyValues(keyboard, 'r', vGap, 2 * hGap + 3 * keyWidth); assertKeyValues(keyboard, 't', vGap, 3 * hGap + 4 * keyWidth); keyboard.mirrorKeys(); // same order, mirrored X position assertKeyValues(keyboard, 'q', vGap, 5 * hGap + 4 * keyWidth); assertKeyValues(keyboard, 'w', vGap, 4 * hGap + 3 * keyWidth); assertKeyValues(keyboard, 'e', vGap, 3 * hGap + 2 * keyWidth); assertKeyValues(keyboard, 'r', vGap, 2 * hGap + keyWidth); assertKeyValues(keyboard, 't', vGap, hGap); }
public String cleanURL(String entry) { String url = entry; if (entry.contains("\"") && checkMethod(entry)) { // we tokenize using double quotes. this means // for tomcat we should have 3 tokens if there // isn't any additional information in the logs StringTokenizer tokens = this.tokenize(entry, "\""); while (tokens.hasMoreTokens()) { String token = tokens.nextToken(); if (checkMethod(token)) { // we tokenzie it using space and escape // the while loop. Only the first matching // token will be used StringTokenizer token2 = this.tokenize(token, " "); while (token2.hasMoreTokens()) { String t = (String) token2.nextElement(); if (t.equalsIgnoreCase(GET)) { RMETHOD = GET; } else if (t.equalsIgnoreCase(POST)) { RMETHOD = POST; } else if (t.equalsIgnoreCase(HEAD)) { RMETHOD = HEAD; } // there should only be one token // that starts with slash character if (t.startsWith("/")) { url = t; break; } } break; } } return url; } // we return the original string return url; }
@Test
public void testcleanURL() throws Exception {
    // cleanURL must pull the path out of the quoted request line, and
    // stripFile must return null because the path has no file component.
    final String cleanedUrl = tclp.cleanURL(URL1);
    assertEquals("/addrbook/", cleanedUrl);
    assertNull(tclp.stripFile(cleanedUrl, new HTTPNullSampler()));
}
/**
 * Validates a metric definition, rejecting it with a
 * {@link ValidationException} when it is null, uses an unknown function,
 * lacks a required field name, or requests sorting on an unsortable metric.
 */
public void validate(final Metric metric) {
    if (metric == null) {
        throw new ValidationException("Metric cannot be null");
    }
    final String function = metric.functionName();
    if (!isValidFunction(function)) {
        throw new ValidationException(
                "Unrecognized metric : " + function + ", valid metrics : " + availableMetricTypes);
    }
    if (!hasFieldIfFunctionNeedsIt(metric)) {
        throw new ValidationException(
                function + " metric requires field name to be provided after a colon, i.e. "
                        + function + ":http_status_code");
    }
    if (metric.sort() != null && UNSORTABLE_METRICS.contains(function)) {
        throw new ValidationException(function + " metric cannot be used to sort aggregations");
    }
}
@Test
void throwsExceptionOnMetricMissingFieldName() {
    //check count metric is fine with no field
    toTest.validate(new Metric("count", null, SortSpec.Direction.Ascending, null));
    //check other metrics throw exception on missing field
    // Both an empty and a null field name must be rejected for "avg".
    assertThrows(ValidationException.class,
            () -> toTest.validate(new Metric("avg", "", SortSpec.Direction.Ascending, null)));
    assertThrows(ValidationException.class,
            () -> toTest.validate(new Metric("avg", null, SortSpec.Direction.Ascending, null)));
}
/**
 * Parses the given JSON document into an {@code S3SignRequest} by delegating
 * to this parser's node-based {@code fromJson} overload via
 * {@code JsonUtil.parse}.
 */
public static S3SignRequest fromJson(String json) {
    return JsonUtil.parse(json, S3SignRequestParser::fromJson);
}
@Test
public void missingFields() {
    // Required fields must be reported in order: region, method, uri, headers.
    assertThatThrownBy(() -> S3SignRequestParser.fromJson("{}"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse missing string: region");

    assertThatThrownBy(() -> S3SignRequestParser.fromJson("{\"region\":\"us-west-2\"}"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse missing string: method");

    assertThatThrownBy(
            () -> S3SignRequestParser.fromJson("{\"region\":\"us-west-2\", \"method\" : \"PUT\"}"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse missing string: uri");

    assertThatThrownBy(
            () ->
                S3SignRequestParser.fromJson(
                    "{\n"
                        + "  \"region\" : \"us-west-2\",\n"
                        + "  \"method\" : \"PUT\",\n"
                        + "  \"uri\" : \"http://localhost:49208/iceberg-signer-test\"\n"
                        + "}"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse missing field: headers");
}
/**
 * Resolves the SQL type of {@code expression} with no lambda type mappings
 * in scope (delegates with an empty mapping).
 */
public SqlType getExpressionSqlType(final Expression expression) {
    return getExpressionSqlType(expression, Collections.emptyMap());
}
@Test
public void shouldEvaluateLambdaArgsToType() {
    // Given:
    // TRANSFORM takes an array, a string, and a lambda (DOUBLE, STRING) -> STRING.
    givenUdfWithNameAndReturnType("TRANSFORM", SqlTypes.STRING);
    when(function.parameters()).thenReturn(
        ImmutableList.of(
            ArrayType.of(DoubleType.INSTANCE),
            StringType.INSTANCE,
            LambdaType.of(
                ImmutableList.of(
                    DoubleType.INSTANCE,
                    StringType.INSTANCE
                ),
                StringType.INSTANCE)));
    // Lambda body adds a DOUBLE to a STRING, which is not a valid arithmetic
    // combination, so type resolution must fail.
    final Expression expression = new FunctionCall(
        FunctionName.of("TRANSFORM"),
        ImmutableList.of(
            ARRAYCOL,
            new StringLiteral("Q"),
            new LambdaFunctionCall(
                ImmutableList.of("A", "B"),
                new ArithmeticBinaryExpression(
                    Operator.ADD,
                    new LambdaVariable("A"),
                    new LambdaVariable("B"))
            )));

    // When:
    final KsqlStatementException e = assertThrows(
        KsqlStatementException.class,
        () -> expressionTypeManager.getExpressionSqlType(expression)
    );

    // Then:
    // The detailed cause goes to the unlogged message; the public message is generic.
    assertThat(e.getUnloggedMessage(), Matchers.containsString(
        "Error processing expression: (A + B). Unsupported arithmetic types. DOUBLE STRING"
            + System.lineSeparator() + "Statement: (A + B)"));
    assertThat(e.getMessage(), Matchers.is(
        "Error processing expression."));
}
@PostMapping("/token")
@PermitAll
@Operation(summary = "获得访问令牌", description = "适合 code 授权码模式,或者 implicit 简化模式;在 sso.vue 单点登录界面被【获取】调用")
@Parameters({
        @Parameter(name = "grant_type", required = true, description = "授权类型", example = "code"),
        @Parameter(name = "code", description = "授权范围", example = "userinfo.read"),
        @Parameter(name = "redirect_uri", description = "重定向 URI", example = "https://www.iocoder.cn"),
        @Parameter(name = "state", description = "状态", example = "1"),
        @Parameter(name = "username", example = "tudou"),
        @Parameter(name = "password", example = "cai"), // multiple values are separated by spaces
        @Parameter(name = "scope", example = "user_info"),
        @Parameter(name = "refresh_token", example = "123424233"),
})
// OAuth2 token endpoint: validates the grant type and client credentials,
// then issues an access token according to the requested grant flow.
public CommonResult<OAuth2OpenAccessTokenRespVO> postAccessToken(HttpServletRequest request,
                                                                 @RequestParam("grant_type") String grantType,
                                                                 @RequestParam(value = "code", required = false) String code, // authorization code mode
                                                                 @RequestParam(value = "redirect_uri", required = false) String redirectUri, // authorization code mode
                                                                 @RequestParam(value = "state", required = false) String state, // authorization code mode
                                                                 @RequestParam(value = "username", required = false) String username, // password mode
                                                                 @RequestParam(value = "password", required = false) String password, // password mode
                                                                 @RequestParam(value = "scope", required = false) String scope, // password mode
                                                                 @RequestParam(value = "refresh_token", required = false) String refreshToken) { // refresh mode
    List<String> scopes = OAuth2Utils.buildScopes(scope);
    // 1.1 validate the grant type
    OAuth2GrantTypeEnum grantTypeEnum = OAuth2GrantTypeEnum.getByGranType(grantType);
    if (grantTypeEnum == null) {
        throw exception0(BAD_REQUEST.getCode(), StrUtil.format("未知授权类型({})", grantType));
    }
    if (grantTypeEnum == OAuth2GrantTypeEnum.IMPLICIT) {
        throw exception0(BAD_REQUEST.getCode(), "Token 接口不支持 implicit 授权模式");
    }

    // 1.2 validate the client (credentials come from the HTTP Basic header)
    String[] clientIdAndSecret = obtainBasicAuthorization(request);
    OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientIdAndSecret[0], clientIdAndSecret[1],
            grantType, scopes, redirectUri);

    // 2. obtain the access token according to the grant type
    OAuth2AccessTokenDO accessTokenDO;
    switch (grantTypeEnum) {
        case AUTHORIZATION_CODE:
            accessTokenDO = oauth2GrantService.grantAuthorizationCodeForAccessToken(client.getClientId(), code, redirectUri, state);
            break;
        case PASSWORD:
            accessTokenDO = oauth2GrantService.grantPassword(username, password, client.getClientId(), scopes);
            break;
        case CLIENT_CREDENTIALS:
            accessTokenDO = oauth2GrantService.grantClientCredentials(client.getClientId(), scopes);
            break;
        case REFRESH_TOKEN:
            accessTokenDO = oauth2GrantService.grantRefreshToken(refreshToken, client.getClientId());
            break;
        default:
            throw new IllegalArgumentException("未知授权类型:" + grantType);
    }
    Assert.notNull(accessTokenDO, "访问令牌不能为空"); // defensive check
    return success(OAuth2OpenConvert.INSTANCE.convert(accessTokenDO));
}
@Test
public void testPostAccessToken_password() {
    // prepare arguments
    String granType = OAuth2GrantTypeEnum.PASSWORD.getGrantType();
    String username = randomString();
    String password = randomString();
    String scope = "write read";
    HttpServletRequest request = mockRequest("test_client_id", "test_client_secret");
    // mock method (client validation)
    OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("test_client_id");
    when(oauth2ClientService.validOAuthClientFromCache(eq("test_client_id"), eq("test_client_secret"),
            eq(granType), eq(Lists.newArrayList("write", "read")), isNull())).thenReturn(client);
    // mock method (access token grant)
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class)
            .setExpiresTime(LocalDateTimeUtil.offset(LocalDateTime.now(), 30000L, ChronoUnit.MILLIS));
    when(oauth2GrantService.grantPassword(eq(username), eq(password), eq("test_client_id"),
            eq(Lists.newArrayList("write", "read")))).thenReturn(accessTokenDO);

    // invoke
    CommonResult<OAuth2OpenAccessTokenRespVO> result = oauth2OpenController.postAccessToken(request,
            granType, null, null, null, username, password, scope, null);
    // assert
    assertEquals(0, result.getCode());
    assertPojoEquals(accessTokenDO, result.getData());
    assertTrue(ObjectUtils.equalsAny(result.getData().getExpiresIn(), 29L, 30L)); // a few milliseconds may elapse during execution
}
/**
 * Delegates to the wrapped message listener's string representation, so the
 * adapter is transparent in logs and diagnostics.
 */
@Override
public String toString() {
    return messageListener.toString();
}
@Test
public void test_toString() {
    // The adapter must delegate toString() to the wrapped listener verbatim.
    MessageListener<String> delegate = createMessageListenerMock();
    when(delegate.toString()).thenReturn("foobar");

    ReliableMessageListenerAdapter<String> adapted = new ReliableMessageListenerAdapter<>(delegate);
    assertEquals("foobar", adapted.toString());
}
/**
 * Returns the current properties hash. The value is read while holding the
 * read lock so it cannot interleave with a concurrent mutation.
 */
public String hash() {
    try (LockResource r = new LockResource(mLock.readLock())) {
        return mHash.get();
    }
}
@Test
public void hash() {
    PathProperties properties = new PathProperties();
    String hash0 = properties.hash();
    // Every add changes the hash.
    properties.add(NoopJournalContext.INSTANCE, ROOT, READ_CACHE);
    String hash1 = properties.hash();
    Assert.assertNotEquals(hash0, hash1);
    properties.add(NoopJournalContext.INSTANCE, DIR1, READ_CACHE_WRITE_CACHE_THROUGH);
    String hash2 = properties.hash();
    Assert.assertNotEquals(hash0, hash2);
    Assert.assertNotEquals(hash1, hash2);
    // Removing a single key also changes the hash.
    Set<String> keys = new HashSet<>();
    keys.add(PropertyKey.USER_FILE_READ_TYPE_DEFAULT.getName());
    properties.remove(NoopJournalContext.INSTANCE, DIR1, keys);
    String hash3 = properties.hash();
    Assert.assertNotEquals(hash0, hash3);
    Assert.assertNotEquals(hash1, hash3);
    Assert.assertNotEquals(hash2, hash3);
    // Undoing the DIR1 changes restores the earlier hash; undoing everything
    // restores the initial hash — the hash is a function of current state.
    properties.removeAll(NoopJournalContext.INSTANCE, DIR1);
    String hash4 = properties.hash();
    Assert.assertEquals(hash1, hash4);
    properties.removeAll(NoopJournalContext.INSTANCE, ROOT);
    Assert.assertEquals(hash0, properties.hash());
}
/**
 * Sends a request message synchronously and waits up to {@code timeout}
 * milliseconds for the reply. The topic is rewritten with the configured
 * namespace before the call is delegated to the producer implementation.
 */
@Override
public Message request(final Message msg,
    final long timeout) throws RequestTimeoutException, MQClientException, RemotingException, MQBrokerException, InterruptedException {
    msg.setTopic(withNamespace(msg.getTopic()));
    return this.defaultMQProducerImpl.request(msg, timeout);
}
@Test
public void assertRequest() throws MQBrokerException, RemotingException, InterruptedException, MQClientException,
        NoSuchFieldException, IllegalAccessException, RequestTimeoutException {
    // Exercises the request(...) overloads: selector-based (sync and async),
    // and explicit-queue (sync and async).
    setDefaultMQProducerImpl();
    MessageQueueSelector selector = mock(MessageQueueSelector.class);
    Message replyNsg = producer.request(message, selector, 1, defaultTimeout);
    assertNull(replyNsg);
    RequestCallback requestCallback = mock(RequestCallback.class);
    producer.request(message, selector, 1, requestCallback, defaultTimeout);
    MessageQueue mq = mock(MessageQueue.class);
    producer.request(message, mq, defaultTimeout);
    producer.request(message, mq, requestCallback, defaultTimeout);
}
/**
 * Registers the etcd-backed {@code DataChangedInit} bean, unless the
 * application already defines one.
 */
@Bean
@ConditionalOnMissingBean(EtcdDataChangedInit.class)
public DataChangedInit etcdDataChangedInit(final EtcdClient etcdClient) {
    return new EtcdDataChangedInit(etcdClient);
}
@Test
public void testEtcdDataInit() {
    // The configuration must produce a non-null DataChangedInit for any client.
    final EtcdSyncConfiguration configuration = new EtcdSyncConfiguration();
    final EtcdClient etcdClient = mock(EtcdClient.class);
    assertNotNull(configuration.etcdDataChangedInit(etcdClient));
}
/**
 * Checks whether the request may pass, delegating to the three-argument
 * overload with prioritization disabled.
 */
@Override
public boolean canPass(Node node, int acquireCount) {
    return canPass(node, acquireCount, false);
}
@Test
public void testThrottlingControllerQueueTimeout() throws InterruptedException {
    // 10 QPS with a 500 ms max queueing time: firing 10 requests at once must
    // cause at least one of them to be rejected.
    final ThrottlingController paceController = new ThrottlingController(500, 10d);
    final Node node = mock(Node.class);

    final AtomicInteger passCount = new AtomicInteger();
    final AtomicInteger blockCount = new AtomicInteger();
    final CountDownLatch countDown = new CountDownLatch(1);
    final AtomicInteger done = new AtomicInteger();
    for (int i = 0; i < 10; i++) {
        Thread thread = new Thread(() -> {
            if (paceController.canPass(node, 1)) {
                passCount.incrementAndGet();
            } else {
                blockCount.incrementAndGet();
            }
            done.incrementAndGet();
            // Release the main thread once all workers have finished.
            if (done.get() >= 10) {
                countDown.countDown();
            }
        }, "Thread-TestThrottlingControllerQueueTimeout-" + i);
        thread.start();
    }
    countDown.await();
    System.out.println("pass: " + passCount.get());
    System.out.println("block: " + blockCount.get());
    System.out.println("done: " + done.get());
    assertTrue(blockCount.get() > 0);
}
/**
 * Parses an Intermediate Cuff Pressure characteristic value.
 * <p>
 * Layout: 1 flag byte, 3 SFLOATs (cuff pressure + two unused fields), then
 * optional timestamp (7 bytes), pulse rate (SFLOAT, 2 bytes), user id
 * (1 byte) and measurement status (2 bytes), as indicated by the flags.
 * Malformed packets are routed to {@code onInvalidDataReceived}.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // Minimum size: flags + 3 SFLOAT pressure fields.
    if (data.size() < 7) {
        onInvalidDataReceived(device, data);
        return;
    }
    // First byte: flags
    int offset = 0;
    final int flags = data.getIntValue(Data.FORMAT_UINT8, offset++);
    // See UNIT_* for unit options
    final int unit = (flags & 0x01) == UNIT_mmHg ? UNIT_mmHg : UNIT_kPa;
    final boolean timestampPresent = (flags & 0x02) != 0;
    final boolean pulseRatePresent = (flags & 0x04) != 0;
    final boolean userIdPresent = (flags & 0x08) != 0;
    final boolean measurementStatusPresent = (flags & 0x10) != 0;

    // Re-check the size against the full length implied by the flags.
    if (data.size() < 7
            + (timestampPresent ? 7 : 0) + (pulseRatePresent ? 2 : 0)
            + (userIdPresent ? 1 : 0) + (measurementStatusPresent ? 2 : 0)) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Following bytes - systolic, diastolic and mean arterial pressure
    final float cuffPressure = data.getFloatValue(Data.FORMAT_SFLOAT, offset);
    // final float ignored_1 = data.getFloatValue(Data.FORMAT_SFLOAT, offset + 2);
    // final float ignored_2 = data.getFloatValue(Data.FORMAT_SFLOAT, offset + 4);
    offset += 6;

    // Parse timestamp if present
    Calendar calendar = null;
    if (timestampPresent) {
        calendar = DateTimeDataCallback.readDateTime(data, offset);
        offset += 7;
    }

    // Parse pulse rate if present
    Float pulseRate = null;
    if (pulseRatePresent) {
        pulseRate = data.getFloatValue(Data.FORMAT_SFLOAT, offset);
        offset += 2;
    }

    // Read user id if present
    Integer userId = null;
    if (userIdPresent) {
        userId = data.getIntValue(Data.FORMAT_UINT8, offset);
        offset += 1;
    }

    // Read measurement status if present
    BPMStatus status = null;
    if (measurementStatusPresent) {
        final int measurementStatus = data.getIntValue(Data.FORMAT_UINT16_LE, offset);
        // offset += 2;
        status = new BPMStatus(measurementStatus);
    }

    onIntermediateCuffPressureReceived(device, cuffPressure, unit, pulseRate, userId, status, calendar);
}
@Test
public void onIntermediateCuffPressureReceived_minimal() {
    // A minimal 7-byte packet (flags + 3 SFLOATs) must parse with all
    // optional fields absent.
    final DataReceivedCallback callback = new IntermediateCuffPressureDataCallback() {
        @Override
        public void onIntermediateCuffPressureReceived(@NonNull final BluetoothDevice device,
                                                       final float cuffPressure, final int unit,
                                                       @Nullable final Float pulseRate,
                                                       @Nullable final Integer userID,
                                                       @Nullable final BPMStatus status,
                                                       @Nullable final Calendar calendar) {
            assertEquals("Cuff pressure", 21.5, cuffPressure, 0.01);
            assertEquals("Unit: mmHg", 0, unit);
            assertNull("Pulse rate not set", pulseRate);
            assertNull("User ID not set", userID);
            assertNull("Status not set", status);
            assertNull("Calendar not set", calendar);
        }

        @Override
        public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
            assertEquals("Correct ICP reported as invalid", 1, 2);
        }
    };

    final MutableData data = new MutableData(new byte[7]);
    // Flags
    data.setByte(0b00000, 0);
    // Systolic, diastolic and mean AP in mmHg
    // SFLOAT 215 with exponent -1 encodes 21.5.
    data.setValue(215, -1, Data.FORMAT_SFLOAT, 1);
    assertArrayEquals(
            new byte[] { 0x00, (byte) 0xD7, (byte) 0xF0, 0x00, 0x00, 0x00, 0x00 },
            data.getValue()
    );
    callback.onDataReceived(null, data);
}
/**
 * Splits {@code str} around matches of {@code splitPattern}, keeping each
 * matched delimiter as its own element of the returned list.
 * <p>
 * Example: splitting {@code "test,test2"} by a word pattern that matches
 * {@code ","} yields {@code [test, ",", test2]}.
 *
 * @param str          the input string; {@code null} yields an empty list
 * @param splitPattern the compiled pattern whose matches act as delimiters
 * @return the pieces of {@code str} interleaved with the delimiters, in order
 */
protected static final List<String> splitStringPreserveDelimiter(String str, Pattern splitPattern) {
    List<String> list = new ArrayList<>();
    if (str != null) {
        Matcher matcher = splitPattern.matcher(str);
        int pos = 0;
        while (matcher.find()) {
            // Text between the previous match and this one, if any.
            if (pos < matcher.start()) {
                list.add(str.substring(pos, matcher.start()));
            }
            // The delimiter itself is preserved as its own element.
            list.add(matcher.group());
            pos = matcher.end();
        }
        // Trailing text after the last match.
        if (pos < str.length()) {
            list.add(str.substring(pos));
        }
    }
    return list;
}
@Test
public void testSplitString() {
    // The comma delimiter must be kept as its own element between the words.
    List<String> list = DiffRowGenerator.splitStringPreserveDelimiter("test,test2",
            DiffRowGenerator.SPLIT_BY_WORD_PATTERN);
    assertEquals(3, list.size());
    assertEquals("[test, ,, test2]", list.toString());
}
/**
 * Populates the storage-server config: superclass defaults first, then the
 * provider's settings (which may add to or override them).
 */
@Override
public void getConfig(StorServerConfig.Builder builder) {
    super.getConfig(builder);
    provider.getConfig(builder);
}
@Test
void testThatGroupsAreCountedInWhenComputingSplitBits() {
    StorDistributormanagerConfig.Builder builder = new StorDistributormanagerConfig.Builder();
    // Flat cluster: two nodes in a single group.
    ContentCluster cluster = parseCluster("<cluster id=\"storage\">\n" +
            "  <redundancy>3</redundancy>" +
            "  <documents/>" +
            "  <tuning>" +
            "    <distribution type=\"legacy\"/>" +
            "  </tuning>\n" +
            "  <group>" +
            "    <node distribution-key=\"0\" hostalias=\"mockhost\"/>" +
            "    <node distribution-key=\"1\" hostalias=\"mockhost\"/>" +
            "  </group>" +
            "</cluster>");
    cluster.getConfig(builder);
    StorDistributormanagerConfig conf = new StorDistributormanagerConfig(builder);
    assertEquals(1024, conf.splitcount());
    assertEquals(512, conf.joincount());
    assertEquals(16772216, conf.splitsize());
    assertEquals(16000000, conf.joinsize());
    // 2 nodes -> minsplitcount 8.
    assertEquals(8, conf.minsplitcount());
    assertTrue(conf.inlinebucketsplitting());

    // Grouped cluster: two groups of three nodes each; the group topology
    // must be counted in, raising minsplitcount.
    cluster = parseCluster("<cluster id=\"storage\">\n" +
            "  <redundancy>2</redundancy>" +
            "  <documents/>" +
            "  <tuning>" +
            "    <distribution type=\"legacy\"/>" +
            "  </tuning>\n" +
            "  <group>" +
            "    <distribution partitions=\"1|*\"/>" +
            "    <group name=\"a\" distribution-key=\"0\">" +
            "      <node distribution-key=\"0\" hostalias=\"mockhost\"/>" +
            "      <node distribution-key=\"1\" hostalias=\"mockhost\"/>" +
            "      <node distribution-key=\"2\" hostalias=\"mockhost\"/>" +
            "    </group>" +
            "    <group name=\"b\" distribution-key=\"1\">" +
            "      <node distribution-key=\"3\" hostalias=\"mockhost\"/>" +
            "      <node distribution-key=\"4\" hostalias=\"mockhost\"/>" +
            "      <node distribution-key=\"5\" hostalias=\"mockhost\"/>" +
            "    </group>" +
            "  </group>" +
            "</cluster>");
    cluster.getConfig(builder);
    conf = new StorDistributormanagerConfig(builder);
    assertEquals(1024, conf.splitcount());
    assertEquals(512, conf.joincount());
    assertEquals(16772216, conf.splitsize());
    assertEquals(16000000, conf.joinsize());
    // 6 nodes across 2 groups -> minsplitcount 14.
    assertEquals(14, conf.minsplitcount());
    assertTrue(conf.inlinebucketsplitting());
}
/**
 * Creates the sharding route engine matching the statement's category.
 * Dispatch order matters: TCL first (database broadcast), then DDL (with a
 * cursor special case), DAL, DCL, and finally DQL/DML routing.
 */
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database, final QueryContext queryContext,
                                              final ShardingConditions shardingConditions, final ConfigurationProperties props, final ConnectionContext connectionContext) {
    SQLStatementContext statementContext = queryContext.getSqlStatementContext();
    SQLStatement statement = statementContext.getSqlStatement();
    if (statement instanceof TCLStatement) {
        return new ShardingDatabaseBroadcastRoutingEngine();
    }
    if (statement instanceof DDLStatement) {
        // Cursor-related DDL takes a dedicated route engine.
        return statementContext instanceof CursorAvailable
                ? getCursorRouteEngine(shardingRule, database, statementContext, queryContext.getHintValueContext(), shardingConditions, props)
                : getDDLRoutingEngine(shardingRule, database, statementContext);
    }
    if (statement instanceof DALStatement) {
        return getDALRoutingEngine(shardingRule, database, statementContext, connectionContext);
    }
    if (statement instanceof DCLStatement) {
        return getDCLRoutingEngine(shardingRule, database, statementContext);
    }
    return getDQLRoutingEngine(shardingRule, database, statementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext);
}
@Test
void assertNewInstanceForCreateResourceGroup() {
    // CREATE RESOURCE GROUP is a DAL statement and must route via the
    // instance-broadcast engine.
    MySQLCreateResourceGroupStatement resourceGroupStatement = mock(MySQLCreateResourceGroupStatement.class);
    when(sqlStatementContext.getSqlStatement()).thenReturn(resourceGroupStatement);
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(),
            mockConnectionContext(), mock(ShardingSphereMetaData.class));
    ShardingRouteEngine actual =
            ShardingRouteEngineFactory.newInstance(shardingRule, database, queryContext, shardingConditions, props, new ConnectionContext(Collections::emptySet));
    assertThat(actual, instanceOf(ShardingInstanceBroadcastRoutingEngine.class));
}
/**
 * Launches the worker process for this container's assignment: computes the
 * memory/CPU budget, builds the environment (LD_LIBRARY_PATH), reserves
 * isolated resources when enabled, then starts the worker via the resource
 * isolation manager.
 */
@Override
public void launch() throws IOException {
    type.assertFull();
    // NUMA zone bound to this port, if the supervisor is NUMA-aware.
    String numaId = SupervisorUtils.getNumaIdForPort(port, conf);
    if (numaId == null) {
        LOG.info("Launching worker with assignment {} for this supervisor {} on port {} with id {}", assignment,
                supervisorId, port, workerId);
    } else {
        LOG.info("Launching worker with assignment {} for this supervisor {} on port {} with id {} bound to numa zone {}",
                assignment, supervisorId, port, workerId, numaId);
    }
    exitedEarly = false;

    final WorkerResources resources = assignment.get_resources();
    final int memOnHeap = getMemOnHeap(resources);
    final int memOffHeap = getMemOffHeap(resources);
    memoryLimitMb = calculateMemoryLimit(resources, memOnHeap);
    final String stormRoot = ConfigUtils.supervisorStormDistRoot(conf, topologyId);
    String jlp = javaLibraryPath(stormRoot, conf);

    Map<String, String> topEnvironment = new HashMap<String, String>();
    @SuppressWarnings("unchecked")
    Map<String, String> environment = (Map<String, String>) topoConf.get(Config.TOPOLOGY_ENVIRONMENT);
    if (environment != null) {
        topEnvironment.putAll(environment);
    }

    // Append any topology-provided LD_LIBRARY_PATH to the computed JLP.
    String ldLibraryPath = topEnvironment.get("LD_LIBRARY_PATH");
    if (ldLibraryPath != null) {
        jlp = jlp + System.getProperty("path.separator") + ldLibraryPath;
    }
    topEnvironment.put("LD_LIBRARY_PATH", jlp);

    if (resourceIsolationManager.isResourceManaged()) {
        final int cpu = (int) Math.ceil(resources.get_cpu());
        //Save the memory limit so we can enforce it less strictly
        resourceIsolationManager.reserveResourcesForWorker(workerId, (int) memoryLimitMb, cpu, numaId);
    }

    List<String> commandList = mkLaunchCommand(memOnHeap, memOffHeap, stormRoot, jlp, numaId);

    LOG.info("Launching worker with command: {}. ", ServerUtils.shellCmd(commandList));

    String workerDir = ConfigUtils.workerRoot(conf, workerId);
    String logPrefix = "Worker Process " + workerId;
    ProcessExitCallback processExitCallback = new ProcessExitCallback(logPrefix);
    resourceIsolationManager.launchWorkerProcess(getWorkerUser(), topologyId, topoConf, port, workerId,
            commandList, topEnvironment, logPrefix, processExitCallback, new File(workerDir));
}
@Test
public void testLaunchStorm1version() throws Exception {
    // Verifies the full worker launch command built for a topology compiled
    // against a 1.x Storm version (LogWriter wrapping, classpath, JVM opts).
    final String topoId = "test_topology_storm_1.x";
    final int supervisorPort = 6628;
    final int port = 8080;
    final String stormHome = ContainerTest.asAbsPath("tmp", "storm-home");
    final String stormLogDir = ContainerTest.asFile(".", "target").getCanonicalPath();
    final String workerId = "worker-id";
    final String stormLocal = ContainerTest.asAbsPath("tmp", "storm-local");
    final String distRoot = ContainerTest.asAbsPath(stormLocal, "supervisor", "stormdist", topoId);
    final File stormcode = new File(distRoot, "stormcode.ser");
    final File stormjar = new File(distRoot, "stormjar.jar");
    final String log4jdir = ContainerTest.asAbsPath(stormHome, "conf");
    final String workerConf = ContainerTest.asAbsPath(log4jdir, "worker.xml");
    final String workerRoot = ContainerTest.asAbsPath(stormLocal, "workers", workerId);
    final String workerTmpDir = ContainerTest.asAbsPath(workerRoot, "tmp");

    // Empty topology carrying only a storm_version field.
    final StormTopology st = new StormTopology();
    st.set_spouts(new HashMap<>());
    st.set_bolts(new HashMap<>());
    st.set_state_spouts(new HashMap<>());
    // minimum 1.x version of supporting STORM-2448 would be 1.0.4
    st.set_storm_version("1.0.4");
    byte[] serializedState = Utils.gzip(Utils.thriftSerialize(st));

    final Map<String, Object> superConf = new HashMap<>();
    superConf.put(Config.STORM_LOCAL_DIR, stormLocal);
    superConf.put(Config.STORM_WORKERS_ARTIFACTS_DIR, stormLocal);
    superConf.put(DaemonConfig.STORM_LOG4J2_CONF_DIR, log4jdir);
    superConf.put(Config.WORKER_CHILDOPTS, " -Dtesting=true");

    LocalAssignment la = new LocalAssignment();
    la.set_topology_id(topoId);

    AdvancedFSOps ops = mock(AdvancedFSOps.class);
    when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true);
    when(ops.slurp(stormcode)).thenReturn(serializedState);

    LocalState ls = mock(LocalState.class);
    MockResourceIsolationManager iso = new MockResourceIsolationManager();

    checkpoint(() -> {
        MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf,
                "SUPERVISOR", supervisorPort, port, la, iso, ls, workerId, new StormMetricsRegistry(),
                new HashMap<>(), ops, "profile");

        mc.launch();

        // Exactly one worker command must have been issued.
        assertEquals(1, iso.workerCmds.size());
        CommandRun cmd = iso.workerCmds.get(0);
        iso.workerCmds.clear();
        // The command is the LogWriter wrapper followed by the worker JVM.
        assertListEquals(Arrays.asList(
                "java",
                "-cp",
                "FRAMEWORK_CP:" + stormjar.getAbsolutePath(),
                "-Dlogging.sensitivity=S3",
                "-Dlogfile.name=worker.log",
                "-Dstorm.home=" + stormHome,
                "-Dworkers.artifacts=" + stormLocal,
                "-Dstorm.id=" + topoId,
                "-Dworker.id=" + workerId,
                "-Dworker.port=" + port,
                "-Dstorm.log.dir=" + stormLogDir,
                "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector",
                "-Dstorm.local.dir=" + stormLocal,
                "-Dworker.memory_limit_mb=768",
                "-Dlog4j.configurationFile=" + workerConf,
                "org.apache.storm.LogWriter",
                "java",
                "-server",
                "-Dlogging.sensitivity=S3",
                "-Dlogfile.name=worker.log",
                "-Dstorm.home=" + stormHome,
                "-Dworkers.artifacts=" + stormLocal,
                "-Dstorm.id=" + topoId,
                "-Dworker.id=" + workerId,
                "-Dworker.port=" + port,
                "-Dstorm.log.dir=" + stormLogDir,
                "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector",
                "-Dstorm.local.dir=" + stormLocal,
                "-Dworker.memory_limit_mb=768",
                "-Dlog4j.configurationFile=" + workerConf,
                "-Dtesting=true",
                "-Djava.library.path=JLP",
                "-Dstorm.conf.file=",
                "-Dstorm.options=",
                "-Djava.io.tmpdir=" + workerTmpDir,
                "-cp",
                "FRAMEWORK_CP:" + stormjar.getAbsolutePath(),
                "org.apache.storm.daemon.worker",
                topoId,
                "SUPERVISOR",
                String.valueOf(port),
                workerId
        ), cmd.cmd);
        assertEquals(new File(workerRoot), cmd.pwd);
    }, ConfigUtils.STORM_HOME, stormHome,
            "storm.log.dir", stormLogDir);
}
/**
 * Extracts the charset name from a Content-Type style header line.
 * <p>
 * The line is split on whitespace and {@code ';'} so that both
 * {@code "...; charset=UTF-8"} and the space-less {@code "...;charset=UTF-8"}
 * form are handled (the original space-only split missed the latter).
 *
 * @param line         the header line; may be {@code null}
 * @param defaultValue returned when the line is {@code null} or no charset
 *                     token is found
 * @return the charset name, or {@code defaultValue}
 */
public static String extractCharset(String line, String defaultValue) {
    if (line == null) {
        return defaultValue;
    }
    // Splitting on ';' as well as whitespace makes "text/html;charset=X" work.
    final String[] parts = line.split("[;\\s]+");
    String charsetInfo = "";
    for (var part : parts) {
        if (part.startsWith("charset")) {
            charsetInfo = part;
            break;
        }
    }
    final String charset = charsetInfo.replace("charset=", "").replace(";", "");
    if (charset.isBlank()) {
        return defaultValue;
    }
    return charset;
}
@DisplayName("default charset information")
@Test
void testExtractCharset() {
    // A standard Content-Type header must yield its declared charset, not the fallback.
    final String header = "Content-Type: text/plain; charset=UTF-8";
    final String fallback = StandardCharsets.US_ASCII.name();
    assertEquals("UTF-8", TelegramAsyncHandler.extractCharset(header, fallback));
}
/**
 * Lists reservations matching the request's queue, optional reservation id,
 * and time window, after validating that the reservation system is enabled
 * and the caller has list permissions on the queue.
 */
@Override
public ReservationListResponse listReservations(
        ReservationListRequest requestInfo) throws YarnException, IOException {
    // Check if reservation system is enabled
    checkReservationSystem();
    ReservationListResponse response =
        recordFactory.newRecordInstance(ReservationListResponse.class);

    Plan plan = rValidator.validateReservationListRequest(
        reservationSystem, requestInfo);
    boolean includeResourceAllocations = requestInfo
        .getIncludeResourceAllocations();

    ReservationId reservationId = null;
    if (requestInfo.getReservationId() != null && !requestInfo
        .getReservationId().isEmpty()) {
      reservationId = ReservationId.parseReservationId(
          requestInfo.getReservationId());
    }

    checkReservationACLs(requestInfo.getQueue(),
        AuditConstants.LIST_RESERVATION_REQUEST, reservationId);

    // Normalize the interval: negative start clamps to 0; end <= -1 means
    // "no upper bound".
    long startTime = Math.max(requestInfo.getStartTime(), 0);
    long endTime = requestInfo.getEndTime() <= -1 ? Long.MAX_VALUE : requestInfo
        .getEndTime();

    Set<ReservationAllocation> reservations;
    reservations = plan.getReservations(reservationId, new ReservationInterval(
        startTime, endTime));

    List<ReservationAllocationState> info =
        ReservationSystemUtil.convertAllocationsToReservationInfo(
            reservations, includeResourceAllocations);

    response.setReservationAllocationState(info);
    return response;
}
@Test
public void testListReservationsByTimeInterval() {
    // Submits one reservation and checks that both a point-in-time query and
    // a wide interval query find it, with full allocations and RDL returned.
    resourceManager = setupResourceManager();
    ClientRMService clientService = resourceManager.getClientRMService();

    Clock clock = new UTCClock();
    long arrival = clock.getTime();
    long duration = 60000;
    long deadline = (long) (arrival + 1.05 * duration);
    ReservationSubmissionRequest sRequest =
        submitReservationTestHelper(clientService, arrival, deadline, duration);

    // List reservations, search by a point in time within the reservation
    // range.
    arrival = clock.getTime();
    ReservationId reservationID = sRequest.getReservationId();
    ReservationListRequest request = ReservationListRequest.newInstance(
        ReservationSystemTestUtil.reservationQ, "", arrival + duration / 2,
        arrival + duration / 2, true);

    ReservationListResponse response = null;
    try {
      response = clientService.listReservations(request);
    } catch (Exception e) {
      Assert.fail(e.getMessage());
    }
    Assert.assertNotNull(response);
    Assert.assertEquals(1, response.getReservationAllocationState().size());
    Assert.assertEquals(response.getReservationAllocationState().get(0)
        .getReservationId().getId(), reservationID.getId());

    // List reservations, search by time within reservation interval.
    request = ReservationListRequest.newInstance(
        ReservationSystemTestUtil.reservationQ, "", 1, Long.MAX_VALUE, true);

    response = null;
    try {
      response = clientService.listReservations(request);
    } catch (Exception e) {
      Assert.fail(e.getMessage());
    }
    Assert.assertNotNull(response);
    Assert.assertEquals(1, response.getReservationAllocationState().size());
    Assert.assertEquals(response.getReservationAllocationState().get(0)
        .getReservationId().getId(), reservationID.getId());
    // Verify that the full resource allocations exist.
    Assert.assertTrue(response.getReservationAllocationState().get(0)
        .getResourceAllocationRequests().size() > 0);

    // Verify that the full RDL is returned.
    ReservationRequests reservationRequests =
        response.getReservationAllocationState().get(0)
            .getReservationDefinition().getReservationRequests();
    Assert.assertEquals("R_ALL",
        reservationRequests.getInterpreter().toString());
    Assert.assertTrue(reservationRequests.getReservationResources().get(0)
        .getDuration() == duration);
}
/**
 * Validates every countable resource in the request against the available
 * resource: values must be non-negative and must fit within the available
 * allocation, otherwise an {@link InvalidResourceRequestException} is thrown.
 */
@Private
@VisibleForTesting
static void checkResourceRequestAgainstAvailableResource(Resource reqResource,
    Resource availableResource) throws InvalidResourceRequestException {
  for (int typeIndex = 0;
      typeIndex < ResourceUtils.getNumberOfCountableResourceTypes();
      typeIndex++) {
    final ResourceInformation requested =
        reqResource.getResourceInformation(typeIndex);
    final String resourceName = requested.getName();

    // Negative values are rejected before the max-allocation check.
    if (requested.getValue() < 0) {
      throwInvalidResourceException(reqResource, availableResource,
          resourceName, InvalidResourceType.LESS_THAN_ZERO);
    }
    if (!checkResource(requested, availableResource)) {
      throwInvalidResourceException(reqResource, availableResource,
          resourceName, InvalidResourceType.GREATER_THEN_MAX_ALLOCATION);
    }
  }
}
@Test
public void testCustomResourceRequestedUnitIsSmallerThanAvailableUnit()
    throws InvalidResourceRequestException {
  // 11 (base unit) requested vs 0G available: unit conversion must still
  // detect that the request exceeds the available allocation.
  Resource requestedResource =
      ResourceTypesTestHelper.newResource(1, 1,
          ImmutableMap.of("custom-resource-1", "11"));

  Resource availableResource =
      ResourceTypesTestHelper.newResource(1, 1,
          ImmutableMap.of("custom-resource-1", "0G"));

  exception.expect(InvalidResourceRequestException.class);
  exception.expectMessage(InvalidResourceRequestExceptionMessageGenerator
      .create().withRequestedResourceType("custom-resource-1")
      .withRequestedResource(requestedResource)
      .withAvailableAllocation(availableResource)
      .withMaxAllocation(configuredMaxAllocation)
      .withInvalidResourceType(GREATER_THEN_MAX_ALLOCATION)
      .build());
  SchedulerUtils.checkResourceRequestAgainstAvailableResource(
      requestedResource, availableResource);
}
/**
 * Builds a {@code ProjectMeasuresQuery} from parsed filter criteria,
 * restricting it to the given project UUIDs when they are provided.
 */
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria,
  @Nullable Set<String> projectUuids) {
  ProjectMeasuresQuery query = new ProjectMeasuresQuery();
  if (projectUuids != null) {
    query.setProjectUuids(projectUuids);
  }
  for (Criterion criterion : criteria) {
    processCriterion(criterion, query);
  }
  return query;
}
@Test
public void filter_no_data() {
  // The special NO_DATA value must translate into a "no data" metric criterion.
  List<Criterion> criteria = singletonList(
    Criterion.builder().setKey("duplicated_lines_density").setOperator(EQ).setValue("NO_DATA").build());

  ProjectMeasuresQuery underTest = newProjectMeasuresQuery(criteria, emptySet());

  assertThat(underTest.getMetricCriteria())
    .extracting(MetricCriterion::getMetricKey, MetricCriterion::isNoData)
    .containsOnly(tuple("duplicated_lines_density", true));
}
/**
 * Retrieves all extended attributes of the object at the given path,
 * delegating to retrieveHeaders and counting against the XAttr-get-map
 * invocation statistic.
 */
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
  return retrieveHeaders(path, INVOCATION_XATTR_GET_MAP);
}
@Test
public void testFilterEmptyXAttrs() throws Throwable {
  // Filtering against an empty list of attribute names yields an empty map.
  Map<String, byte[]> xAttrs = headerProcessing.getXAttrs(MAGIC_PATH, Lists.list());
  Assertions.assertThat(xAttrs.keySet())
      .describedAs("Attribute keys")
      .isEmpty();
}
/**
 * Renders the configured adaptive-card template with the event's message
 * backlog as the template model.
 *
 * @param ctx    the event notification context
 * @param config the Teams notification configuration holding the template
 * @return the rendered notification body
 * @throws PermanentEventNotificationException if the template cannot be
 *     rendered (a permanently broken template; retries will not help)
 */
@VisibleForTesting
String generateBody(EventNotificationContext ctx, TeamsEventNotificationConfigV2 config) throws PermanentEventNotificationException {
    final List<MessageSummary> backlog = getMessageBacklog(ctx, config);
    Map<String, Object> model = getCustomMessageModel(ctx, config.type(), backlog, config.timeZone());
    try {
        return templateEngine.transform(config.adaptiveCard(), model);
    } catch (Exception e) {
        String error = "Invalid Custom Message template.";
        LOG.error("{} [{}]", error, e.toString());
        // BUGFIX: pass the caught exception itself as the cause. The previous
        // code passed e.getCause(), which may be null and drops the
        // template-engine stack trace; it also concatenated the message
        // without a separator.
        throw new PermanentEventNotificationException(error + " " + e, e);
    }
}
// An invalid template must surface as a permanent (non-retryable) error.
@Test(expected = PermanentEventNotificationException.class)
public void buildCustomMessageWithInvalidTemplate() throws EventNotificationException {
    notificationConfig = buildInvalidTemplate();
    teamsEventNotification.generateBody(eventNotificationContext, notificationConfig);
}
/**
 * Joins every IRC channel declared in this endpoint's configuration.
 */
public void joinChannels() {
    for (IrcChannel configuredChannel : configuration.getChannelList()) {
        joinChannel(configuredChannel);
    }
}
@Test
public void doJoinChannels() {
    // Both configured channels must be joined; the second carries a key.
    endpoint.joinChannels();
    verify(connection).doJoin("#chan1");
    verify(connection).doJoin("#chan2", "chan2key");
}
/**
 * Returns the e-mail status of the account bound to the supplied app
 * session.
 *
 * @param deprecatedRequest request carrying the app session id
 * @return the account's e-mail status copied into the deprecated DTO
 */
@PostMapping("/status")
@Operation(summary = "Get the email status of an account")
public DEmailStatusResult getEmailStatus(@RequestBody DAccountRequest deprecatedRequest) {
    // validate() resolves the session and is expected to reject bad sessions.
    AppSession appSession = validate(deprecatedRequest);
    var result = accountService.getEmailStatus(appSession.getAccountId());
    return DEmailStatusResult.copyFrom(result);
}
@Test
public void validEmailStatusNotVerified() {
    // All fields of the service result must be copied into the DTO.
    DAccountRequest request = new DAccountRequest();
    request.setAppSessionId("id");
    EmailStatusResult result = new EmailStatusResult();
    result.setStatus(Status.OK);
    result.setError("error");
    result.setEmailStatus(EmailStatus.NOT_VERIFIED);
    result.setEmailAddress("address");
    result.setActionNeeded(true);
    when(accountService.getEmailStatus(eq(1L))).thenReturn(result);
    DEmailStatusResult emailStatus = emailController.getEmailStatus(request);
    assertEquals(Status.OK, emailStatus.getStatus());
    assertEquals("error", emailStatus.getError());
    assertEquals(EmailStatus.NOT_VERIFIED, emailStatus.getEmailStatus());
    assertEquals("address", emailStatus.getNoVerifiedEmailAddress());
    assertEquals(true, emailStatus.getUserActionNeeded());
}
public static double regularizedUpperIncompleteGamma(double s, double x) { if (s < 0.0) { throw new IllegalArgumentException("Invalid s: " + s); } if (x < 0.0) { throw new IllegalArgumentException("Invalid x: " + x); } double igf = 0.0; if (x != 0.0) { if (Double.isNaN(x)) { igf = 1.0; } else { if (x < s + 1.0) { // Series representation igf = 1.0 - regularizedIncompleteGammaSeries(s, x); } else { // Continued fraction representation igf = 1.0 - regularizedIncompleteGammaFraction(s, x); } } } return igf; }
@Test
public void testUpperIncompleteGamma() {
    System.out.println("incompleteGamma");
    // Reference values: Q(2.1, 3) and Q(3, 2.1), to 4 decimal places.
    assertEquals(0.2193, Gamma.regularizedUpperIncompleteGamma(2.1, 3), 1E-4);
    assertEquals(0.6496, Gamma.regularizedUpperIncompleteGamma(3, 2.1), 1E-4);
}
/**
 * Resolves the Java type for a JSON-schema node: explicit "object" schemas
 * (or schemas with non-empty "properties") become generated classes,
 * "existingJavaType" wins over primitive names, and the JSON primitive
 * types map to their Java counterparts.  A "format" or (string-only)
 * "media" keyword may further refine the type afterwards.
 */
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
    String propertyTypeName = getTypeName(node);
    JType type;
    if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
        // Schemas with properties are treated as objects even without an
        // explicit "type": "object".
        type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else if (node.has("existingJavaType")) {
        String typeName = node.path("existingJavaType").asText();
        if (isPrimitive(typeName, jClassContainer.owner())) {
            type = primitiveType(typeName, jClassContainer.owner());
        } else {
            type = resolveType(jClassContainer, typeName);
        }
    } else if (propertyTypeName.equals("string")) {
        type = jClassContainer.owner().ref(String.class);
    } else if (propertyTypeName.equals("number")) {
        type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("integer")) {
        type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("boolean")) {
        type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("array")) {
        type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else {
        // Unknown/absent type falls back to Object.
        type = jClassContainer.owner().ref(Object.class);
    }
    // "format"/"media" refinements only apply when no explicit Java type
    // was requested via javaType/existingJavaType.
    if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
        type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
    } else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
        type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
    }
    return type;
}
@Test
public void applyDefaultsToTypeAnyObject() {
    // An empty schema node must resolve to java.lang.Object.
    JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName());
    ObjectNode objectNode = new ObjectMapper().createObjectNode();
    JType result = rule.apply("fooBar", objectNode, null, jpackage, null);
    assertThat(result.fullName(), is(Object.class.getName()));
}
/**
 * Parses the given DistCp command-line arguments into a
 * {@link DistCpOptions} instance.
 *
 * @param args the raw command-line arguments
 * @return the fully built options
 * @throws IllegalArgumentException if the arguments cannot be parsed or any
 *     option value is invalid
 */
public static DistCpOptions parse(String[] args)
    throws IllegalArgumentException {

  CommandLineParser parser = new CustomParser();

  CommandLine command;
  try {
    command = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Unable to parse arguments. "
        + Arrays.toString(args), e);
  }

  DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);

  // Plain boolean switches map directly onto builder flags; note that
  // BLOCKING is inverted (the switch requests non-blocking execution).
  builder
      .withAtomicCommit(
          command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
      .withSyncFolder(
          command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
      .withDeleteMissing(
          command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
      .withIgnoreFailures(
          command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
      .withOverwrite(
          command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
      .withAppend(
          command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
      .withSkipCRC(
          command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
      .withBlocking(
          !command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
      .withVerboseLog(
          command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
      .withDirectWrite(
          command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
      .withUseIterator(
          command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
      .withUpdateRoot(
          command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));

  if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseDiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseRdiff(snapshots[0], snapshots[1]);
  }

  if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    builder.withFiltersFile(
        getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    builder.withLogPath(
        new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }

  if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
    final String workPath = getVal(command,
        DistCpOptionSwitch.WORK_PATH.getSwitch());
    if (workPath != null && !workPath.isEmpty()) {
      builder.withAtomicWorkPath(new Path(workPath));
    }
  }
  if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
    builder.withTrackMissing(
        new Path(getVal(
            command,
            DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
  }

  if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
    try {
      // Primitive parse; avoids pointless autoboxing.
      final float mapBandwidth = Float.parseFloat(
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
      builder.withMapBandwidth(mapBandwidth);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Bandwidth specified is invalid: "
          + getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
    }
  }

  if (command.hasOption(
      DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
    try {
      final int numThreads = Integer.parseInt(getVal(command,
          DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
      builder.withNumListstatusThreads(numThreads);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(
          "Number of liststatus threads is invalid: " + getVal(command,
              DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
    try {
      final int maps = Integer.parseInt(
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
      builder.maxMaps(maps);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Number of maps is invalid: "
          + getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    builder.withCopyStrategy(
        getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
    builder.preserve(
        getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
  }

  // FILE_LIMIT and SIZE_LIMIT are accepted but deliberately ignored.
  if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated"
        + " option. Ignoring.");
  }

  if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated"
        + " option. Ignoring.");
  }

  if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
    // CLEANUP: the previous code called trim() on the switch *name*
    // (a no-op that read like a bug) rather than on the value.
    final String chunkSizeStr = getVal(command,
        DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch());
    try {
      int csize = Integer.parseInt(chunkSizeStr);
      // Negative values are clamped to 0 (chunking disabled).
      csize = csize > 0 ? csize : 0;
      LOG.info("Set distcp blocksPerChunk to " + csize);
      builder.withBlocksPerChunk(csize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("blocksPerChunk is invalid: "
          + chunkSizeStr, e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
    // Same misplaced-trim cleanup as for BLOCKS_PER_CHUNK above.
    final String copyBufferSizeStr = getVal(command,
        DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch());
    try {
      int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
      builder.withCopyBufferSize(copyBufferSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("copyBufferSize is invalid: "
          + copyBufferSizeStr, e);
    }
  }

  return builder.build();
}
@Test
public void testMissingSourceInfo() {
  // Only a target path is supplied; parsing must reject the invocation.
  try {
    OptionsParser.parse(new String[] {
        "hdfs://localhost:8020/target/"});
    Assert.fail("Neither source listing not source paths present");
  } catch (IllegalArgumentException ignore) {}
}
/** Returns the key's string form, or an empty string when no key is set. */
@Override
public String toString() {
    if (this._key == null) {
        return "";
    }
    return this._key.toString();
}
@Test
public void testToString() {
    // NOTE(review): exercises toString() for a non-null and a null key but
    // makes no assertions; consider asserting "6" and "" respectively.
    IdResponse<Long> longIdResponse = new IdResponse<>(6L);
    longIdResponse.toString();
    IdResponse<Long> nullIdResponse = new IdResponse<>(null);
    nullIdResponse.toString();
}
/**
 * Looks up a city by id.
 *
 * @throws ElementNotFoundException if no city with the given id exists
 */
@Override
public City find(Long id) throws ElementNotFoundException {
    String exceptionMessage = String.format(EXCEPTION_MESSAGE_TEMPLATE, "ID " + id);
    return cityRepository.find(id)
            .orElseThrow(createSupplierOnElementNotFound(exceptionMessage));
}
@Test
void find() throws ElementNotFoundException {
    // The service must return exactly what the repository provides.
    City expected = createCity();
    Mockito.when(cityRepository.find(expected.getId()))
            .thenReturn(Optional.of(expected));
    City actual = cityService.find(expected.getId());
    ReflectionAssert.assertReflectionEquals(expected, actual);
}
/**
 * FEEL all(): returns true only if every non-null element of the list is
 * Boolean.TRUE.  Null elements are skipped, a null list yields true
 * (vacuous truth), and any non-Boolean element produces an
 * invalid-parameter error.
 */
public FEELFnResult<Boolean> invoke(@ParameterName("list") List list) {
    if (list == null) {
        return FEELFnResult.ofResult(true);
    }
    boolean result = true;
    for (final Object element : list) {
        if (element != null && !(element instanceof Boolean)) {
            // Fail fast on the first non-Boolean element.
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not" +
                    " a Boolean"));
        } else {
            if (element != null) {
                // Nulls are ignored rather than treated as false; scanning
                // continues even after result becomes false so later
                // non-Boolean elements are still reported as errors.
                result &= (Boolean) element;
            }
        }
    }
    return FEELFnResult.ofResult(result);
}
@Test
void invokeArrayParamTypeHeterogenousArray() {
    // A non-Boolean element must yield an InvalidParametersEvent regardless
    // of position or surrounding booleans/nulls.
    FunctionTestUtil.assertResultError(nnAllFunction.invoke(new Object[]{Boolean.TRUE, 1}), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(nnAllFunction.invoke(new Object[]{Boolean.FALSE, 1}), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(nnAllFunction.invoke(new Object[]{Boolean.TRUE, null, 1}), InvalidParametersEvent.class);
}
/**
 * Applies CONFIG SET on the given cluster node and blocks until completion.
 */
@Override
public void setConfig(RedisClusterNode node, String param, String value) {
    RedisClient entry = getEntry(node);
    RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_SET, param, value);
    // Wait for the async command to finish (or propagate its error).
    syncFuture(f);
}
@Test
public void testSetConfig() {
    // Smoke test: setting a config parameter on a master must not throw.
    RedisClusterNode master = getFirstMaster();
    connection.setConfig(master, "timeout", "10");
}
/** Returns the transformer backing this definition. */
@Override
public ParameterByTypeTransformer parameterByTypeTransformer() {
    return transformer;
}
@Test
void can_transform_string_to_type() throws Throwable {
    // The definition must delegate transformation to the wrapped method.
    Method method = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("transform_string_to_type", String.class, Type.class);
    JavaDefaultParameterTransformerDefinition definition = new JavaDefaultParameterTransformerDefinition(method, lookup);
    Object transformed = definition.parameterByTypeTransformer().transform("something", String.class);
    assertThat(transformed, is("transform_string_to_type"));
}
/**
 * Resolves the mapping fields for a Mongo collection.  With no user-declared
 * fields, every sampled document field is mapped verbatim; otherwise each
 * user field is matched against the sampled fields (prefixing
 * "fullDocument." in streaming mode when no external name is given) and its
 * declared type is validated against the observed one.
 */
List<MappingField> resolveFields(
        @Nonnull String[] externalName,
        @Nullable String dataConnectionName,
        @Nonnull Map<String, String> options,
        @Nonnull List<MappingField> userFields,
        boolean stream
) {
    // Predicate deciding which column acts as the primary key.
    Predicate<MappingField> pkColumnName = Options.getPkColumnChecker(options, stream);
    Map<String, DocumentField> dbFields = readFields(externalName, dataConnectionName, options, stream);

    List<MappingField> resolvedFields = new ArrayList<>();
    if (userFields.isEmpty()) {
        // No user mapping: take every sampled field as-is.
        for (DocumentField documentField : dbFields.values()) {
            MappingField mappingField = new MappingField(
                    documentField.columnName,
                    resolveType(documentField.columnType),
                    documentField.columnName,
                    documentField.columnType.name()
            );
            mappingField.setPrimaryKey(pkColumnName.test(mappingField));
            resolvedFields.add(mappingField);
        }
    } else {
        for (MappingField f : userFields) {
            // Streaming change events nest the document under "fullDocument".
            String prefixIfStream = stream ? "fullDocument." : "";
            String nameInMongo = f.externalName() == null ? prefixIfStream + f.name() : f.externalName();

            DocumentField documentField = getField(dbFields, f, stream);
            if (documentField == null) {
                throw new IllegalArgumentException("Could not resolve field with name " + nameInMongo);
            }
            MappingField mappingField = new MappingField(f.name(), f.type(), documentField.columnName,
                    documentField.columnType.name());
            mappingField.setPrimaryKey(pkColumnName.test(mappingField));
            // Reject user types incompatible with the observed column type.
            validateType(f, documentField);
            resolvedFields.add(mappingField);
        }
    }
    return resolvedFields;
}
@Test
public void testResolvesMappingFieldsViaSample_withUserFields() {
    // User-declared fields must be matched against a sampled document and
    // carry the observed external type; "_id" is detected as primary key.
    try (MongoClient client = MongoClients.create(mongoContainer.getConnectionString())) {
        String databaseName = "testDatabase";
        String collectionName = "people_3";
        MongoDatabase testDatabase = client.getDatabase(databaseName);
        MongoCollection<Document> collection = testDatabase.getCollection(collectionName);
        collection.insertOne(new Document("firstName", "Tomasz")
                .append("lastName", "Gawęda")
                .append("birthYear", 1992));
        FieldResolver resolver = new FieldResolver(null);
        Map<String, String> readOpts = new HashMap<>();
        readOpts.put("connectionString", mongoContainer.getConnectionString());
        List<MappingField> fields = resolver.resolveFields(new String[]{databaseName, collectionName}, null,
                readOpts,
                Arrays.asList(
                        new MappingField("id", OBJECT).setExternalName("_id"),
                        new MappingField("birthYear", QueryDataType.BIGINT)
                ),
                false);
        assertThat(fields).contains(
                new MappingField("id", OBJECT).setPrimaryKey(true).setExternalName("_id").setExternalType("OBJECT_ID"),
                new MappingField("birthYear", QueryDataType.BIGINT)
                        .setExternalName("birthYear").setPrimaryKey(false).setExternalType("INT32")
        );
    }
}
/**
 * Returns the file attributes of this component.
 *
 * @throws IllegalStateException if this component is not of type FILE
 */
@Override
public FileAttributes getFileAttributes() {
    if (this.type != Component.Type.FILE) {
        throw new IllegalStateException("Only component of type FILE have a FileAttributes object");
    }
    return fileAttributes;
}
@Test
public void fail_with_ISE_when_calling_get_file_attributes_on_not_file() {
    // A PROJECT component must reject getFileAttributes with an ISE.
    assertThatThrownBy(() -> {
        ComponentImpl component = new ComponentImpl("Project", Component.Type.PROJECT, null);
        component.getFileAttributes();
    })
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("Only component of type FILE have a FileAttributes object");
}
/**
 * Returns the configuration definitions of the named plugin, merging
 * framework-level definitions (for sink/source connectors) with those the
 * plugin class itself declares.  Plugin-declared keys win on conflicts.
 *
 * @throws NotFoundException   if no plugin with that name exists
 * @throws BadRequestException if the class is not a supported plugin type
 */
@Override
public List<ConfigKeyInfo> connectorPluginConfig(String pluginName) {
    Plugins p = plugins();
    Class<?> pluginClass;
    try {
        pluginClass = p.pluginClass(pluginName);
    } catch (ClassNotFoundException cnfe) {
        throw new NotFoundException("Unknown plugin " + pluginName + ".");
    }

    // Instantiate under the plugin's own class loader.
    try (LoaderSwap loaderSwap = p.withClassLoader(pluginClass.getClassLoader())) {
        Object plugin = p.newPlugin(pluginName);
        // Contains definitions coming from Connect framework
        ConfigDef baseConfigDefs = null;
        // Contains definitions specifically declared on the plugin
        ConfigDef pluginConfigDefs;
        if (plugin instanceof SinkConnector) {
            baseConfigDefs = SinkConnectorConfig.configDef();
            pluginConfigDefs = ((SinkConnector) plugin).config();
        } else if (plugin instanceof SourceConnector) {
            baseConfigDefs = SourceConnectorConfig.configDef();
            pluginConfigDefs = ((SourceConnector) plugin).config();
        } else if (plugin instanceof Converter) {
            pluginConfigDefs = ((Converter) plugin).config();
        } else if (plugin instanceof HeaderConverter) {
            pluginConfigDefs = ((HeaderConverter) plugin).config();
        } else if (plugin instanceof Transformation) {
            pluginConfigDefs = ((Transformation<?>) plugin).config();
        } else if (plugin instanceof Predicate) {
            pluginConfigDefs = ((Predicate<?>) plugin).config();
        } else {
            throw new BadRequestException("Invalid plugin class " + pluginName + ". Valid types are sink, source, converter, header_converter, transformation, predicate.");
        }

        // Track config properties by name and, if the same property is defined in multiple places,
        // give precedence to the one defined by the plugin class
        // Preserve the ordering of properties as they're returned from each ConfigDef
        Map<String, ConfigKey> configsMap = new LinkedHashMap<>(pluginConfigDefs.configKeys());
        if (baseConfigDefs != null)
            baseConfigDefs.configKeys().forEach(configsMap::putIfAbsent);

        List<ConfigKeyInfo> results = new ArrayList<>();
        for (ConfigKey configKey : configsMap.values()) {
            results.add(AbstractHerder.convertConfigKey(configKey));
        }
        return results;
    } catch (ClassNotFoundException e) {
        throw new ConnectException("Failed to load plugin class or one of its dependencies", e);
    }
}
@Test
public void testGetConnectorConfigDefWithBadName() throws Exception {
    // An unknown plugin class must surface as NotFoundException.
    String connName = "AnotherPlugin";
    AbstractHerder herder = testHerder();
    when(worker.getPlugins()).thenReturn(plugins);
    when(plugins.pluginClass(anyString())).thenThrow(new ClassNotFoundException());
    assertThrows(NotFoundException.class, () -> herder.connectorPluginConfig(connName));
}
/**
 * Returns an aggregate operation that accumulates items into a HashMap,
 * keyed and valued by the given functions.  Duplicate keys are not merged:
 * they raise an IllegalStateException.
 */
public static <T, K, U> AggregateOperation1<T, Map<K, U>, Map<K, U>> toMap(
        FunctionEx<? super T, ? extends K> keyFn,
        FunctionEx<? super T, ? extends U> valueFn
) {
    // Both functions must be serializable for distributed execution.
    checkSerializable(keyFn, "keyFn");
    checkSerializable(valueFn, "valueFn");
    return toMap(keyFn, valueFn, (k, v) -> {
        throw new IllegalStateException("Duplicate key: " + k);
    }, HashMap::new);
}
@Test
public void when_toMapCombinesDuplicates_then_exception() {
    // Given: two accumulators holding the same key with different values.
    AggregateOperation1<Entry<Integer, Integer>, Map<Integer, Integer>, Map<Integer, Integer>> op =
            toMap(Entry::getKey, Entry::getValue);
    BiConsumerEx<? super Map<Integer, Integer>, ? super Map<Integer, Integer>> combineFn = op.combineFn();
    assertNotNull("combineFn", combineFn);
    Map<Integer, Integer> acc1 = op.createFn().get();
    op.accumulateFn().accept(acc1, entry(1, 1));
    Map<Integer, Integer> acc2 = op.createFn().get();
    op.accumulateFn().accept(acc2, entry(1, 2));

    // When/Then: combining must hit the throwing merge function.
    assertThrows(IllegalStateException.class, () -> combineFn.accept(acc1, acc2));
}
/**
 * Initializes the annotation processor: caches the Kora annotation type
 * elements and the processing context.  If @KoraApp is not on the classpath
 * the processor stays uninitialized and does nothing further.
 */
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    this.koraAppElement = this.elements.getTypeElement(CommonClassNames.koraApp.canonicalName());
    if (this.koraAppElement == null) {
        // @KoraApp is not available: skip the rest of the setup.
        return;
    }
    this.moduleElement = this.elements.getTypeElement(CommonClassNames.module.canonicalName());
    this.koraSubmoduleElement = this.elements.getTypeElement(CommonClassNames.koraSubmodule.canonicalName());
    this.componentElement = this.elements.getTypeElement(CommonClassNames.component.canonicalName());
    this.initialized = true;
    this.ctx = new ProcessingContext(processingEnv);
    log.info("@KoraApp processor started");
}
@Test
void appWithProxies() throws Throwable {
    // NOTE(review): only verifies that the graph initializes and all three
    // nodes resolve without throwing; the resolved values are unused and no
    // assertions are made — consider asserting on them.
    var graphDraw = testClass(AppWithValueOfComponents.class);
    var node1 = graphDraw.getNodes().get(0);
    var node2 = graphDraw.getNodes().get(1);
    var node3 = graphDraw.getNodes().get(2);
    var graph = graphDraw.init();
    var value1 = graph.get(node1);
    var value2 = graph.get(node2);
    var value3 = graph.get(node3);
}
/**
 * Binds a guard expression AST node to a {@link GuardedByExpression},
 * rejecting unresolvable expressions and raw type literals.
 */
private static GuardedByExpression bind(JCTree.JCExpression exp, BinderContext context) {
    GuardedByExpression bound = BINDER.visit(exp, context);
    checkGuardedBy(bound != null, String.valueOf(exp));
    checkGuardedBy(bound.kind() != Kind.TYPE_LITERAL, "Raw type literal: %s", exp);
    return bound;
}
@Test
public void finalCase() {
    // A final field guard must bind to a field select on `this`.
    assertThat(
        bind(
            "Test",
            "lock",
            forSourceLines(
                "threadsafety/Test.java",
                "package threadsafety;",
                "class Test {",
                "  final Object lock = new Object();",
                "}")))
        .isEqualTo("(SELECT (THIS) lock)");
}
/**
 * Admin-console auth filter: sets anti-framing headers, enforces the IP
 * allow/block lists, and redirects unauthenticated (or non-admin) users of
 * non-excluded pages to the login page.  Ordering is security-sensitive:
 * IP checks run before authentication, and excluded pages may still be
 * subject to IP checks when IP_ACCESS_IGNORE_EXCLUDES is set.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException {
    HttpServletRequest request = (HttpServletRequest)req;
    HttpServletResponse response = (HttpServletResponse)res;
    // Do not allow framing; OF-997
    response.setHeader("X-Frame-Options", JiveGlobals.getProperty("adminConsole.frame-options", "SAMEORIGIN"));
    // Reset the defaultLoginPage variable
    String loginPage = defaultLoginPage;
    if (loginPage == null) {
        // Choose the token-based login page when one-time tokens are enabled.
        loginPage = request.getContextPath() + (AuthFactory.isOneTimeAccessTokenEnabled() ? "/loginToken.jsp" : "/login.jsp" );
    }
    // Get the page we're on:
    String url = request.getRequestURI().substring(1);
    if (url.startsWith("plugins/")) {
        url = url.substring("plugins/".length());
    }
    // See if it's contained in the exclude list. If so, skip filter execution
    boolean doExclude = false;
    for (String exclude : excludes) {
        if (testURLPassesExclude(url, exclude)) {
            doExclude = true;
            break;
        }
    }
    // IP allow/block lists apply to non-excluded pages, and to excluded
    // pages too when configured to ignore exclusions.
    if (!doExclude || IP_ACCESS_IGNORE_EXCLUDES.getValue()) {
        if (!passesBlocklist(req) || !passesAllowList(req)) {
            response.sendError(HttpServletResponse.SC_FORBIDDEN);
            return;
        }
    }
    if (!doExclude) {
        WebManager manager = new WebManager();
        manager.init(request, response, request.getSession(), context);
        boolean haveOneTimeToken = manager.getAuthToken() instanceof AuthToken.OneTimeAuthToken;
        User loggedUser = manager.getUser();
        boolean loggedAdmin = loggedUser == null ? false : adminManager.isUserAdmin(loggedUser.getUsername(), true);
        // Access requires a one-time token, an admin session, or
        // request-based authentication; otherwise redirect to login.
        if (!haveOneTimeToken && !loggedAdmin && !authUserFromRequest(request)) {
            response.sendRedirect(getRedirectURL(request, loginPage, null));
            return;
        }
    }
    chain.doFilter(req, res);
}
@Test
public void nonExcludedUrlWillErrorWhenMatchingCIDROnBlocklist() throws Exception {
    // A request whose remote address falls inside a blocklisted CIDR must be
    // rejected before the chain is invoked.
    AuthCheckFilter.SERVLET_REQUEST_AUTHENTICATOR.setValue(AdminUserServletAuthenticatorClass.class);
    final AuthCheckFilter filter = new AuthCheckFilter(adminManager, loginLimitManager);
    // Build a /24 CIDR covering the request's own address.
    final String cidr = request.getRemoteAddr().substring(0, request.getRemoteAddr().lastIndexOf('.')) + ".0/24";
    AuthCheckFilter.IP_ACCESS_BLOCKLIST.setValue(Collections.singleton(cidr));
    filter.doFilter(request, response, filterChain);
    verify(response, atLeastOnce()).sendError(anyInt());
    verify(filterChain, never()).doFilter(any(), any());
}
/**
 * Native probe reporting whether the given cipher algorithm/padding pair is
 * supported (presumably by the loaded OpenSSL library — see OpensslCipher).
 */
private static native boolean isSupportedSuite(int alg, int padding);
@Test(timeout=120000)
public void testIsSupportedSuite() throws Exception {
    // Skip entirely when the native OpenSSL binding failed to load.
    Assume.assumeTrue("Skipping due to falilure of loading OpensslCipher.",
        OpensslCipher.getLoadingFailureReason() == null);
    Assert.assertFalse("Unknown suite must not be supported.",
        OpensslCipher.isSupported(CipherSuite.UNKNOWN));
    Assert.assertTrue("AES/CTR/NoPadding is not an optional suite.",
        OpensslCipher.isSupported(CipherSuite.AES_CTR_NOPADDING));
}
/**
 * Resolves the ENS registry contract address for the given network chain id.
 *
 * @param chainId decimal chain id as a string
 * @return the registry contract address for that network
 * @throws NumberFormatException  if chainId is not a valid long
 * @throws EnsResolutionException if the network is not supported
 */
public static String resolveRegistryContract(String chainId) {
    // Parse once as a primitive; the previous code boxed to Long and
    // compared via equals(), which autoboxed every constant on each call.
    final long chainIdLong = Long.parseLong(chainId);

    if (chainIdLong == ChainIdLong.MAINNET) {
        return MAINNET;
    } else if (chainIdLong == ChainIdLong.ROPSTEN) {
        return ROPSTEN;
    } else if (chainIdLong == ChainIdLong.RINKEBY) {
        return RINKEBY;
    } else if (chainIdLong == ChainIdLong.GOERLI) {
        return GOERLI;
    } else if (chainIdLong == ChainIdLong.SEPOLIA) {
        return SEPOLIA;
    } else if (chainIdLong == ChainIdLong.LINEA) {
        return LINEA;
    } else if (chainIdLong == ChainIdLong.LINEA_SEPOLIA) {
        return LINEA_SEPOLIA;
    } else {
        throw new EnsResolutionException(
                "Unable to resolve ENS registry contract for network id: " + chainId);
    }
}
@Test
public void testResolveRegistryContractInvalid() {
    // An unsupported network id must raise EnsResolutionException.
    assertThrows(
            EnsResolutionException.class,
            () -> resolveRegistryContract(ChainIdLong.NONE + ""));
}
/**
 * Finds, paged, all instances that reported usage of the given namespace
 * within the valid time window.  The page total reflects the instance-config
 * count; the content is loaded by resolving the distinct instance ids.
 */
public Page<Instance> findInstancesByNamespace(String appId, String clusterName, String
    namespaceName, Pageable pageable) {
  Page<InstanceConfig> instanceConfigs = instanceConfigRepository.
      findByConfigAppIdAndConfigClusterNameAndConfigNamespaceNameAndDataChangeLastModifiedTimeAfter(appId, clusterName,
          namespaceName, getValidInstanceConfigDate(), pageable);

  List<Instance> instances = Collections.emptyList();
  if (instanceConfigs.hasContent()) {
    // De-duplicate instance ids before loading the Instance entities.
    Set<Long> instanceIds = instanceConfigs.getContent().stream().map
        (InstanceConfig::getInstanceId).collect(Collectors.toSet());
    instances = findInstancesByIds(instanceIds);
  }

  return new PageImpl<>(instances, pageable, instanceConfigs.getTotalElements());
}
@Test
@Rollback
public void testFindInstancesByNamespace() throws Exception {
  // Two instances reporting the same namespace must both be returned.
  String someConfigAppId = "someConfigAppId";
  String someConfigClusterName = "someConfigClusterName";
  String someConfigNamespaceName = "someConfigNamespaceName";
  String someReleaseKey = "someReleaseKey";
  Date someValidDate = new Date();
  String someAppId = "someAppId";
  String someClusterName = "someClusterName";
  String someDataCenter = "someDataCenter";
  String someIp = "someIp";
  String anotherIp = "anotherIp";
  Instance someInstance = instanceService.createInstance(assembleInstance(someAppId, someClusterName,
      someDataCenter, someIp));
  Instance anotherInstance = instanceService.createInstance(assembleInstance(someAppId, someClusterName,
      someDataCenter, anotherIp));
  prepareInstanceConfigForInstance(someInstance.getId(), someConfigAppId, someConfigClusterName,
      someConfigNamespaceName, someReleaseKey, someValidDate);
  prepareInstanceConfigForInstance(anotherInstance.getId(), someConfigAppId, someConfigClusterName,
      someConfigNamespaceName, someReleaseKey, someValidDate);
  Page<Instance> result = instanceService.findInstancesByNamespace(someConfigAppId, someConfigClusterName,
      someConfigNamespaceName, PageRequest.of(0, 10));
  assertEquals(Lists.newArrayList(someInstance, anotherInstance), result.getContent());
}
/**
 * Returns whether any ancestor of {@code snapshotId} has
 * {@code ancestorParentSnapshotId} as its parent.
 */
public static boolean isParentAncestorOf(
    Table table, long snapshotId, long ancestorParentSnapshotId) {
  for (Snapshot ancestor : ancestorsOf(snapshotId, table::snapshot)) {
    Long parentId = ancestor.parentId();
    if (parentId != null && parentId == ancestorParentSnapshotId) {
      return true;
    }
  }

  return false;
}
@Test
public void isParentAncestorOf() {
  // Positive along the main line, negative across branches, positive along
  // the fork lineage.
  assertThat(SnapshotUtil.isParentAncestorOf(table, snapshotMain1Id, snapshotBaseId)).isTrue();
  assertThat(SnapshotUtil.isParentAncestorOf(table, snapshotBranchId, snapshotMain1Id)).isFalse();
  assertThat(SnapshotUtil.isParentAncestorOf(table, snapshotFork2Id, snapshotFork0Id)).isTrue();
}
/**
 * Reports all metrics to InfluxDB in one batch: flushes any pending series,
 * appends each metric kind, writes if any data accumulated, and finally
 * resets every counter to zero (delta-style reporting).  Any failure is
 * logged and the batch is discarded.
 */
@Override
public void report(final SortedMap<MetricName, Gauge> gauges, final SortedMap<MetricName, Counter> counters,
                   final SortedMap<MetricName, Histogram> histograms, final SortedMap<MetricName, Meter> meters,
                   final SortedMap<MetricName, Timer> timers) {
    // One timestamp is shared by every point in this batch.
    final long now = System.currentTimeMillis();
    if(logger.isDebugEnabled()) logger.debug("InfluxDbReporter report is called with counter size " + counters.size());
    try {
        influxDb.flush();

        for (Map.Entry<MetricName, Gauge> entry : gauges.entrySet()) {
            reportGauge(entry.getKey(), entry.getValue(), now);
        }

        for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) {
            reportCounter(entry.getKey(), entry.getValue(), now);
        }

        for (Map.Entry<MetricName, Histogram> entry : histograms.entrySet()) {
            reportHistogram(entry.getKey(), entry.getValue(), now);
        }

        for (Map.Entry<MetricName, Meter> entry : meters.entrySet()) {
            reportMeter(entry.getKey(), entry.getValue(), now);
        }

        for (Map.Entry<MetricName, Timer> entry : timers.entrySet()) {
            reportTimer(entry.getKey(), entry.getValue(), now);
        }

        if (influxDb.hasSeriesData()) {
            influxDb.writeData();
        }
        // reset counters
        for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) {
            Counter counter = entry.getValue();
            long count = counter.getCount();
            counter.dec(count);
        }
    } catch (Exception e) {
        logger.error("Unable to report to InfluxDB. Discarding data.", e);
    }
}
@Test
public void reportsCounters() throws Exception {
    // NOTE(review): the real assertions below are commented out, so this test
    // only verifies that a point was appended; consider re-enabling them.
    final Counter counter = mock(Counter.class);
    Mockito.when(counter.getCount()).thenReturn(100L);

    reporter.report(this.map(), this.map("counter", counter), this.map(), this.map(), this.map());

    final ArgumentCaptor<InfluxDbPoint> influxDbPointCaptor = ArgumentCaptor.forClass(InfluxDbPoint.class);
    Mockito.verify(influxDb, atLeastOnce()).appendPoints(influxDbPointCaptor.capture());
    InfluxDbPoint point = influxDbPointCaptor.getValue();
    System.out.println("point = " + point);

    /*
    assertThat(point.getMeasurement()).isEqualTo("counter");
    assertThat(point.getFields()).isNotEmpty();
    assertThat(point.getFields()).hasSize(1);
    assertThat(point.getFields()).contains(entry("count", 100L));
    */
}
/**
 * Determines YARN's allocation granularity (memory MB, vcores).  Fair
 * schedulers use the increment-allocation settings (new key first, then the
 * legacy key); all other schedulers use the minimum-allocation settings.
 */
@VisibleForTesting
static Resource getUnitResource(YarnConfiguration yarnConfig) {
    final int unitMemMB, unitVcore;

    final String yarnRmSchedulerClazzName = yarnConfig.get(YarnConfiguration.RM_SCHEDULER);
    if (Objects.equals(yarnRmSchedulerClazzName, YARN_RM_FAIR_SCHEDULER_CLAZZ)
            || Objects.equals(yarnRmSchedulerClazzName, YARN_RM_SLS_FAIR_SCHEDULER_CLAZZ)) {
        String propMem = yarnConfig.get(YARN_RM_INCREMENT_ALLOCATION_MB_KEY);
        String propVcore = yarnConfig.get(YARN_RM_INCREMENT_ALLOCATION_VCORES_KEY);

        // New-style key wins; otherwise fall back to the legacy key with a
        // built-in default.
        unitMemMB =
                propMem != null
                        ? Integer.parseInt(propMem)
                        : yarnConfig.getInt(
                                YARN_RM_INCREMENT_ALLOCATION_MB_LEGACY_KEY,
                                DEFAULT_YARN_RM_INCREMENT_ALLOCATION_MB);
        unitVcore =
                propVcore != null
                        ? Integer.parseInt(propVcore)
                        : yarnConfig.getInt(
                                YARN_RM_INCREMENT_ALLOCATION_VCORES_LEGACY_KEY,
                                DEFAULT_YARN_RM_INCREMENT_ALLOCATION_VCORES);
    } else {
        unitMemMB = yarnConfig.getInt(
                YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
                YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
        unitVcore = yarnConfig.getInt(
                YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
                YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    }
    return Resource.newInstance(unitMemMB, unitVcore);
}
@Test
void testGetUnitResource() {
    // Legacy increment keys must apply until the new-style keys are set,
    // after which the new keys take precedence.
    final int minMem = 64;
    final int minVcore = 1;
    final int incMem = 512;
    final int incVcore = 2;
    final int incMemLegacy = 1024;
    final int incVcoreLegacy = 4;

    YarnConfiguration yarnConfig = new YarnConfiguration();
    yarnConfig.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, minMem);
    yarnConfig.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES, minVcore);
    yarnConfig.setInt(Utils.YARN_RM_INCREMENT_ALLOCATION_MB_LEGACY_KEY, incMemLegacy);
    yarnConfig.setInt(Utils.YARN_RM_INCREMENT_ALLOCATION_VCORES_LEGACY_KEY, incVcoreLegacy);

    verifyUnitResourceVariousSchedulers(
            yarnConfig, minMem, minVcore, incMemLegacy, incVcoreLegacy);

    yarnConfig.setInt(Utils.YARN_RM_INCREMENT_ALLOCATION_MB_KEY, incMem);
    yarnConfig.setInt(Utils.YARN_RM_INCREMENT_ALLOCATION_VCORES_KEY, incVcore);

    verifyUnitResourceVariousSchedulers(yarnConfig, minMem, minVcore, incMem, incVcore);
}
/**
 * Formats the expression using default format options (no identifiers escaped).
 *
 * @param expression the expression to format
 * @return the formatted SQL text
 */
public static String formatExpression(final Expression expression) {
  final FormatOptions defaultOptions = FormatOptions.of(s -> false);
  return formatExpression(expression, defaultOptions);
}
@Test
public void shouldFormatStruct() {
  // Given a struct schema with an integer and a string field
  final SqlStruct struct = SqlStruct.builder()
      .field("field1", SqlTypes.INTEGER)
      .field("field2", SqlTypes.STRING)
      .build();

  // When the struct type is formatted as an expression
  final String formatted = ExpressionFormatter.formatExpression(new Type(struct));

  // Then it renders the SQL STRUCT<...> syntax
  assertThat(formatted, equalTo("STRUCT<field1 INTEGER, field2 STRING>"));
}
/**
 * Returns a new array containing only the non-blank elements of the input.
 * Blank means null, empty, or whitespace-only (as defined by StrUtil.isNotBlank).
 *
 * @param array the source array
 * @param <T>   element type, any CharSequence
 * @return filtered array without blank elements
 */
public static <T extends CharSequence> T[] removeBlank(T[] array) {
	return filter(array, element -> StrUtil.isNotBlank(element));
}
@Test
public void removeBlankTest() {
	// Blank entries are the empty string, null and whitespace-only strings.
	String[] input = {"a", "b", "", null, " ", "c"};
	String[] expected = {"a", "b", "c"};
	// JUnit convention: the expected value is the first argument, the actual
	// value second — otherwise failure messages report the values swapped.
	assertArrayEquals(expected, ArrayUtil.removeBlank(input));
}
/**
 * Accepts the value when it is a List, or when it is null and missing values
 * are allowed; rejects anything else.
 *
 * @param value the value to validate
 * @return a passed result for lists (or allowed nulls), a failed result otherwise
 */
@Override
public ValidationResult validate(Object value) {
    final boolean missingButAllowed = allowMissing && value == null;
    if (missingButAllowed || value instanceof List) {
        return new ValidationResult.ValidationPassed();
    }
    return new ValidationResult.ValidationFailed("Value is not a list!");
}
@Test
public void testValidate() throws Exception {
    final ListValidator validator = new ListValidator();

    // Non-list values (including null with default settings) are rejected.
    assertFalse(validator.validate(null).passed());
    assertFalse(validator.validate(Maps.newHashMap()).passed());

    // Any list, empty or populated, is accepted.
    assertTrue(validator.validate(Lists.newArrayList()).passed());
    assertTrue(validator.validate(Lists.newArrayList("a", "string")).passed());
}
// REST endpoint returning general cluster information as JSON or XML (UTF-8).
// Delegates to getClusterInfo() for the actual payload.
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
  return getClusterInfo();
}
@Test
public void testClusterSchedulerFifo() throws JSONException, Exception {
  // Request the scheduler endpoint as JSON.
  final WebResource resource = resource();
  final ClientResponse response = resource.path("ws").path("v1").path("cluster")
      .path("scheduler").accept(MediaType.APPLICATION_JSON)
      .get(ClientResponse.class);

  // The content type must be JSON with an explicit UTF-8 charset.
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());

  // Verify the FIFO scheduler payload structure.
  final JSONObject json = response.getEntity(JSONObject.class);
  verifyClusterSchedulerFifo(json);
}
/**
 * Sets the rule mechanism, which must be either {@code MECHANISM_HADOOP} or
 * {@code MECHANISM_MIT} (case-insensitive). A null value is accepted and
 * stored as-is.
 *
 * @param ruleMech the mechanism name, or null
 * @throws IllegalArgumentException if the mechanism is non-null and unrecognized
 */
public static void setRuleMechanism(String ruleMech) {
  if (ruleMech != null) {
    final boolean recognized = ruleMech.equalsIgnoreCase(MECHANISM_HADOOP)
        || ruleMech.equalsIgnoreCase(MECHANISM_MIT);
    if (!recognized) {
      throw new IllegalArgumentException("Invalid rule mechanism: " + ruleMech);
    }
  }
  ruleMechanism = ruleMech;
}
@Test
public void testAntiPatterns() throws Exception {
  // Under the Hadoop rule mechanism, malformed principals (extra components,
  // slashes in the realm) are rejected and unmatched principals fail to translate.
  KerberosName.setRuleMechanism(KerberosName.MECHANISM_HADOOP);
  checkBadName("owen/owen/owen@FOO.COM");
  checkBadName("owen@foo/bar.com");
  checkBadTranslation("foo@ACME.COM");
  checkBadTranslation("root/joe@FOO.COM");
  // Under the MIT rule mechanism, unmatched principals pass through unchanged.
  KerberosName.setRuleMechanism(KerberosName.MECHANISM_MIT);
  checkTranslation("foo@ACME.COM", "foo@ACME.COM");
  checkTranslation("root/joe@FOO.COM", "root/joe@FOO.COM");
}
/**
 * Queues removal of the tag with the given name in the pending
 * snapshot-references update; takes effect on commit.
 *
 * @param name name of the tag to remove
 * @return this for method chaining
 */
@Override
public ManageSnapshots removeTag(String name) {
  updateSnapshotReferencesOperation().removeTag(name);
  return this;
}
@TestTemplate public void testRemoveTag() { table.newAppend().appendFile(FILE_A).commit(); long snapshotId = table.currentSnapshot().snapshotId(); // Test a basic case of creating and then removing a branch and tag table.manageSnapshots().createTag("tag1", snapshotId).commit(); table.manageSnapshots().removeTag("tag1").commit(); TableMetadata updated = table.ops().refresh(); SnapshotRef expectedTag = updated.ref("tag1"); assertThat(expectedTag).isNull(); // Test chained creating and removal of a tag table.manageSnapshots().createTag("tag2", snapshotId).removeTag("tag2").commit(); assertThat(table.ops().refresh()).isEqualTo(updated); assertThat(updated.ref("tag2")).isNull(); }
/**
 * Sets the spin flag on this builder.
 *
 * @param spin the value to store
 */
public final void setSpin(boolean spin) {
    this.spin = spin;
}
@Test
public void test_setSpin() {
    ReactorBuilder builder = newBuilder();
    // The setter must store the flag verbatim on the builder.
    builder.setSpin(true);
    assertTrue(builder.spin);
}
/**
 * Returns the values of the given map as a list.
 *
 * @param input the source map; may be null
 * @param <T>   the map's value type
 * @return a list of the map's values, or null when the input map is null
 */
@Udf
public <T> List<T> mapValues(final Map<String, T> input) {
  return input == null ? null : Lists.newArrayList(input.values());
}
@Test
public void shouldReturnNullForNullInput() {
  // A null map must produce a null list, not an empty one.
  final List<Long> values = udf.mapValues((Map<String, Long>) null);
  assertThat(values, is(nullValue()));
}
@Subscribe public void onVarbitChanged(VarbitChanged varbitChanged) { if (varbitChanged.getVarbitId() == Varbits.WINTERTODT_TIMER) { int timeToNotify = config.roundNotification(); // Sometimes wt var updates are sent to players even after leaving wt. // So only notify if in wt or after just having left. if (timeToNotify > 0 && (isInWintertodt || needRoundNotif)) { int timeInSeconds = varbitChanged.getValue() * 30 / 50; int prevTimeInSeconds = previousTimerValue * 30 / 50; log.debug("Seconds left until round start: {}", timeInSeconds); if (prevTimeInSeconds > timeToNotify && timeInSeconds <= timeToNotify) { notifier.notify("Wintertodt round is about to start"); needRoundNotif = false; } } previousTimerValue = varbitChanged.getValue(); } }
@Test public void matchStartingNotification_shouldNotify_when10SecondsOptionSelected() { when(config.roundNotification()).thenReturn(10); VarbitChanged varbitChanged = new VarbitChanged(); varbitChanged.setVarbitId(Varbits.WINTERTODT_TIMER); varbitChanged.setValue(20); wintertodtPlugin.onVarbitChanged(varbitChanged); //(10 * 50) / 30 = ~16 varbitChanged.setValue(16); wintertodtPlugin.onVarbitChanged(varbitChanged); verify(notifier, times(1)).notify("Wintertodt round is about to start"); }
/**
 * Lists a directory in a versioned S3 bucket, returning every object version
 * (including delete markers) plus common-prefix folders, which are resolved
 * concurrently on a worker pool.
 *
 * @param directory Directory to list
 * @param listener  Notified after each chunk of results is appended
 * @return All versions of the objects found under the directory
 * @throws BackgroundException Mapped service error; NotfoundException when
 *                             neither a placeholder nor any content exists
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    // Worker pool used to resolve common-prefix folders concurrently
    final ThreadPool pool = ThreadPoolFactory.get("list", concurrency);
    try {
        final String prefix = this.createPrefix(directory);
        if(log.isDebugEnabled()) {
            log.debug(String.format("List with prefix %s", prefix));
        }
        final Path bucket = containerService.getContainer(directory);
        final AttributedList<Path> objects = new AttributedList<>();
        // Pagination markers for the chunked versioned listing
        String priorLastKey = null;
        String priorLastVersionId = null;
        // Per-key revision counter; reset whenever the key changes
        long revision = 0L;
        String lastKey = null;
        // Buckets and the root never require an explicit placeholder object
        boolean hasDirectoryPlaceholder = bucket.isRoot() || containerService.isContainer(directory);
        do {
            final VersionOrDeleteMarkersChunk chunk = session.getClient().listVersionedObjectsChunked(
                bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), prefix, String.valueOf(Path.DELIMITER),
                new HostPreferences(session.getHost()).getInteger("s3.listing.chunksize"),
                priorLastKey, priorLastVersionId, false);
            // Amazon S3 returns object versions in the order in which they were stored, with the most recently stored returned first.
            for(BaseVersionOrDeleteMarker marker : chunk.getItems()) {
                final String key = URIEncoder.decode(marker.getKey());
                if(new SimplePathPredicate(PathNormalizer.compose(bucket, key)).test(directory)) {
                    // Key denotes the listed directory itself, i.e. its placeholder object
                    if(log.isDebugEnabled()) {
                        log.debug(String.format("Skip placeholder key %s", key));
                    }
                    hasDirectoryPlaceholder = true;
                    continue;
                }
                final PathAttributes attr = new PathAttributes();
                attr.setVersionId(marker.getVersionId());
                if(!StringUtils.equals(lastKey, key)) {
                    // Reset revision for next file
                    revision = 0L;
                }
                attr.setRevision(++revision);
                // Mark as duplicate unless this is the latest non-delete-marker version
                attr.setDuplicate(marker.isDeleteMarker() && marker.isLatest() || !marker.isLatest());
                if(marker.isDeleteMarker()) {
                    attr.setCustom(Collections.singletonMap(KEY_DELETE_MARKER, String.valueOf(true)));
                }
                attr.setModificationDate(marker.getLastModified().getTime());
                attr.setRegion(bucket.attributes().getRegion());
                if(marker instanceof S3Version) {
                    final S3Version object = (S3Version) marker;
                    attr.setSize(object.getSize());
                    if(StringUtils.isNotBlank(object.getEtag())) {
                        attr.setETag(StringUtils.remove(object.getEtag(), "\""));
                        // The ETag will only be the MD5 of the object data when the object is stored as plaintext or encrypted
                        // using SSE-S3. If the object is encrypted using another method (such as SSE-C or SSE-KMS) the ETag is
                        // not the MD5 of the object data.
                        attr.setChecksum(Checksum.parse(StringUtils.remove(object.getEtag(), "\"")));
                    }
                    if(StringUtils.isNotBlank(object.getStorageClass())) {
                        attr.setStorageClass(object.getStorageClass());
                    }
                }
                final Path f = new Path(directory.isDirectory() ? directory : directory.getParent(),
                    PathNormalizer.name(key), EnumSet.of(Path.Type.file), attr);
                if(metadata) {
                    // Optionally enrich with attributes from a separate lookup
                    f.withAttributes(attributes.find(f));
                }
                objects.add(f);
                lastKey = key;
            }
            final String[] prefixes = chunk.getCommonPrefixes();
            final List<Future<Path>> folders = new ArrayList<>();
            for(String common : prefixes) {
                if(new SimplePathPredicate(PathNormalizer.compose(bucket, URIEncoder.decode(common))).test(directory)) {
                    // Skip the listed directory itself
                    continue;
                }
                // Resolve folder placeholders concurrently
                folders.add(this.submit(pool, bucket, directory, URIEncoder.decode(common)));
            }
            for(Future<Path> f : folders) {
                try {
                    objects.add(Uninterruptibles.getUninterruptibly(f));
                }
                catch(ExecutionException e) {
                    log.warn(String.format("Listing versioned objects failed with execution failure %s", e.getMessage()));
                    // Re-throw a BackgroundException cause as-is; otherwise map the root cause
                    for(Throwable cause : ExceptionUtils.getThrowableList(e)) {
                        Throwables.throwIfInstanceOf(cause, BackgroundException.class);
                    }
                    throw new DefaultExceptionMappingService().map(Throwables.getRootCause(e));
                }
            }
            priorLastKey = null != chunk.getNextKeyMarker() ? URIEncoder.decode(chunk.getNextKeyMarker()) : null;
            priorLastVersionId = chunk.getNextVersionIdMarker();
            listener.chunk(directory, objects);
        }
        while(priorLastKey != null);
        if(!hasDirectoryPlaceholder && objects.isEmpty()) {
            // Only for AWS
            if(S3Session.isAwsHostname(session.getHost().getHostname())) {
                if(StringUtils.isEmpty(RequestEntityRestStorageService.findBucketInHostname(session.getHost()))) {
                    if(log.isWarnEnabled()) {
                        log.warn(String.format("No placeholder found for directory %s", directory));
                    }
                    throw new NotfoundException(directory.getAbsolute());
                }
            }
            else {
                // Handle missing prefix for directory placeholders in Minio
                final VersionOrDeleteMarkersChunk chunk = session.getClient().listVersionedObjectsChunked(
                    bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(),
                    String.format("%s%s", this.createPrefix(directory.getParent()), directory.getName()),
                    String.valueOf(Path.DELIMITER), 1, null, null, false);
                if(Arrays.stream(chunk.getCommonPrefixes()).map(URIEncoder::decode).noneMatch(common -> common.equals(prefix))) {
                    throw new NotfoundException(directory.getAbsolute());
                }
            }
        }
        return objects;
    }
    catch(ServiceException e) {
        throw new S3ExceptionMappingService().map("Listing directory {0} failed", e, directory);
    }
    finally {
        // Cancel future tasks
        pool.shutdown(false);
    }
}
@Test
public void testListFile() throws Exception {
    final Path container = new Path("versioning-test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory));
    final String name = new AlphanumericRandomStringService().random();
    // Create a regular file, then attempt to list it as if it were a directory
    final Path file = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(new Path(container, name, EnumSet.of(Path.Type.file)), new TransferStatus());
    try {
        // Listing a file path as a directory must fail with NotfoundException
        new S3VersionedObjectListService(session, new S3AccessControlListFeature(session)).list(new Path(container, name, EnumSet.of(Path.Type.directory)), new DisabledListProgressListener());
        fail();
    }
    catch(NotfoundException e) {
        // Expected
    }
    // Clean up the created file
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Creates a {@link StateSerdes} using built-in serdes resolved from the given
 * key and value classes via {@code Serdes.serdeFrom}.
 *
 * @param topic      the topic name
 * @param keyClass   the class of the key type
 * @param valueClass the class of the value type
 * @param <K>        key type
 * @param <V>        value type
 * @return state serdes for the given built-in types
 */
public static <K, V> StateSerdes<K, V> withBuiltinTypes(
    final String topic,
    final Class<K> keyClass,
    final Class<V> valueClass) {
    return new StateSerdes<>(topic, Serdes.serdeFrom(keyClass), Serdes.serdeFrom(valueClass));
}
@Test
public void shouldThrowForUnknownKeyTypeForBuiltinTypes() {
    // Class is not a supported built-in serde type, so resolution must fail.
    assertThrows(IllegalArgumentException.class, () -> StateSerdes.withBuiltinTypes("anyName", Class.class, byte[].class));
}
/**
 * Decodes an ABI-encoded function result into a list of typed values.
 * Delegates to the configured decoder instance.
 *
 * @param rawInput         ABI-encoded return data
 * @param outputParameters the function's declared output types
 * @return the decoded values, one per output parameter
 */
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
    return decoder.decodeFunctionResult(rawInput, outputParameters);
}
@Test
public void testDecodeStaticStruct() {
    // Two 32-byte words: first struct field = 1, second struct field = 100 (0x64).
    String rawInput =
            "0x0000000000000000000000000000000000000000000000000000000000000001"
                    + "0000000000000000000000000000000000000000000000000000000000000064";

    // JUnit convention: expected value first, actual value second, so assertion
    // failure messages report the values in the right roles.
    assertEquals(
            Collections.singletonList(
                    new AbiV2TestFixture.Bar(BigInteger.ONE, BigInteger.valueOf(100))),
            FunctionReturnDecoder.decode(
                    rawInput, AbiV2TestFixture.getBarFunction.getOutputParameters()));
}
/**
 * Resolves the partitioning strategy for a map, in priority order:
 * (1) an attribute-based strategy built from the attribute configs (cached per map),
 * (2) an explicit strategy instance from the config,
 * (3) a strategy class name from the config, instantiated via the config class
 *     loader and cached per map.
 * Returns null when nothing is configured.
 *
 * @param mapName          name of the map, used as the cache key
 * @param config           partitioning strategy config; may be null
 * @param attributeConfigs partitioning attribute configs; may be null or empty
 * @return the resolved strategy, or null if none is configured
 */
@SuppressWarnings("checkstyle:NestedIfDepth")
@Nullable
public PartitioningStrategy getPartitioningStrategy(
    String mapName,
    PartitioningStrategyConfig config,
    final List<PartitioningAttributeConfig> attributeConfigs
) {
    if (attributeConfigs != null && !attributeConfigs.isEmpty()) {
        return cache.computeIfAbsent(mapName, k -> createAttributePartitionStrategy(attributeConfigs));
    }
    if (config != null && config.getPartitioningStrategy() != null) {
        return config.getPartitioningStrategy();
    }
    if (config != null && config.getPartitioningStrategyClass() != null) {
        // Fast path: return an already-cached instance for this map
        PartitioningStrategy<?> strategy = cache.get(mapName);
        if (strategy != null) {
            return strategy;
        }
        try {
            // We don't use computeIfAbsent intentionally so that the map isn't blocked if the instantiation takes a
            // long time - it's user code
            strategy = ClassLoaderUtil.newInstance(configClassLoader, config.getPartitioningStrategyClass());
        } catch (Exception e) {
            throw ExceptionUtil.rethrow(e);
        }
        // putIfAbsent keeps the first cached instance if another thread raced ahead;
        // this thread still returns its own freshly created instance below.
        cache.putIfAbsent(mapName, strategy);
        return strategy;
    }
    return null;
}
@Test
public void whenPartitioningStrategyClassDefined_getPartitioningStrategy_returnsNewInstance() {
    // Configure only a strategy class name (no instance, no attribute configs).
    final PartitioningStrategyConfig config = new PartitioningStrategyConfig();
    config.setPartitioningStrategyClass("com.hazelcast.partition.strategy.StringPartitioningStrategy");

    final PartitioningStrategy strategy =
            partitioningStrategyFactory.getPartitioningStrategy(mapName, config, null);

    // The factory must instantiate the configured class.
    assertEquals(StringPartitioningStrategy.class, strategy.getClass());
}
/**
 * Builds a query that returns zero rows, used only to obtain the table's
 * field metadata.
 *
 * @param tableName name of the table to inspect
 * @return the metadata-only SELECT statement
 */
@Override
public String getSQLQueryFields( String tableName ) {
  StringBuilder query = new StringBuilder( "SELECT * FROM " );
  query.append( tableName ).append( getLimitClause( 0 ) );
  return query.toString();
}
@Test
public void testQuerySchema() {
  final DatabricksDatabaseMeta databaseMeta = new DatabricksDatabaseMeta();
  // The fields query must return no rows, only metadata, hence LIMIT 0.
  final String fieldsQuery = databaseMeta.getSQLQueryFields( "thetable" );
  assertEquals( "SELECT * FROM thetable LIMIT 0", fieldsQuery );
}
/**
 * Callback invoked when the underlying conductor workflow is finalized.
 * Reconciles the Maestro workflow instance status with the final conductor
 * status, cancels tasks stuck in non-terminal step states, and persists the
 * computed runtime overview. Skips work for dedup failures (the instance
 * never really started) and when both sides already agree on a terminal state.
 */
@Override
public void onWorkflowFinalized(Workflow workflow) {
  WorkflowSummary summary = StepHelper.retrieveWorkflowSummary(objectMapper, workflow.getInput());
  WorkflowRuntimeSummary runtimeSummary = retrieveWorkflowRuntimeSummary(workflow);
  String reason = workflow.getReasonForIncompletion();
  LOG.info(
      "Workflow {} with execution_id [{}] is finalized with internal state [{}] and reason [{}]",
      summary.getIdentity(),
      workflow.getWorkflowId(),
      workflow.getStatus(),
      reason);
  metrics.counter(
      MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC,
      getClass(),
      TYPE_TAG,
      "onWorkflowFinalized",
      MetricConstants.STATUS_TAG,
      workflow.getStatus().name());
  // Dedup failures mean the instance never actually started; nothing to reconcile.
  if (reason != null
      && workflow.getStatus() == Workflow.WorkflowStatus.FAILED
      && reason.startsWith(MaestroStartTask.DEDUP_FAILURE_PREFIX)) {
    LOG.info(
        "Workflow {} with execution_id [{}] has not actually started, thus skip onWorkflowFinalized.",
        summary.getIdentity(),
        workflow.getWorkflowId());
    return; // special case doing nothing
  }
  WorkflowInstance.Status instanceStatus =
      instanceDao.getWorkflowInstanceStatus(
          summary.getWorkflowId(), summary.getWorkflowInstanceId(), summary.getWorkflowRunId());
  // Skip when the instance is gone or both sides are already terminal.
  if (instanceStatus == null
      || (instanceStatus.isTerminal() && workflow.getStatus().isTerminal())) {
    LOG.info(
        "Workflow {} with execution_id [{}] does not exist or already "
            + "in a terminal state [{}] with internal state [{}], thus skip onWorkflowFinalized.",
        summary.getIdentity(),
        workflow.getWorkflowId(),
        instanceStatus,
        workflow.getStatus());
    return;
  }
  Map<String, Task> realTaskMap = TaskHelper.getUserDefinedRealTaskMap(workflow);
  // cancel internally failed tasks
  realTaskMap.values().stream()
      .filter(task -> !StepHelper.retrieveStepStatus(task.getOutputData()).isTerminal())
      .forEach(task -> maestroTask.cancel(workflow, task, null));
  WorkflowRuntimeOverview overview =
      TaskHelper.computeOverview(
          objectMapper, summary, runtimeSummary.getRollupBase(), realTaskMap);
  try {
    validateAndUpdateOverview(overview, summary);
    // Map the conductor terminal status onto the Maestro instance status.
    switch (workflow.getStatus()) {
      case TERMINATED: // stopped due to stop request
        if (reason != null && reason.startsWith(FAILURE_REASON_PREFIX)) {
          update(workflow, WorkflowInstance.Status.FAILED, summary, overview);
        } else {
          update(workflow, WorkflowInstance.Status.STOPPED, summary, overview);
        }
        break;
      case TIMED_OUT:
        update(workflow, WorkflowInstance.Status.TIMED_OUT, summary, overview);
        break;
      default: // other status (FAILED, COMPLETED, PAUSED, RUNNING) to be handled here.
        Optional<Task.Status> done =
            TaskHelper.checkProgress(realTaskMap, summary, overview, true);
        switch (done.orElse(Task.Status.IN_PROGRESS)) {
            /**
             * This is a special status to indicate that the workflow has succeeded. Check {@link
             * TaskHelper#checkProgress} for more details.
             */
          case FAILED_WITH_TERMINAL_ERROR:
            WorkflowInstance.Status nextStatus =
                AggregatedViewHelper.deriveAggregatedStatus(
                    instanceDao, summary, WorkflowInstance.Status.SUCCEEDED, overview);
            if (!nextStatus.isTerminal()) {
              throw new MaestroInternalError(
                  "Invalid status: [%s], expecting a terminal one", nextStatus);
            }
            update(workflow, nextStatus, summary, overview);
            break;
          case FAILED:
          case CANCELED: // due to step failure
            update(workflow, WorkflowInstance.Status.FAILED, summary, overview);
            break;
          case TIMED_OUT:
            update(workflow, WorkflowInstance.Status.TIMED_OUT, summary, overview);
            break;
            // all other status are invalid
          default:
            metrics.counter(
                MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC,
                getClass(),
                TYPE_TAG,
                "invalidStatusOnWorkflowFinalized");
            throw new MaestroInternalError(
                "Invalid status [%s] onWorkflowFinalized", workflow.getStatus());
        }
        break;
    }
  } catch (MaestroInternalError | IllegalArgumentException e) {
    // non-retryable error and still fail the instance
    LOG.warn("onWorkflowFinalized is failed with a non-retryable error", e);
    metrics.counter(
        MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC,
        getClass(),
        TYPE_TAG,
        "nonRetryableErrorOnWorkflowFinalized");
    update(
        workflow,
        WorkflowInstance.Status.FAILED,
        summary,
        overview,
        Details.create(
            e.getMessage(), "onWorkflowFinalized is failed with non-retryable error."));
  }
}
@Test
public void testWorkflowFinalizedNotCreatedTasks() {
  // A task whose step runtime state is still CREATED (non-terminal).
  Task task = new Task();
  task.setReferenceTaskName("bar");
  Map<String, Object> summary = new HashMap<>();
  summary.put("runtime_state", Collections.singletonMap("status", "CREATED"));
  summary.put("type", "NOOP");
  task.setOutputData(Collections.singletonMap(Constants.STEP_RUNTIME_SUMMARY_FIELD, summary));
  task.setTaskType(Constants.MAESTRO_TASK_NAME);
  when(stepInstanceDao.getAllStepStates(any(), anyLong(), anyLong())).thenReturn(new HashMap<>());
  when(workflow.getTasks()).thenReturn(Collections.singletonList(task));
  // Conductor workflow terminated while the instance is still in progress.
  when(workflow.getStatus()).thenReturn(Workflow.WorkflowStatus.TERMINATED);
  when(instanceDao.getWorkflowInstanceStatus(eq("test-workflow-id"), anyLong(), anyLong()))
      .thenReturn(WorkflowInstance.Status.IN_PROGRESS);
  statusListener.onWorkflowFinalized(workflow);
  // The listener callback metric is recorded with the TERMINATED status tag.
  Assert.assertEquals(
      1L,
      metricRepo
          .getCounter(
              MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC,
              MaestroWorkflowStatusListener.class,
              "type",
              "onWorkflowFinalized",
              "status",
              "TERMINATED")
          .count());
  // The instance is marked STOPPED, an update event is published, and the
  // non-terminal task is cancelled.
  verify(instanceDao, times(1))
      .updateWorkflowInstance(
          any(), any(), any(), eq(WorkflowInstance.Status.STOPPED), anyLong());
  verify(publisher, times(1)).publishOrThrow(any(WorkflowInstanceUpdateJobEvent.class), any());
  verify(maestroTask, times(1)).cancel(workflow, task, null);
}
/**
 * Computes a version hash over every cache-relevant part of this transformation:
 * general settings, the steps (name, serialized XML, clustering and
 * error-handling configuration) and the hops. A change in any hashed property
 * yields a different value, so callers can use it to detect staleness.
 *
 * @return hash code representing the current transformation state
 * @throws KettleException if a step meta cannot render its XML
 */
public int getCacheVersion() throws KettleException {
  HashCodeBuilder hashCodeBuilder = new HashCodeBuilder( 17, 31 )
      // info
      .append( this.getName() )
      .append( this.getTransformationType() )
      .append( this.getSizeRowset() )
      .append( this.getSleepTimeEmpty() )
      .append( this.getSleepTimeFull() )
      .append( this.isUsingUniqueConnections() )
      .append( this.isFeedbackShown() )
      .append( this.getFeedbackSize() )
      .append( this.isUsingThreadPriorityManagment() )
      .append( this.getSharedObjectsFile() )
      .append( this.isCapturingStepPerformanceSnapShots() )
      .append( this.getStepPerformanceCapturingDelay() )
      .append( this.getStepPerformanceCapturingSizeLimit() )
      .append( this.getMaxDateConnection() )
      .append( this.getMaxDateTable() )
      .append( this.getMaxDateField() )
      .append( this.getMaxDateOffset() )
      .append( this.getMaxDateDifference() )
      .append( this.getDependencies() )
      .append( this.getPartitionSchemas() )
      .append( this.getSlaveServers() )
      .append( this.getClusterSchemas() )
      .append( this.getSlaveStepCopyPartitionDistribution() )
      .append( this.isSlaveTransformation() )
      .append( this.nrTransHops() )
      // steps
      .append( this.getSteps().size() )
      .append( this.getStepNames() )
      // hops
      .append( this.hops );
  List<StepMeta> steps = this.getSteps();
  for ( StepMeta step : steps ) {
    // Each step contributes its name, serialized configuration, clustering and
    // error-handling settings to the hash.
    hashCodeBuilder
        .append( step.getName() )
        .append( step.getStepMetaInterface().getXML() )
        .append( step.getClusterSchema() )
        .append( step.getRemoteInputSteps() )
        .append( step.getRemoteOutputSteps() )
        .append( step.isDoingErrorHandling() );
  }
  return hashCodeBuilder.toHashCode();
}
@Test
public void testGetCacheVersion() throws Exception {
  final TransMeta transMeta = new TransMeta( getClass().getResource( "one-step-trans.ktr" ).getPath() );
  final int versionBefore = transMeta.getCacheVersion();

  // Changing any hashed property must change the cache version.
  transMeta.setSizeRowset( 10 );
  final int versionAfter = transMeta.getCacheVersion();

  assertNotEquals( versionBefore, versionAfter );
}
/**
 * Queries historic variable instances using a JSON request body instead of URL
 * parameters, which supports variable-based filtering and avoids over-long
 * request URIs. Delegates to getQueryResponse for execution and paging.
 */
@ApiOperation(value = "Query for historic variable instances", tags = { "History", "Query" }, notes = "All supported JSON parameter fields allowed are exactly the same as the parameters found for getting a collection of historic process instances,"
        + " but passed in as JSON-body arguments rather than URL-parameters to allow for more advanced querying and preventing errors with request-uri’s that are too long. "
        + "On top of that, the query allows for filtering based on process variables. "
        + "The variables property is a JSON-array containing objects with the format as described here.")
@ApiResponses(value = {
        @ApiResponse(code = 200, message = "Indicates request was successful and the tasks are returned"),
        @ApiResponse(code = 400, message = "Indicates an parameter was passed in the wrong format. The status-message contains additional information.") })
@PostMapping(value = "/query/historic-variable-instances", produces = "application/json")
public DataResponse<HistoricVariableInstanceResponse> queryVariableInstances(@RequestBody HistoricVariableInstanceQueryRequest queryRequest,
        @ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) {
    return getQueryResponse(queryRequest, allRequestParams);
}
@Test
@Deployment
public void testQueryVariableInstances() throws Exception {
    // Start two instances with identical variables; complete a task on the first
    // and attach a task-local variable to its second task.
    HashMap<String, Object> processVariables = new HashMap<>();
    processVariables.put("stringVar", "Azerty");
    processVariables.put("intVar", 67890);
    processVariables.put("booleanVar", false);
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
    Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.complete(task.getId());
    task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.setVariableLocal(task.getId(), "taskVariable", "test");
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
    String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_VARIABLE_INSTANCE_QUERY);
    // Filter by exact variable name across all instances.
    ObjectNode requestNode = objectMapper.createObjectNode();
    requestNode.put("variableName", "stringVar");
    assertResultsPresentInDataResponse(url, requestNode, 2, "stringVar", "Azerty");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableName", "booleanVar");
    assertResultsPresentInDataResponse(url, requestNode, 2, "booleanVar", false);
    // An unknown variable name yields no results.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableName", "booleanVar2");
    assertResultsPresentInDataResponse(url, requestNode, 0, null, null);
    // Filter by process instance id, including and excluding task-local variables.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("processInstanceId", processInstance.getId());
    assertResultsPresentInDataResponse(url, requestNode, 4, "taskVariable", "test");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("processInstanceId", processInstance.getId());
    requestNode.put("excludeTaskVariables", true);
    assertResultsPresentInDataResponse(url, requestNode, 3, "intVar", 67890);
    requestNode = objectMapper.createObjectNode();
    requestNode.put("processInstanceId", processInstance2.getId());
    assertResultsPresentInDataResponse(url, requestNode, 3, "stringVar", "Azerty");
    // Filter by task id, optionally combined with a variable name.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("taskId", task.getId());
    assertResultsPresentInDataResponse(url, requestNode, 1, "taskVariable", "test");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("taskId", task.getId());
    requestNode.put("variableName", "booleanVar");
    assertResultsPresentInDataResponse(url, requestNode, 0, null, null);
    // Filter by variable name pattern.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableNameLike", "%Var");
    assertResultsPresentInDataResponse(url, requestNode, 6, "stringVar", "Azerty");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableNameLike", "%Var2");
    assertResultsPresentInDataResponse(url, requestNode, 0, null, null);
    // Filter on variable values using the variables array with the equals operation.
    requestNode = objectMapper.createObjectNode();
    ArrayNode variableArray = objectMapper.createArrayNode();
    ObjectNode variableNode = objectMapper.createObjectNode();
    variableArray.add(variableNode);
    requestNode.set("variables", variableArray);
    variableNode.put("name", "stringVar");
    variableNode.put("value", "Azerty");
    variableNode.put("operation", "equals");
    assertResultsPresentInDataResponse(url, requestNode, 2, "stringVar", "Azerty");
    variableNode.removeAll();
    requestNode.set("variables", variableArray);
    variableNode.put("name", "taskVariable");
    variableNode.put("value", "test");
    variableNode.put("operation", "equals");
    assertResultsPresentInDataResponse(url, requestNode, 1, "taskVariable", "test");
    // An unsupported operation must be rejected with HTTP 400.
    variableNode.removeAll();
    requestNode.set("variables", variableArray);
    variableNode.put("name", "taskVariable");
    variableNode.put("value", "test");
    variableNode.put("operation", "notEquals");
    assertErrorResult(url, requestNode, HttpStatus.SC_BAD_REQUEST);
}
/**
 * Looks up the state cell for the given namespace and address in the
 * per-work-item state table, using a null state context.
 *
 * @param namespace the state namespace
 * @param address   the tag identifying the state cell
 * @return the state instance for the namespace/address pair
 */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
  return workItemState.get(namespace, address, StateContexts.nullContext());
}
@Test
public void testBagClearBeforeRead() throws Exception {
  StateTag<BagState<String>> addr = StateTags.bag("bag", StringUtf8Coder.of());
  BagState<String> bag = underTest.state(NAMESPACE, addr);

  // Clearing before any read, then adding, should be resolvable locally.
  bag.clear();
  bag.add("hello");
  assertThat(bag.read(), Matchers.containsInAnyOrder("hello"));

  // Shouldn't need to read from windmill for this.
  Mockito.verifyZeroInteractions(mockReader);
}
/**
 * Writes one message to the inbound side of the channel, using a newly created
 * promise for completion notification.
 *
 * @param msg the message to write
 * @return future notified once the operation completes
 */
public ChannelFuture writeOneInbound(Object msg) {
    return writeOneInbound(msg, newPromise());
}
@Test
public void testWriteOneInbound() throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicInteger flushCount = new AtomicInteger(0);

    EmbeddedChannel channel = new EmbeddedChannel(new ChannelInboundHandlerAdapter() {
        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
            ReferenceCountUtil.release(msg);
            latch.countDown();
        }

        @Override
        public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
            flushCount.incrementAndGet();
        }
    });

    // A single inbound write must reach channelRead() without an explicit flush.
    channel.writeOneInbound("Hello, Netty!");

    if (!latch.await(1L, TimeUnit.SECONDS)) {
        fail("Nobody called #channelRead() in time.");
    }
    channel.close().syncUninterruptibly();

    // There was no #flushInbound() call so nobody should have called
    // #channelReadComplete()
    assertEquals(0, flushCount.get());
}
/**
 * Dry-run validates a workflow definition by simulating instance creation and
 * merging parameters for every step, without executing anything. Default
 * dry-run parameters are applied only when the workflow declares a parameter
 * with the same name and type, to override known-invalid defaults.
 *
 * @param workflow workflow definition to validate
 * @param caller   user triggering the validation, recorded as owner and initiator
 * @throws MaestroDryRunException wrapping any failure raised during validation
 */
@SuppressWarnings({"PMD.AvoidInstantiatingObjectsInLoops"})
public void validate(Workflow workflow, User caller) {
  try {
    RunProperties runProperties = new RunProperties();
    runProperties.setOwner(caller);
    Map<String, ParamDefinition> workflowParams = workflow.getParams();
    Map<String, ParamDefinition> defaultDryRunParams = defaultParamManager.getDefaultDryRunParams();
    // add run params to override params with known invalid defaults
    Map<String, ParamDefinition> filteredParams =
        defaultDryRunParams.entrySet().stream()
            .filter(
                entry ->
                    workflowParams != null
                        && workflowParams.containsKey(entry.getKey())
                        && workflowParams.get(entry.getKey()).getType() == entry.getValue().getType())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    Initiator initiator = new ValidationInitiator();
    initiator.setCaller(caller);
    RunRequest runRequest =
        RunRequest.builder()
            .initiator(initiator)
            .currentPolicy(RunPolicy.START_FRESH_NEW_RUN)
            .runParams(filteredParams)
            .build();
    WorkflowInstance workflowInstance =
        workflowHelper.createWorkflowInstance(workflow, 1L, 1L, runProperties, runRequest);
    WorkflowSummary workflowSummary =
        workflowHelper.createWorkflowSummaryFromInstance(workflowInstance);
    // todo: improve to traverse in DAG order to validate steps and their params
    for (Step step : workflow.getSteps()) {
      // Build a synthetic runtime summary so step param merging can be exercised.
      StepRuntimeSummary runtimeSummary =
          StepRuntimeSummary.builder()
              .stepId(step.getId())
              .stepAttemptId(1L)
              .stepInstanceId(1L)
              .stepInstanceUuid(UUID.randomUUID().toString())
              .stepName(StepHelper.getStepNameOrDefault(step))
              .tags(step.getTags())
              .type(step.getType())
              .subType(step.getSubType())
              .params(new LinkedHashMap<>())
              .transition(StepInstanceTransition.from(step))
              .synced(true)
              .dependencies(Collections.emptyMap())
              .build();
      paramsManager.generateMergedStepParams(
          workflowSummary, step, stepRuntimeMap.get(step.getType()), runtimeSummary);
    }
  } catch (Exception e) {
    // Wrap every failure (param merge, instance creation, etc.) for the caller.
    throw new MaestroDryRunException(
        e,
        "Exception during dry run validation for workflow %s Error=[%s] Type=[%s] StackTrace=[%s]",
        workflow.getId(),
        e.getMessage(),
        e.getClass(),
        ExceptionHelper.getStackTrace(e, MAX_STACKTRACE_LINES));
  }
}
@Test
public void testValidateDefaultRunParams() {
  // Workflow declares two string params; the default dry-run params provide a
  // matching-type value, a mismatched-type value and an undeclared one.
  definition
      .getWorkflow()
      .getParams()
      .put("FROM_DATE", ParamDefinition.buildParamDefinition("FROM_DATE", "YYYYMMDD"));
  definition
      .getWorkflow()
      .getParams()
      .put("DIFFERENT_TYPE", ParamDefinition.buildParamDefinition("DIFFERENT_TYPE", "YYYYMMDD"));
  Map<String, ParamDefinition> paramDefs = new HashMap<>();
  paramDefs.put("FROM_DATE", ParamDefinition.buildParamDefinition("FROM_DATE", "20210101"));
  paramDefs.put("DIFFERENT_TYPE", ParamDefinition.buildParamDefinition("DIFFERENT_TYPE", 1234));
  paramDefs.put("MISSING", ParamDefinition.buildParamDefinition("MISSING", 1234));
  when(defaultParamsManager.getDefaultDryRunParams()).thenReturn(paramDefs);
  ArgumentCaptor<RunRequest> captor = ArgumentCaptor.forClass(RunRequest.class);
  when(paramsManager.generateMergedWorkflowParams(any(), any()))
      .thenReturn(new LinkedHashMap<>());
  when(paramsManager.generateMergedStepParams(any(), any(), any(), any()))
      .thenReturn(new LinkedHashMap<>());
  dryRunValidator.validate(definition.getWorkflow(), user);
  // should only use default params matching workflow and also matching type of workflow param
  Map<String, ParamDefinition> expectedRunParams =
      Collections.singletonMap(
          "FROM_DATE", ParamDefinition.buildParamDefinition("FROM_DATE", "20210101"));
  verify(paramsManager, times(1)).generateMergedWorkflowParams(any(), captor.capture());
  assertEquals(expectedRunParams, captor.getValue().getRunParams());
}
/**
 * Resolves the WeChat Mini App service client for the given user type.
 * Prefers a client configured in the database (when enabled); otherwise falls
 * back to the statically configured service.
 *
 * @param userType user type used to look up the DB client record
 * @return the WxMaService to use
 */
@VisibleForTesting
WxMaService getWxMaService(Integer userType) {
    // Step 1: look up the client config in the DB; if present and enabled, use
    // the cached WxMaService keyed by clientId:clientSecret.
    SocialClientDO client = socialClientMapper.selectBySocialTypeAndUserType(
            SocialTypeEnum.WECHAT_MINI_APP.getType(), userType);
    if (client != null && Objects.equals(client.getStatus(), CommonStatusEnum.ENABLE.getStatus())) {
        return wxMaServiceCache.getUnchecked(client.getClientId() + ":" + client.getClientSecret());
    }
    // Step 2: no DB config entry; fall back to the WxMaService built from application-*.yaml.
    return wxMaService;
}
@Test
public void testGetWxMaService_clientNull() {
    // Prepare parameters
    Integer userType = randomPojo(UserTypeEnum.class).getValue();
    // Mock methods (none needed: no DB client record exists for this user type)
    // Invoke
    WxMaService result = socialClientService.getWxMaService(userType);
    // Assert: falls back to the default WxMaService from configuration
    assertSame(wxMaService, result);
}
/**
 * Validates the topic resource via the shared gRPC validator; throws on
 * invalid topics.
 *
 * @param topic topic resource to validate
 */
protected void validateTopic(Resource topic) {
    GrpcValidator.getInstance().validateTopic(topic);
}
@Test
public void testValidateTopic() {
    // Rejected: empty name, the reserved system trace topic, an illegal
    // character, and an over-long (128 character) name.
    assertThrows(GrpcProxyException.class, () -> messingActivity.validateTopic(Resource.newBuilder().build()));
    assertThrows(GrpcProxyException.class, () -> messingActivity.validateTopic(Resource.newBuilder().setName(TopicValidator.RMQ_SYS_TRACE_TOPIC).build()));
    assertThrows(GrpcProxyException.class, () -> messingActivity.validateTopic(Resource.newBuilder().setName("@").build()));
    assertThrows(GrpcProxyException.class, () -> messingActivity.validateTopic(Resource.newBuilder().setName(createString(128)).build()));
    // Accepted: a 127-character name is within the length limit.
    messingActivity.validateTopic(Resource.newBuilder().setName(createString(127)).build());
}
/**
 * Creates and starts a transient stream pull query: a pull query over a stream that
 * consumes from the earliest offset up to the input-topic end offsets captured at
 * query start.
 *
 * @param serviceContext    supplies the admin client used to read topic end offsets
 * @param analysis          analysis of the query, used to locate its input topic(s)
 * @param statementOrig     the configured statement; its config overrides are augmented here
 * @param excludeTombstones whether tombstone records should be excluded from results
 * @return metadata wrapping the transient query plus the captured end offsets
 * @throws KsqlStatementException if stream pull queries are disabled via config
 */
public StreamPullQueryMetadata createStreamPullQuery(
    final ServiceContext serviceContext,
    final ImmutableAnalysis analysis,
    final ConfiguredStatement<Query> statementOrig,
    final boolean excludeTombstones
) {
  // Feature flag check: reject outright when stream pull queries are disabled.
  final boolean streamPullQueriesEnabled = statementOrig
      .getSessionConfig()
      .getConfig(true)
      .getBoolean(KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED);
  if (!streamPullQueriesEnabled) {
    throw new KsqlStatementException(
        "Pull queries on streams are disabled. To create a push query on the stream,"
            + " add EMIT CHANGES to the end. To enable pull queries on streams, set"
            + " the " + KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED
            + " config to 'true'.",
        statementOrig.getMaskedStatementText()
    );
  }

  // Stream pull query overrides.
  final Map<String, Object> overrides =
      new HashMap<>(statementOrig.getSessionConfig().getOverrides());
  // Starting from earliest is semantically necessary.
  overrides.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  // Using a single thread keeps these queries as lightweight as possible, since we are
  // not counting them against the transient query limit.
  overrides.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
  // There's no point in EOS, since this query only produces side effects.
  overrides.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.AT_LEAST_ONCE);
  final ConfiguredStatement<Query> statement = statementOrig.withConfigOverrides(overrides);

  // Snapshot end offsets before starting the query; they bound how far it reads.
  final ImmutableMap<TopicPartition, Long> endOffsets =
      getQueryInputEndOffsets(analysis, serviceContext.getAdminClient());

  final TransientQueryMetadata transientQueryMetadata = EngineExecutor
      .create(primaryContext, serviceContext, statement.getSessionConfig())
      .executeStreamPullQuery(statement, excludeTombstones, endOffsets);

  QueryLogger.info(
      "Streaming stream pull query results '{}' from earliest to " + endOffsets,
      statement.getMaskedStatementText()
  );

  return new StreamPullQueryMetadata(transientQueryMetadata, endOffsets);
}
@Test
public void shouldCheckStreamPullQueryEnabledFlag() {
    setupKsqlEngineWithSharedRuntimeEnabled();

    // Given: a statement whose session config reports the stream-pull-query flag as false.
    @SuppressWarnings("unchecked")
    final ConfiguredStatement<Query> disabledStatement = mock(ConfiguredStatement.class);
    final SessionConfig sessionConfig = mock(SessionConfig.class);
    final KsqlConfig ksqlConfig = mock(KsqlConfig.class);
    when(disabledStatement.getMaskedStatementText()).thenReturn("TEXT");
    when(disabledStatement.getSessionConfig()).thenReturn(sessionConfig);
    when(sessionConfig.getConfig(eq(true))).thenReturn(ksqlConfig);
    when(ksqlConfig.getBoolean(eq(KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED)))
        .thenReturn(false);

    // When: creating the stream pull query.
    final KsqlStatementException ksqlStatementException = assertThrows(
        KsqlStatementException.class,
        () -> ksqlEngine.createStreamPullQuery(null, null, disabledStatement, false)
    );

    // Then: the rejection carries the masked statement text and an actionable message.
    assertThat(ksqlStatementException.getSqlStatement(), is("TEXT"));
    assertThat(ksqlStatementException.getRawMessage(), is(
        "Pull queries on streams are disabled."
            + " To create a push query on the stream, add EMIT CHANGES to the end."
            + " To enable pull queries on streams,"
            + " set the ksql.query.pull.stream.enabled config to 'true'."
    ));
}
@Override public boolean isTemplateAvailable(ThemeContext themeContext, String viewName) { var suffix = thymeleafProperties.getSuffix(); // Currently, we only support Path here. var path = themeContext.getPath().resolve("templates").resolve(viewName + suffix); return Files.exists(path); }
@Test
void templateAvailableTest() throws FileNotFoundException, URISyntaxException {
    // Point the theme context at the bundled "default" theme on the classpath.
    var themeUrl = ResourceUtils.getURL("classpath:themes/default");
    when(thymeleafProperties.getSuffix()).thenReturn(".html");
    var themeContext = ThemeContext.builder()
            .name("default")
            .path(Path.of(themeUrl.toURI()))
            .build();

    // A view not shipped with the theme must be unavailable.
    assertFalse(provider.isTemplateAvailable(themeContext, "fake"));
    // Views present under templates/ must be available.
    assertTrue(provider.isTemplateAvailable(themeContext, "index"));
    assertTrue(provider.isTemplateAvailable(themeContext, "timezone"));
}
/**
 * Returns the name held by this instance.
 *
 * @return the stored name, as-is (may be {@code null} if the field was never set —
 *     depends on initialization elsewhere in the class)
 */
public String getName() {
    return name;
}
@Test
public void getName() {
    // The accessor must return exactly the name the fixture was constructed with.
    String actualName = cs.getName();
    assertEquals(fn, actualName);
}
/**
 * Readiness probe endpoint: aggregates all registered module health checkers.
 * Returns 200/"OK" when every checker passes, otherwise 500 with the failure message.
 */
@GetMapping("/readiness")
public ResponseEntity<String> readiness(HttpServletRequest request) {
    ReadinessResult result = ModuleHealthCheckerHolder.getInstance().checkReadiness();
    if (!result.isSuccess()) {
        // Propagate the failure detail so orchestrators can log why traffic is withheld.
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(result.getResultMessage());
    }
    return ResponseEntity.ok().body("OK");
}
@Test
void testReadinessSuccess() throws Exception {
    // Arrange: the config store answers its count query and the server reports UP.
    Mockito.when(configInfoPersistService.configInfoCount(any(String.class))).thenReturn(0);
    Mockito.when(serverStatusManager.getServerStatus()).thenReturn(ServerStatus.UP);

    // Act
    ResponseEntity<String> response = healthController.readiness(null);

    // Assert: readiness answers 200 with body "OK".
    assertEquals(200, response.getStatusCodeValue());
    assertEquals("OK", response.getBody());
}
/**
 * Executes an INSERT VALUES statement: validates the target columns, builds a Kafka
 * record from the supplied values, and produces it to the data source's topic.
 *
 * <p>Producer authorization failures are normalized into {@code KsqlException}s whose
 * root causes carry ACL context, keeping messages consistent with other authorization
 * errors in ksqlDB.
 */
@SuppressWarnings("unused") // Part of required API.
public void execute(
    final ConfiguredStatement<InsertValues> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
  final InsertValues insertValues = statement.getStatement();
  final MetaStore metaStore = executionContext.getMetaStore();
  final KsqlConfig config = statement.getSessionConfig().getConfig(true);

  final DataSource dataSource = getDataSource(config, metaStore, insertValues);

  // Reject inserts whose column list is incompatible with the target source.
  validateInsert(insertValues.getColumns(), dataSource);

  final ProducerRecord<byte[], byte[]> record =
      buildRecord(statement, metaStore, dataSource, serviceContext);

  try {
    producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
  } catch (final TopicAuthorizationException e) {
    // TopicAuthorizationException does not give much detailed information about why it failed,
    // except which topics are denied. Here we just add the ACL to make the error message
    // consistent with other authorization error messages.
    final Exception rootCause = new KsqlTopicAuthorizationException(
        AclOperation.WRITE,
        e.unauthorizedTopics()
    );
    throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
  } catch (final ClusterAuthorizationException e) {
    // ClusterAuthorizationException is thrown when using idempotent producers
    // and either a topic write permission or a cluster-level idempotent write
    // permission (only applicable for broker versions no later than 2.8) is
    // missing. In this case, we include additional context to help the user
    // distinguish this type of failure from other permissions exceptions
    // such as the ones thrown above when TopicAuthorizationException is caught.
    throw new KsqlException(
        createInsertFailedExceptionMessage(insertValues),
        createClusterAuthorizationExceptionRootCause(dataSource)
    );
  } catch (final KafkaException e) {
    if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
      // The error message thrown when an idempotent producer is missing permissions
      // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
      // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
      // ksqlDB handles these two the same way, accordingly.
      // See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
      throw new KsqlException(
          createInsertFailedExceptionMessage(insertValues),
          createClusterAuthorizationExceptionRootCause(dataSource)
      );
    } else {
      throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
  } catch (final Exception e) {
    // Catch-all: wrap any other failure with the standard insert-failed message.
    throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
  }
}
@Test
public void shouldThrowOnSchemaInferenceMismatchForKey() throws Exception {
  // Given: the schema registry already holds a registered key schema, while the data
  // source (with SCHEMA_INFERENCE enabled for the key) would generate a different,
  // single-column key schema — the insert must fail rather than overwrite it.
  when(srClient.getLatestSchemaMetadata(Mockito.any()))
      .thenReturn(new SchemaMetadata(1, 1, ""));
  when(srClient.getSchemaById(1))
      .thenReturn(new AvroSchema(RAW_SCHEMA));
  givenDataSourceWithSchema(
      TOPIC_NAME, SCHEMA,
      SerdeFeatures.of(SerdeFeature.SCHEMA_INFERENCE), SerdeFeatures.of(),
      FormatInfo.of(FormatFactory.AVRO.name()),
      FormatInfo.of(FormatFactory.AVRO.name()),
      false, false);
  final ConfiguredStatement<InsertValues> statement = givenInsertValues(
      ImmutableList.of(K0, COL0),
      ImmutableList.of(
          new StringLiteral("foo"),
          new StringLiteral("bar"))
  );

  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext)
  );

  // Then: the error names both the existing and the generated key schemas.
  assertThat(e.getMessage(), containsString(
      "ksqlDB generated schema would overwrite existing key schema"));
  assertThat(e.getMessage(), containsString(
      "Existing Schema: [`K0` STRING, `K1` STRING]"));
  assertThat(e.getMessage(), containsString(
      "ksqlDB Generated: [`k0` STRING KEY]"));
}
/**
 * Fetches the unit topic list from the name server.
 *
 * @param containRetry  when {@code false}, retry-group topics (those prefixed with
 *                      {@code MixAll.RETRY_GROUP_TOPIC_PREFIX}) are stripped from the result
 * @param timeoutMillis remoting call timeout in milliseconds
 * @return the decoded topic list
 * @throws MQClientException if the response code is not SUCCESS or the body is missing
 */
public TopicList getUnitTopicList(final boolean containRetry,
    final long timeoutMillis) throws RemotingException, MQClientException, InterruptedException {
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_UNIT_TOPIC_LIST, null);

    RemotingCommand response = this.remotingClient.invokeSync(null, request, timeoutMillis);
    assert response != null;
    if (response.getCode() == ResponseCode.SUCCESS) {
        // Fix: decode the cached body rather than calling response.getBody() a second time,
        // so the null check and the decode always observe the same bytes.
        byte[] body = response.getBody();
        if (body != null) {
            TopicList topicList = TopicList.decode(body, TopicList.class);
            if (!containRetry) {
                // Strip retry-group topics unless the caller explicitly asked for them.
                topicList.getTopicList()
                    .removeIf(topic -> topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX));
            }
            return topicList;
        }
    }

    // Non-SUCCESS code or SUCCESS with an empty body: surface the broker's remark.
    throw new MQClientException(response.getCode(), response.getRemark());
}
@Test
public void assertGetUnitTopicList() throws RemotingException, InterruptedException, MQClientException {
    // Arrange: a successful remoting response carrying exactly one topic.
    mockInvokeSync();
    TopicList responseBody = new TopicList();
    responseBody.getTopicList().add(defaultTopic);
    setResponseBody(responseBody);

    // Act
    TopicList actual = mqClientAPI.getUnitTopicList(false, defaultTimeout);

    // Assert: the decoded list is present and contains the single topic.
    assertNotNull(actual);
    assertEquals(1, actual.getTopicList().size());
}
/**
 * Splits a single {@code name=value} SCRAM config component into a key/value pair.
 *
 * <p>The value may be surrounded by double quotes, which are stripped; an empty value
 * ({@code name=}) yields {@code ""}. The name itself is never unquoted.
 *
 * @param input a trimmed {@code name=value} component
 * @return an immutable entry of name to (possibly unquoted) value
 * @throws FormatterException if the component contains no {@code '='}
 */
static Entry<String, String> splitTrimmedConfigStringComponent(String input) {
    // Idiomatic scan: indexOf replaces the original hand-rolled character loop.
    int eq = input.indexOf('=');
    if (eq < 0) {
        throw new FormatterException("No equals sign found in SCRAM component: " + input);
    }
    String value = input.substring(eq + 1);
    // Strip one pair of surrounding double quotes; length >= 2 guards against the
    // single-character value "\"" being emptied incorrectly.
    if (value.length() >= 2 && value.startsWith("\"") && value.endsWith("\"")) {
        value = value.substring(1, value.length() - 1);
    }
    return new AbstractMap.SimpleImmutableEntry<>(input.substring(0, eq), value);
}
@Test
public void testSplitTrimmedConfigStringComponentOnNameEqualsEmpty() {
    // "name=" has an equals sign but nothing after it: the value must be the empty string.
    AbstractMap.SimpleImmutableEntry<String, String> expected =
        new AbstractMap.SimpleImmutableEntry<>("name", "");
    assertEquals(expected, ScramParser.splitTrimmedConfigStringComponent("name="));
}