focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Loads all state machine instances whose parent id matches {@code parentId}.
 *
 * @param parentId the parent state machine instance id to filter by
 * @return matching instances, mapped via RESULT_SET_TO_STATE_MACHINE_INSTANCE
 */
@Override
public List<StateMachineInstance> queryStateMachineInstanceByParentId(String parentId) {
    return selectList(stateLogStoreSqls.getQueryStateMachineInstancesByParentIdSql(dbType),
        RESULT_SET_TO_STATE_MACHINE_INSTANCE, parentId);
}
/** Smoke test: querying by a parent id must not throw, even when nothing matches. */
@Test
public void testQueryStateMachineInstanceByParentId() {
    Assertions.assertDoesNotThrow(() -> dbAndReportTcStateLogStore.queryStateMachineInstanceByParentId("test"));
}
/**
 * Builds the JVM memory gauge set: combined totals, heap, non-heap, and
 * per-memory-pool metrics (usage ratio, max, used, committed, init, and
 * used-after-gc where the pool supports collection usage).
 *
 * @return an unmodifiable name-to-metric map
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();
    // total.* combine heap + non-heap readings from the MemoryMXBean.
    gauges.put("total.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit() + mxBean.getNonHeapMemoryUsage().getInit());
    gauges.put("total.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed() + mxBean.getNonHeapMemoryUsage().getUsed());
    // -1 means "undefined max"; propagate it instead of adding it to the heap max.
    gauges.put("total.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax() == -1 ? -1 : mxBean.getHeapMemoryUsage().getMax() + mxBean.getNonHeapMemoryUsage().getMax());
    gauges.put("total.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted() + mxBean.getNonHeapMemoryUsage().getCommitted());
    gauges.put("heap.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit());
    gauges.put("heap.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed());
    gauges.put("heap.max", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getMax());
    gauges.put("heap.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted());
    gauges.put("heap.usage", new RatioGauge() {
        @Override
        protected Ratio getRatio() {
            final MemoryUsage usage = mxBean.getHeapMemoryUsage();
            return Ratio.of(usage.getUsed(), usage.getMax());
        }
    });
    gauges.put("non-heap.init", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getInit());
    gauges.put("non-heap.used", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getUsed());
    gauges.put("non-heap.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax());
    gauges.put("non-heap.committed", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getCommitted());
    gauges.put("non-heap.usage", new RatioGauge() {
        @Override
        protected Ratio getRatio() {
            final MemoryUsage usage = mxBean.getNonHeapMemoryUsage();
            // committed stands in for max when max is undefined (-1).
            return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax());
        }
    });
    for (final MemoryPoolMXBean pool : memoryPools) {
        // Pool names may contain spaces; normalize them to dashes for metric names.
        final String poolName = name("pools", WHITESPACE.matcher(pool.getName()).replaceAll("-"));
        gauges.put(name(poolName, "usage"), new RatioGauge() {
            @Override
            protected Ratio getRatio() {
                MemoryUsage usage = pool.getUsage();
                return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax());
            }
        });
        gauges.put(name(poolName, "max"), (Gauge<Long>) () -> pool.getUsage().getMax());
        gauges.put(name(poolName, "used"), (Gauge<Long>) () -> pool.getUsage().getUsed());
        gauges.put(name(poolName, "committed"), (Gauge<Long>) () -> pool.getUsage().getCommitted());
        // Only register GC usage metrics if the memory pool supports usage statistics.
        if (pool.getCollectionUsage() != null) {
            gauges.put(name(poolName, "used-after-gc"), (Gauge<Long>) () -> pool.getCollectionUsage().getUsed());
        }
        gauges.put(name(poolName, "init"), (Gauge<Long>) () -> pool.getUsage().getInit());
    }
    return Collections.unmodifiableMap(gauges);
}
/** A pool name containing spaces is exposed with dashes and a used-after-gc gauge. */
@Test
public void hasAGaugeForWeirdCollectionPoolUsed() {
    final Gauge gauge = (Gauge) gauges.getMetrics().get("pools.Weird-Pool.used-after-gc");
    assertThat(gauge.getValue())
        .isEqualTo(290L);
}
/**
 * Extracts the first expression from {@code groupExpression} that matches
 * {@code pattern}, or null when nothing matches.
 */
public static OptExpression bind(Pattern pattern, GroupExpression groupExpression) {
    return new Binder(pattern, groupExpression).next();
}
/** Binder should match (or reject) depth-3 patterns against a memo-initialized tree. */
@Test
public void testBinderDepth3() {
    // project(join(scan1, project), join(scan2, scan3))
    OptExpression expr = OptExpression.create(new MockOperator(OperatorType.LOGICAL_PROJECT),
            OptExpression.create(new MockOperator(OperatorType.LOGICAL_JOIN),
                    OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 1)),
                    OptExpression.create(new MockOperator(OperatorType.LOGICAL_PROJECT))),
            OptExpression.create(new MockOperator(OperatorType.LOGICAL_JOIN),
                    OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 2)),
                    OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 3))));
    // leaf(join, leaf): a leaf root matches the project root.
    Pattern pattern1 = Pattern.create(OperatorType.PATTERN_LEAF)
            .addChildren(Pattern.create(OperatorType.LOGICAL_JOIN))
            .addChildren(Pattern.create(OperatorType.PATTERN_LEAF));
    Memo memo = new Memo();
    OptExpression result = Binder.bind(pattern1, memo.init(expr));
    assertEquals(OperatorType.LOGICAL_PROJECT, result.getOp().getOpType());
    assertEquals(OperatorType.LOGICAL_JOIN, result.inputAt(0).getOp().getOpType());
    assertEquals(OperatorType.LOGICAL_JOIN, result.inputAt(1).getOp().getOpType());
    // join root does not match the project root, so binding yields null.
    Pattern pattern2 = Pattern.create(OperatorType.LOGICAL_JOIN)
            .addChildren(Pattern.create(OperatorType.LOGICAL_JOIN))
            .addChildren(Pattern.create(OperatorType.PATTERN_LEAF));
    memo = new Memo();
    assertNull(Binder.bind(pattern2, memo.init(expr)));
    // explicit project(join(leaf, leaf), join) also matches.
    Pattern pattern3 = Pattern.create(OperatorType.LOGICAL_PROJECT)
            .addChildren(Pattern.create(OperatorType.LOGICAL_JOIN).addChildren(
                    Pattern.create(OperatorType.PATTERN_LEAF),
                    Pattern.create(OperatorType.PATTERN_LEAF)))
            .addChildren(Pattern.create(OperatorType.LOGICAL_JOIN));
    memo = new Memo();
    result = Binder.bind(pattern3, memo.init(expr));
    assertEquals(OperatorType.LOGICAL_PROJECT, result.getOp().getOpType());
    assertEquals(OperatorType.LOGICAL_JOIN, result.inputAt(0).getOp().getOpType());
    assertEquals(OperatorType.LOGICAL_JOIN, result.inputAt(1).getOp().getOpType());
}
/**
 * Creates a BoundedList capped at {@code maxLength}, backed by a plain ArrayList.
 *
 * @param maxLength the maximum number of elements the list may hold
 */
public static <E> BoundedList<E> newArrayBacked(int maxLength) {
    return new BoundedList<>(maxLength, new ArrayList<>());
}
/** The (maxLength, initialCapacity) overload rejects a negative initial capacity. */
@Test
public void testInitialCapacityMustNotBeNegative() {
    assertEquals("Invalid non-positive initialCapacity of -123",
        assertThrows(IllegalArgumentException.class,
            () -> BoundedList.newArrayBacked(100, -123)).getMessage());
}
/**
 * Determines a Coder for the RowMapper output type, trying in order:
 * an explicit coder from RowMapperWithCoder, a schema-based coder from the
 * SchemaRegistry, and finally the CoderRegistry.
 *
 * @throws IllegalArgumentException if no coder can be inferred
 */
public static <OutputT> Coder<OutputT> inferCoder(
    SingleStoreIO.RowMapper<OutputT> rowMapper,
    CoderRegistry registry,
    SchemaRegistry schemaRegistry,
    Logger log) {
  if (rowMapper instanceof SingleStoreIO.RowMapperWithCoder) {
    try {
      return ((SingleStoreIO.RowMapperWithCoder<OutputT>) rowMapper).getCoder();
    } catch (Exception e) {
      log.warn("Unable to infer a coder from RowMapper. Attempting to infer a coder from type.");
    }
  }

  // Recover OutputT from the RowMapper's type parameter.
  TypeDescriptor<OutputT> outputType =
      TypeDescriptors.extractFromTypeParameters(
          rowMapper,
          SingleStoreIO.RowMapper.class,
          new TypeDescriptors.TypeVariableExtractor<
              SingleStoreIO.RowMapper<OutputT>, OutputT>() {});
  try {
    return schemaRegistry.getSchemaCoder(outputType);
  } catch (NoSuchSchemaException e) {
    log.warn(
        "Unable to infer a schema for type {}. Attempting to infer a coder without a schema.",
        outputType);
  }
  try {
    return registry.getCoder(outputType);
  } catch (CannotProvideCoderException e) {
    throw new IllegalArgumentException(
        String.format("Unable to infer a coder for type %s", outputType));
  }
}
/** A coder registered for the class in the CoderRegistry is used as a fallback. */
@Test
public void testInferCoderFromSchemaRegistry() {
    SchemaRegistry sr = SchemaRegistry.createDefault();
    CoderRegistry cr = CoderRegistry.createDefault();
    Coder<TestRow> c = SerializableCoder.of(TestRow.class);
    cr.registerCoderForClass(TestRow.class, c);

    assertEquals(c, SingleStoreUtil.inferCoder(new TestRowMapper(), cr, sr, LOG));
}
/**
 * JAAS login: collects user name and password via the callback handler,
 * authenticates, and records the authenticated principal.
 *
 * @return true on success, false if no user name was supplied
 * @throws LoginException if the callbacks fail or authentication is rejected
 */
@Override
public boolean login() throws LoginException {
    Callback[] callbacks = new Callback[2];

    callbacks[0] = new NameCallback("User name");
    callbacks[1] = new PasswordCallback("Password", false);
    try {
        handler.handle(callbacks);
    } catch (IOException | UnsupportedCallbackException ioe) {
        // LoginException has no (message, cause) constructor; chain via initCause.
        throw (LoginException)new LoginException().initCause(ioe);
    }

    String password;
    String username = ((NameCallback)callbacks[0]).getName();
    if (username == null)
        return false;
    // A null password is treated as an empty password rather than a failure.
    if (((PasswordCallback)callbacks[1]).getPassword() != null)
        password = new String(((PasswordCallback)callbacks[1]).getPassword());
    else
        password="";

    // authenticate will throw LoginException
    // in case of failed authentication
    authenticate(username, password);
    user = new UserPrincipal(username);
    succeeded = true;
    return true;
}
/** Login with a wrong password over an anonymous-bind connection must fail. */
@Test
public void testAuthenticatedViaBindOnAnonConnection() throws Exception {
    LoginContext context = new LoginContext("AnonBindCheckUserLDAPLogin", new CallbackHandler() {
        @Override
        public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
                if (callbacks[i] instanceof NameCallback) {
                    ((NameCallback) callbacks[i]).setName("first");
                } else if (callbacks[i] instanceof PasswordCallback) {
                    ((PasswordCallback) callbacks[i]).setPassword("wrongSecret".toCharArray());
                } else {
                    throw new UnsupportedCallbackException(callbacks[i]);
                }
            }
        }
    });
    try {
        context.login();
        fail("Should have failed authenticating");
    } catch (FailedLoginException expected) {
        // expected: wrong password must not authenticate
    }
}
/**
 * Looks up the translation for {@code tag} in the tag table, or null if absent.
 * The tag is encoded as UTF-8 bytes before searching.
 */
public String getTranslation(String tag) {
    return search(tags, tag.getBytes(TextUrl.UTF_8));
}
/** Round-trips every entry of the bundled test tag database in both directions. */
@Test
public void readTheList() throws IOException {
    InputStream resource = EhTagDatabaseTest.class.getResourceAsStream("EhTagDatabaseTest");
    EhTagDatabase db;
    try (BufferedSource source = Okio.buffer(Okio.source(resource))) {
        db = new EhTagDatabase("EhTagDatabaseTest", source);
    }

    assertEquals("a", db.getTranslation("1"));
    assertEquals("ab", db.getTranslation("12"));
    assertEquals("abc", db.getTranslation("123"));
    assertEquals("abcd", db.getTranslation("1234"));
    assertEquals("1", db.getTranslation("a"));
    assertEquals("12", db.getTranslation("ab"));
    assertEquals("123", db.getTranslation("abc"));
    assertEquals("1234", db.getTranslation("abcd"));
    // a key that is not in the table
    assertNull(db.getTranslation("21"));
}
/**
 * Parses {@code text} with the configured formatter and resolves it to a
 * ZonedDateTime, filling unparsed fields from DEFAULT_ZONED_DATE_TIME.
 * A zone parsed from the text wins over the supplied {@code zoneId}.
 *
 * @throws KsqlException on an unsupported field, or a leap day without a year
 */
@VisibleForTesting
ZonedDateTime parseZoned(final String text, final ZoneId zoneId) {
  final TemporalAccessor parsed = formatter.parse(text);
  final ZoneId parsedZone = parsed.query(TemporalQueries.zone());

  ZonedDateTime resolved = DEFAULT_ZONED_DATE_TIME.apply(
      ObjectUtils.defaultIfNull(parsedZone, zoneId));

  // Copy every field the formatter actually parsed onto the default timestamp.
  for (final TemporalField override : ChronoField.values()) {
    if (parsed.isSupported(override)) {
      if (!resolved.isSupported(override)) {
        throw new KsqlException(
            "Unsupported temporal field in timestamp: " + text + " (" + override + ")");
      }

      final long value = parsed.getLong(override);
      if (override == ChronoField.DAY_OF_YEAR && value == LEAP_DAY_OF_THE_YEAR) {
        if (!parsed.isSupported(ChronoField.YEAR)) {
          throw new KsqlException("Leap day cannot be parsed without supplying the year field");
        }
        // eagerly override year, to avoid mismatch with epoch year, which is not a leap year
        resolved = resolved.withYear(parsed.get(ChronoField.YEAR));
      }
      resolved = resolved.with(override, value);
    }
  }

  return resolved;
}
/** A historical date with an hour-only time field resolves in the given zone. */
@Test
public void shouldParseBasicLocalDate() {
    // Given
    final String format = "yyyy-MM-dd HH";
    final String timestamp = "1605-11-05 10";

    // When
    final ZonedDateTime ts = new StringToTimestampParser(format).parseZoned(timestamp, ZID);

    // Then
    assertThat(ts, sameInstant(
        FIFTH_OF_NOVEMBER
            .withHour(10)
            .withZoneSameInstant(ZID)));
}
/**
 * Invokes {@code method} on {@code target}, forcing accessibility first.
 *
 * The retry loop exists because another thread may concurrently call
 * {@code method.setAccessible(false)} between our setAccessible(true) and invoke.
 * NOTE(review): if access is denied for any persistent reason this loops
 * forever — confirm IllegalAccessException can only be transient here.
 *
 * @return the method's return value
 * @throws InvocationTargetException if the invoked method itself throws
 */
public static Object invokeMethod(Object target, Method method, Object... args)
        throws InvocationTargetException, IllegalArgumentException, SecurityException {
    while (true) {
        if (!method.isAccessible()) {
            method.setAccessible(true);
        }
        try {
            return method.invoke(target, args);
        } catch (IllegalAccessException ignore) {
            // avoid other threads executing `method.setAccessible(false)`
        }
    }
}
/** invokeMethod resolves and calls by name; unknown names raise NoSuchMethodException. */
@Test
public void testInvokeMethod2() throws NoSuchMethodException, InvocationTargetException {
    Assertions.assertEquals(0, ReflectionUtil
        .invokeMethod("", "length", null, ReflectionUtil.EMPTY_ARGS));
    Assertions.assertEquals(3, ReflectionUtil
        .invokeMethod("foo", "length", null, ReflectionUtil.EMPTY_ARGS));
    Assertions.assertThrows(NoSuchMethodException.class, () -> ReflectionUtil
        .invokeMethod("", "size", null, ReflectionUtil.EMPTY_ARGS));
}
/**
 * Parses a string into a typed SchemaAndValue: null maps to the null
 * schema/value pair, the empty string stays a plain STRING, and anything
 * else is handed to the value parser.
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    ValueParser parser = new ValueParser(new Parser(value));
    return parser.parse(false);
}
/** "[null]" parses to an ARRAY containing a single null element. */
@Test
public void shouldParseNullArrayElements() {
    SchemaAndValue schemaAndValue = Values.parseString("[null]");
    assertEquals(Type.ARRAY, schemaAndValue.schema().type());
    assertEquals(Collections.singletonList(null), schemaAndValue.value());
}
/**
 * Re-saves every flow (all tenants) so stored sources are reindexed.
 * Only {@code type == "flow"} is supported.
 *
 * @return 0 on success
 * @throws IllegalArgumentException for any other reindexing type
 */
@Override
public Integer call() throws Exception {
    super.call();

    if ("flow".equals(type)) {
        FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
        List<Flow> allFlow = flowRepository.findAllForAllTenants();
        allFlow.stream()
            // reload each flow together with its source; skip ones that vanished in the meantime
            .map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
            .filter(flow -> flow != null)
            .forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));

        stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
    } else {
        throw new IllegalArgumentException("Reindexing type '" + type + "' is not supported");
    }

    return 0;
}
/** End-to-end: upload flows via the update command, then reindex them. */
@Test
void reindexFlow() {
    URL directory = ReindexCommandTest.class.getClassLoader().getResource("flows");

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    System.setOut(new PrintStream(out));

    try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
        EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
        embeddedServer.start();

        // we use the update command to add flows to extract
        String[] updateArgs = {
            "--server",
            embeddedServer.getURL().toString(),
            "--user",
            "myuser:pass:word",
            "io.kestra.cli",
            directory.getPath(),
        };
        PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
        assertThat(out.toString(), containsString("3 flow(s)"));

        // then we reindex them
        String[] reindexArgs = {
            "--type",
            "flow",
        };
        Integer call = PicocliRunner.call(ReindexCommand.class, ctx, reindexArgs);
        assertThat(call, is(0));
        // in local it reindex 3 flows and in CI 4 for an unknown reason
        assertThat(out.toString(), containsString("Successfully reindex"));
    }
}
/**
 * Associates a PendingIntent with the push id carried in the intent so the
 * id can be recovered later. Only active when custom handling is enabled.
 * NOTE(review): a missing extra stores a null value — confirm mPendingIntent2Ids tolerates it.
 */
public void hookPendingIntent(Intent intent, PendingIntent pendingIntent) {
    if (this.customizeEnable) {
        String pushId = intent.getStringExtra(SA_PUSH_ID);
        this.mPendingIntent2Ids.put(pendingIntent, pushId);
    }
}
/** hookPendingIntent must accept a null PendingIntent without throwing. */
@Test
public void hookPendingIntent() {
    SAHelper.initSensors(mApplication);
    Intent intent = new Intent();
    // NOTE(review): literal key "SA_PUSH_ID" — verify it matches the SA_PUSH_ID constant's value
    intent.putExtra("SA_PUSH_ID", "mock_push_id");
    PushProcess.getInstance().hookPendingIntent(intent, null);
}
/**
 * Maps an AmazonClientException to a BackgroundException, translating
 * well-known AWS error codes to login/access failures and falling back to
 * HTTP-status-based mapping.
 */
@Override
public BackgroundException map(final AmazonClientException e) {
    final StringBuilder buffer = new StringBuilder();
    if(e instanceof AmazonServiceException) {
        final AmazonServiceException failure = (AmazonServiceException) e;
        this.append(buffer, failure.getErrorMessage());
        if(null != failure.getErrorCode()) {
            switch(failure.getStatusCode()) {
                case HttpStatus.SC_BAD_REQUEST:
                    switch(failure.getErrorCode()) {
                        case "Throttling":
                            return new RetriableAccessDeniedException(buffer.toString(), e);
                        case "AccessDeniedException":
                            return new AccessDeniedException(buffer.toString(), e);
                        case "UnrecognizedClientException":
                            return new LoginFailureException(buffer.toString(), e);
                    }
                    // NOTE(review): no break — an unmatched 400 error code falls through
                    // into the 403 handling below. Confirm this fall-through is intended.
                case HttpStatus.SC_FORBIDDEN:
                    switch(failure.getErrorCode()) {
                        case "SignatureDoesNotMatch":
                        case "InvalidAccessKeyId":
                        case "InvalidClientTokenId":
                        case "InvalidSecurity":
                        case "MissingClientTokenId":
                        case "MissingAuthenticationToken":
                            return new LoginFailureException(buffer.toString(), e);
                    }
            }
        }
        // Anything unrecognized is mapped by its HTTP status code.
        return new DefaultHttpResponseExceptionMappingService().map(new HttpResponseException(failure.getStatusCode(), buffer.toString()));
    }
    this.append(buffer, e.getMessage());
    return this.wrap(e, buffer);
}
/** A 401 service exception (no error code) maps to a LoginFailureException. */
@Test
public void testLoginFailure() {
    final AmazonServiceException f = new AmazonServiceException("message", null);
    f.setStatusCode(401);
    assertTrue(new AmazonServiceExceptionMappingService().map(f) instanceof LoginFailureException);
}
/** Builds the authentication cookie from the session token and its expiry. */
NewCookie createAuthenticationCookie(SessionResponse token, ContainerRequestContext requestContext) {
    return makeCookie(token.getAuthenticationToken(), token.validUntil(), requestContext);
}
/** With a default HttpConfiguration the cookie path falls back to "/". */
@Test
void defaultPath() {
    final CookieFactory cookieFactory = new CookieFactory(new HttpConfiguration());

    final NewCookie cookie = cookieFactory.createAuthenticationCookie(sessionResponse, containerRequest);

    assertThat(cookie.getPath()).isEqualTo("/");
}
/** Releases tiered-storage memory before running the superclass close logic. */
@Override
public void close() {
    storageMemoryManager.release();
    super.close();
}
/** Closing the partition must destroy its buffer pool. */
@Test
void testClose() throws Exception {
    final int numBuffers = 1;

    BufferPool bufferPool = globalPool.createBufferPool(numBuffers, numBuffers);
    TieredResultPartition partition = createTieredStoreResultPartition(1, bufferPool, false);
    partition.close();
    assertThat(bufferPool.isDestroyed()).isTrue();
}
/**
 * Refreshes the public ids of the given extension and its namespace,
 * persisting only if something actually changed. Built-in extensions are skipped.
 */
public void update(String namespaceName, String extensionName) throws InterruptedException {
    if(BuiltInExtensionUtil.isBuiltIn(namespaceName)) {
        LOGGER.debug("SKIP BUILT-IN EXTENSION {}", NamingUtil.toExtensionId(namespaceName, extensionName));
        return;
    }

    var extension = repositories.findPublicId(namespaceName, extensionName);
    var extensionUpdates = new HashMap<Long, String>();
    updateExtensionPublicId(extension, extensionUpdates, false);
    if(!extensionUpdates.isEmpty()) {
        repositories.updateExtensionPublicIds(extensionUpdates);
    }

    var namespaceUpdates = new HashMap<Long, String>();
    updateNamespacePublicId(extension, namespaceUpdates, false);
    if(!namespaceUpdates.isEmpty()) {
        repositories.updateNamespacePublicIds(namespaceUpdates);
    }
}
/** When upstream public ids already match, no update statements are issued. */
@Test
public void testUpdateNoChange() throws InterruptedException {
    var namespaceName = "foo";
    var namespacePublicId = UUID.randomUUID().toString();
    var extensionName = "bar";
    var extensionPublicId = UUID.randomUUID().toString();

    var namespace = new Namespace();
    namespace.setId(1L);
    namespace.setName(namespaceName);
    namespace.setPublicId(namespacePublicId);

    var extension = new Extension();
    extension.setId(2L);
    extension.setName(extensionName);
    extension.setNamespace(namespace);
    extension.setPublicId(extensionPublicId);

    Mockito.when(repositories.findPublicId(namespaceName, extensionName)).thenReturn(extension);
    Mockito.when(repositories.findPublicId(extensionPublicId)).thenReturn(extension);
    Mockito.when(repositories.findNamespacePublicId(namespacePublicId)).thenReturn(extension);
    // upstream reports the ids the entities already carry -> nothing to persist
    Mockito.when(idService.getUpstreamPublicIds(extension)).thenReturn(new PublicIds(namespacePublicId, extensionPublicId));

    updateService.update(namespaceName, extensionName);

    Mockito.verify(repositories, Mockito.never()).updateExtensionPublicIds(Mockito.anyMap());
    Mockito.verify(repositories, Mockito.never()).updateNamespacePublicIds(Mockito.anyMap());
}
/**
 * Matches {@code path} against {@code pattern} and returns the URI template
 * variables captured during the match, in declaration order.
 *
 * @throws IllegalStateException if the path does not match the pattern
 */
public Map<String, String> extractUriTemplateVariables(String pattern, String path) {
    final Map<String, String> uriVariables = new LinkedHashMap<>();
    if (!doMatch(pattern, path, true, uriVariables)) {
        throw new IllegalStateException("Pattern \"" + pattern + "\" is not a match for \"" + path + "\"");
    }
    return uriVariables;
}
/** A single {orgId} template variable is extracted from the matching path. */
@Test
public void testExtractUriTemplateVariables() {
    AntPathMatcher antPathMatcher = new AntPathMatcher();
    HashMap<String, String> map = (HashMap<String, String>) antPathMatcher.extractUriTemplateVariables("/api/org/organization/{orgId}",
        "/api/org" + "/organization" + "/999");
    assertEquals(1, map.size());
}
/**
 * Persists the service definition in STOPPED state via the REST API.
 *
 * @return EXIT_SUCCESS or the processed response code; EXIT_EXCEPTION_THROWN on any error
 */
@Override
public int actionSave(String fileName, String appName, Long lifetime,
    String queue) throws IOException, YarnException {
  int result = EXIT_SUCCESS;
  try {
    Service service = loadAppJsonFromLocalFS(fileName, appName, lifetime, queue);
    // saved services are stored stopped; they are started explicitly later
    service.setState(ServiceState.STOPPED);
    String buffer = jsonSerDeser.toJson(service);
    ClientResponse response = getApiClient()
        .post(ClientResponse.class, buffer);
    result = processResponse(response);
  } catch (Exception e) {
    LOG.error("Fail to save application: ", e);
    result = EXIT_EXCEPTION_THROWN;
  }
  return result;
}
/** Saving an unknown file must surface EXIT_EXCEPTION_THROWN instead of throwing. */
@Test
void testBadSave() {
    String fileName = "unknown_file";
    String appName = "unknown_app";
    long lifetime = 3600L;
    String queue = "default";
    try {
        int result = badAsc.actionSave(fileName, appName, lifetime, queue);
        assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
        fail();
    }
}
/**
 * Opens the underlying Calcite connection and wraps it in a JdbcConnection.
 */
@Override
public Connection connect(String url, Properties info) throws SQLException {
    // calciteConnection is initialized with an empty Beam schema,
    // we need to populate it with pipeline options, load table providers, etc
    return JdbcConnection.initialize((CalciteConnection) super.connect(url, info));
}
/** A TIMESTAMP value written to a table reads back with the same instant. */
@Test
public void testTimestampWithDefaultTimezone() throws Exception {
    TestTableProvider tableProvider = new TestTableProvider();
    Connection connection = JdbcDriver.connect(tableProvider, PipelineOptionsFactory.create());

    // A table with one TIMESTAMP column
    Schema schema = Schema.builder().addDateTimeField("ts").build();
    connection
        .createStatement()
        .executeUpdate("CREATE EXTERNAL TABLE test (ts TIMESTAMP) TYPE 'test'");

    ReadableInstant july1 = ISODateTimeFormat.dateTimeParser().parseDateTime("2018-07-01T01:02:03Z");
    tableProvider.addRows("test", Row.withSchema(schema).addValue(july1).build());

    ResultSet selectResult =
        connection.createStatement().executeQuery(String.format("SELECT ts FROM test"));
    selectResult.next();
    Timestamp ts = selectResult.getTimestamp(1);

    assertThat(
        String.format(
            "Wrote %s to a table, but got back %s",
            ISODateTimeFormat.basicDateTime().print(july1),
            ISODateTimeFormat.basicDateTime().print(ts.getTime())),
        ts.getTime(),
        equalTo(july1.getMillis()));
}
/**
 * Searches for files matching {@code regex} under {@code workdir}, paging
 * through the B2 file-name listing of every candidate bucket.
 */
@Override
public AttributedList<Path> search(final Path workdir, final Filter<Path> regex, final ListProgressListener listener) throws BackgroundException {
    try {
        final AttributedList<Path> list = new AttributedList<>();
        String prefix = null;
        final AttributedList<Path> containers;
        if(workdir.isRoot()) {
            // searching from the root spans every bucket
            containers = new B2BucketListService(session, fileid).list(new Path(String.valueOf(Path.DELIMITER),
                EnumSet.of(Path.Type.volume, Path.Type.directory)), listener);
        }
        else {
            containers = new AttributedList<>(Collections.singletonList(containerService.getContainer(workdir)));
            if(!containerService.isContainer(workdir)) {
                // restrict the listing to keys below the working directory
                prefix = containerService.getKey(workdir) + Path.DELIMITER;
            }
        }
        for(Path container : containers) {
            String startFilename = prefix;
            do {
                final B2ListFilesResponse response = session.getClient().listFileNames(
                    fileid.getVersionId(container),
                    startFilename, new HostPreferences(session.getHost()).getInteger("b2.listing.chunksize"),
                    prefix, null);
                for(B2FileInfoResponse info : response.getFiles()) {
                    final Path f = new Path(String.format("%s%s%s", container.getAbsolute(), Path.DELIMITER, info.getFileName()),
                        EnumSet.of(Path.Type.file));
                    if(regex.accept(f)) {
                        list.add(f.withAttributes(new B2AttributesFinderFeature(session, fileid).toAttributes(info)));
                    }
                }
                // nextFileName is the pagination cursor; null marks the last page
                startFilename = response.getNextFileName();
            }
            while(startFilename != null);
        }
        return list;
    }
    catch(B2ApiException e) {
        throw new B2ExceptionMappingService(fileid).map(e);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map(e);
    }
}
/** Searching from the root, and by name substrings, both locate a freshly created file. */
@Test
public void testSearchInRoot() throws Exception {
    final String name = new AlphanumericRandomStringService().random();
    final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final Path file = new B2TouchFeature(session, fileid).touch(new Path(bucket, name, EnumSet.of(Path.Type.file)), new TransferStatus());
    final B2SearchFeature feature = new B2SearchFeature(session, fileid);
    assertNotNull(feature.search(bucket, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    // substring searches from the root should match as well
    assertNotNull(feature.search(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)),
        new SearchFilter(StringUtils.substring(name, 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    assertNotNull(feature.search(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)),
        new SearchFilter(StringUtils.substring(name, 0, name.length() - 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Loads the trade histories of a member, filtered by their role:
 * seller-side trades when {@code isSeller} is true, buyer-side otherwise.
 */
public List<TradeHistoryResponse> findTradeHistories(final Long memberId, final boolean isSeller) {
    return findHistories(memberId, isSeller ? tradeHistory.sellerId::eq : tradeHistory.buyerId::eq);
}
/** Seller-side lookup returns the seller's single trade with both parties resolved. */
@Test
void 판매자의_판매_내역을_조회한다() {
    // when
    List<TradeHistoryResponse> result = tradeHistoryQueryRepository.findTradeHistories(seller.getId(), true);

    // then
    assertSoftly(softly -> {
        softly.assertThat(result).hasSize(1);
        softly.assertThat(result.get(0).buyerName()).isEqualTo(buyer.getNickname());
        softly.assertThat(result.get(0).sellerName()).isEqualTo(seller.getNickname());
        softly.assertThat(result.get(0).productTitle()).isEqualTo(product.getDescription().getTitle());
    });
}
/** Returns the first {@code @Job} annotation found for the job's details, if any. */
public static Optional<Job> getJobAnnotation(JobDetails jobDetails) {
    return cast(getJobAnnotations(jobDetails).filter(jobAnnotation -> jobAnnotation.annotationType().equals(Job.class)).findFirst());
}
/** A missing job class is tolerated: no exception, only a warning log entry. */
@Test
void testGetJobAnnotation() {
    ListAppender<ILoggingEvent> logger = LoggerAssert.initFor(JobUtils.class);
    assertThatCode(() -> JobUtils.getJobAnnotation(classThatDoesNotExistJobDetails().build())).doesNotThrowAnyException();
    assertThat(logger).hasWarningMessageContaining("Trying to find Job Annotations for 'i.dont.exist.Class.notImportant(java.lang.Integer)' but the class could not be found. The Job name and other properties like retries and labels will not be set on the Job.");
}
/**
 * Registers a table provider under its table type, loading its tables first.
 *
 * @throws IllegalArgumentException if a provider for the type is already registered
 */
@Override
public void registerProvider(TableProvider provider) {
  if (providers.containsKey(provider.getTableType())) {
    throw new IllegalArgumentException(
        "Provider is already registered for table type: " + provider.getTableType());
  }

  // may throw if the provider's tables clash with already-known table names
  initTablesFromProvider(provider);
  this.providers.put(provider.getTableType(), provider);
}
/** Two providers sharing table names: the second registration is rejected. */
@Test(expected = IllegalStateException.class)
public void testRegisterProvider_duplicatedTableName() throws Exception {
    store.registerProvider(new MockTableProvider("mock", "hello", "world"));
    // same table names under a different type — rejected while initializing tables
    store.registerProvider(new MockTableProvider("mock1", "hello", "world"));
}
public static String getRemoteAddrFromRequest(Request request, Set<IpSubnet> trustedSubnets) { final String remoteAddr = request.getRemoteAddr(); final String XForwardedFor = request.getHeader("X-Forwarded-For"); if (XForwardedFor != null) { for (IpSubnet s : trustedSubnets) { try { if (s.contains(remoteAddr)) { // Request came from trusted source, trust X-Forwarded-For and return it return XForwardedFor; } } catch (UnknownHostException e) { // ignore silently, probably not worth logging } } } // Request did not come from a trusted source, or the X-Forwarded-For header was not set return remoteAddr; }
/** An IPv6 client with only IPv4 trusted subnets keeps its own remote address. */
@Test
public void getRemoteAddrFromRequestWorksWithIPv6IfSubnetsContainsOnlyIPv4() throws Exception {
    final Request request = mock(Request.class);
    when(request.getRemoteAddr()).thenReturn("2001:DB8::42");
    when(request.getHeader("X-Forwarded-For")).thenReturn("2001:DB8::1");

    final String s = RestTools.getRemoteAddrFromRequest(request, Collections.singleton(new IpSubnet("127.0.0.1/32")));

    assertThat(s).isEqualTo("2001:DB8::42");
}
/**
 * Resolves discovery nodes via DNS. Every failure mode degrades to an empty
 * list with a warning instead of propagating the exception.
 */
@Override
List<DiscoveryNode> resolveNodes() {
    try {
        return lookup();
    } catch (TimeoutException e) {
        logger.warning(String.format("DNS lookup for serviceDns '%s' failed: DNS resolution timeout", serviceDns));
        return Collections.emptyList();
    } catch (UnknownHostException e) {
        logger.warning(String.format("DNS lookup for serviceDns '%s' failed: unknown host", serviceDns));
        return Collections.emptyList();
    } catch (Exception e) {
        logger.warning(String.format("DNS lookup for serviceDns '%s' failed", serviceDns), e);
        return Collections.emptyList();
    }
}
/** Resolving without an explicit port yields both server IPs on the default port. */
@Test
public void resolve() {
    // given
    RawLookupProvider lookupProvider = staticLookupProvider(SERVICE_DNS, IP_SERVER_1, IP_SERVER_2);
    DnsEndpointResolver dnsEndpointResolver = new DnsEndpointResolver(LOGGER, SERVICE_DNS, UNSET_PORT,
        DEFAULT_SERVICE_DNS_TIMEOUT_SECONDS, lookupProvider);

    // when
    List<DiscoveryNode> result = dnsEndpointResolver.resolveNodes();

    // then
    Set<?> resultAddresses = setOf(result.get(0).getPrivateAddress().getHost(), result.get(1).getPrivateAddress().getHost());
    Set<?> resultPorts = setOf(result.get(0).getPrivateAddress().getPort(), result.get(1).getPrivateAddress().getPort());
    assertEquals(setOf(IP_SERVER_1, IP_SERVER_2), resultAddresses);
    assertEquals(setOf(DEFAULT_PORT), resultPorts);
}
protected static DataSource getDataSourceFromJndi( String dsName, Context ctx ) throws NamingException { if ( Utils.isEmpty( dsName ) ) { throw new NamingException( BaseMessages.getString( PKG, "DatabaseUtil.DSNotFound", String.valueOf( dsName ) ) ); } Object foundDs = FoundDS.get( dsName ); if ( foundDs != null ) { return (DataSource) foundDs; } Object lkup = null; DataSource rtn = null; NamingException firstNe = null; // First, try what they ask for... try { lkup = ctx.lookup( dsName ); if ( lkup instanceof DataSource ) { rtn = (DataSource) lkup; FoundDS.put( dsName, rtn ); return rtn; } } catch ( NamingException ignored ) { firstNe = ignored; } try { // Needed this for Jboss lkup = ctx.lookup( "java:" + dsName ); if ( lkup instanceof DataSource ) { rtn = (DataSource) lkup; FoundDS.put( dsName, rtn ); return rtn; } } catch ( NamingException ignored ) { // ignore } try { // Tomcat lkup = ctx.lookup( "java:comp/env/jdbc/" + dsName ); if ( lkup instanceof DataSource ) { rtn = (DataSource) lkup; FoundDS.put( dsName, rtn ); return rtn; } } catch ( NamingException ignored ) { // ignore } try { // Others? lkup = ctx.lookup( "jdbc/" + dsName ); if ( lkup instanceof DataSource ) { rtn = (DataSource) lkup; FoundDS.put( dsName, rtn ); return rtn; } } catch ( NamingException ignored ) { // ignore } if ( firstNe != null ) { throw firstNe; } throw new NamingException( BaseMessages.getString( PKG, "DatabaseUtil.DSNotFound", dsName ) ); }
/** A direct JNDI hit returns the bound DataSource unchanged. */
@Test
public void testNormal() throws NamingException {
    DataSource dataSource = mock( DataSource.class );
    when( context.lookup( testName ) ).thenReturn( dataSource );
    assertEquals( dataSource, DatabaseUtil.getDataSourceFromJndi( testName, context ) );
}
/**
 * Validates an update request against the plan of the existing reservation.
 *
 * @return the plan the reservation belongs to
 * @throws YarnException if the reservation or its definition is invalid
 */
public Plan validateReservationUpdateRequest(
    ReservationSystem reservationSystem, ReservationUpdateRequest request)
    throws YarnException {
  ReservationId reservationId = request.getReservationId();
  Plan plan = validateReservation(reservationSystem, reservationId,
      AuditConstants.UPDATE_RESERVATION_REQUEST);
  validateReservationDefinition(reservationId,
      request.getReservationDefinition(), plan,
      AuditConstants.UPDATE_RESERVATION_REQUEST);
  return plan;
}
/** An update with a valid recurrence expression passes validation. */
@Test
public void testUpdateReservationValidRecurrenceExpression() {
    ReservationUpdateRequest request =
        createSimpleReservationUpdateRequest(1, 1, 1, 5, 3, "600000");
    plan = null;
    try {
        plan = rrValidator.validateReservationUpdateRequest(rSystem, request);
    } catch (YarnException e) {
        Assert.fail(e.getMessage());
    }
    Assert.assertNotNull(plan);
}
/**
 * Fuzzes GET parameters with {@code payload} using the FUZZING_PATHS modifier,
 * i.e. parameter values are treated as paths when inserting the payload.
 */
public static ImmutableList<HttpRequest> fuzzGetParametersExpectingPathValues(
    HttpRequest request, String payload) {
  return fuzzGetParameters(
      request, payload, Optional.empty(), ImmutableSet.of(FuzzingModifier.FUZZING_PATHS));
}
/** The payload replaces only the last path segment of a path-like parameter value. */
@Test
public void fuzzGetParametersExpectingPathValues_whenGetParameterValueHasPathPrefix_prefixesPayload() {
    HttpRequest requestWithPathPrefix =
        HttpRequest.get("https://google.com?key=resources/value").withEmptyHeaders().build();
    HttpRequest requestWithFuzzedGetParameterWithPathPrefix =
        HttpRequest.get("https://google.com?key=resources/<payload>").withEmptyHeaders().build();

    assertThat(
            FuzzingUtils.fuzzGetParametersExpectingPathValues(requestWithPathPrefix, "<payload>"))
        .contains(requestWithFuzzedGetParameterWithPathPrefix);
}
/** Returns the type information captured for the deserialized records. */
@Override
public TypeInformation<T> getProducedType() {
    return type;
}
/** An anonymous subclass still exposes the tuple type via getProducedType. */
@Test
void testTypeExtractionTupleAnonymous() {
    TypeInformation<Tuple2<byte[], byte[]>> type =
        new AbstractDeserializationSchema<Tuple2<byte[], byte[]>>() {
            @Override
            public Tuple2<byte[], byte[]> deserialize(byte[] message) {
                throw new UnsupportedOperationException();
            }
        }.getProducedType();

    TypeInformation<Tuple2<byte[], byte[]>> expected =
        TypeInformation.of(new TypeHint<Tuple2<byte[], byte[]>>() {});

    assertThat(type).isEqualTo(expected);
}
/**
 * Returns the server-side assignor preferred by the group's current members, if any.
 * Delegates with {@code Long.MAX_VALUE} so no epoch/offset bound filters the members.
 */
public Optional<String> preferredServerAssignor() {
  return preferredServerAssignor(Long.MAX_VALUE);
}
// Exercises preferredServerAssignor()/computePreferredServerAssignor() through member
// additions, assignor updates, and removals, checking the majority-vote result at each step.
@Test
public void testPreferredServerAssignor() {
  ConsumerGroup consumerGroup = createConsumerGroup("foo");
  ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
      .setServerAssignorName("range")
      .build();
  ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
      .setServerAssignorName("range")
      .build();
  ConsumerGroupMember member3 = new ConsumerGroupMember.Builder("member3")
      .setServerAssignorName("uniform")
      .build();
  // The group is empty so the preferred assignor should be empty.
  assertEquals(
      Optional.empty(),
      consumerGroup.preferredServerAssignor()
  );
  // Member 1 has got an updated assignor but this is not reflected in the group yet so
  // we pass the updated member. The assignor should be range.
  assertEquals(
      Optional.of("range"),
      consumerGroup.computePreferredServerAssignor(null, member1)
  );
  // Update the group with member 1.
  consumerGroup.updateMember(member1);
  // Member 1 is in the group so the assignor should be range.
  assertEquals(
      Optional.of("range"),
      consumerGroup.preferredServerAssignor()
  );
  // Member 1 has been removed but this is not reflected in the group yet so we pass the
  // removed member. No members (and no assignor) remain, so the result should be empty.
  assertEquals(
      Optional.empty(),
      consumerGroup.computePreferredServerAssignor(member1, null)
  );
  // Member 2 has got an updated assignor but this is not reflected in the group yet so
  // we pass the updated member. The assignor should be range.
  assertEquals(
      Optional.of("range"),
      consumerGroup.computePreferredServerAssignor(null, member2)
  );
  // Update the group with member 2.
  consumerGroup.updateMember(member2);
  // Member 1 and 2 are in the group so the assignor should be range.
  assertEquals(
      Optional.of("range"),
      consumerGroup.preferredServerAssignor()
  );
  // Update the group with member 3.
  consumerGroup.updateMember(member3);
  // Member 1, 2 and 3 are in the group so the assignor should be range.
  assertEquals(
      Optional.of("range"),
      consumerGroup.preferredServerAssignor()
  );
  // Members without assignors
  ConsumerGroupMember updatedMember1 = new ConsumerGroupMember.Builder("member1")
      .setServerAssignorName(null)
      .build();
  ConsumerGroupMember updatedMember2 = new ConsumerGroupMember.Builder("member2")
      .setServerAssignorName(null)
      .build();
  ConsumerGroupMember updatedMember3 = new ConsumerGroupMember.Builder("member3")
      .setServerAssignorName(null)
      .build();
  // Member 1 has removed its assignor but this is not reflected in the group yet so
  // we pass the updated member. With one vote each, range and uniform tie, so either
  // may win.
  Optional<String> assignor =
      consumerGroup.computePreferredServerAssignor(member1, updatedMember1);
  assertTrue(assignor.equals(Optional.of("range")) || assignor.equals(Optional.of("uniform")));
  // Update the group.
  consumerGroup.updateMember(updatedMember1);
  // Member 2 has removed its assignor but this is not reflected in the group yet so
  // we pass the updated member. Only member 3's "uniform" vote remains.
  assertEquals(
      Optional.of("uniform"),
      consumerGroup.computePreferredServerAssignor(member2, updatedMember2)
  );
  // Update the group.
  consumerGroup.updateMember(updatedMember2);
  // Only member 3 is left in the group so the assignor should be uniform.
  assertEquals(
      Optional.of("uniform"),
      consumerGroup.preferredServerAssignor()
  );
  // Member 3 has removed its assignor but this is not reflected in the group yet so
  // we pass the updated member. The assignor should be empty.
  assertEquals(
      Optional.empty(),
      consumerGroup.computePreferredServerAssignor(member3, updatedMember3)
  );
  // Update the group.
  consumerGroup.updateMember(updatedMember3);
  // The group is empty so the assignor should be empty as well.
  assertEquals(
      Optional.empty(),
      consumerGroup.preferredServerAssignor()
  );
}
/**
 * Decides whether the engine should be updated: refreshes the cached latest-release
 * version from the web at most once per check window, persists it, and compares it to
 * the running version.
 *
 * @param lastChecked epoch timestamp (seconds) of the last web check
 * @param now current epoch timestamp (seconds)
 * @param properties database-backed property store for caching check results
 * @param currentVersion the currently running engine version
 * @return {@code true} if a newer release than {@code currentVersion} is known
 * @throws UpdateException if persisting the properties fails
 */
protected boolean shouldUpdate(final long lastChecked, final long now,
    final DatabaseProperties properties, String currentVersion) throws UpdateException {
  //check every 30 days if we know there is an update, otherwise check every 7 days
  // NOTE(review): the comment above mentions a 7-day fallback, but the code always
  // uses a fixed 30-day window — confirm which is intended.
  final int checkRange = 30;
  if (!DateUtil.withinDateRange(lastChecked, now, checkRange)) {
    LOGGER.debug("Checking web for new version.");
    final String currentRelease = getCurrentReleaseVersion();
    if (currentRelease != null) {
      final DependencyVersion v = new DependencyVersion(currentRelease);
      // Only accept versions with at least major.minor.patch parts.
      if (v.getVersionParts() != null && v.getVersionParts().size() >= 3) {
        updateToVersion = v.toString();
        // Persist the release only when the normalized form differs from the raw one.
        if (!currentRelease.equals(updateToVersion)) {
          properties.save(CURRENT_ENGINE_RELEASE, updateToVersion);
        }
        properties.save(ENGINE_VERSION_CHECKED_ON, Long.toString(now));
      }
    }
    LOGGER.debug("Current Release: {}", updateToVersion);
  }
  // No known release — nothing to compare against, so no upgrade recommendation.
  if (updateToVersion == null) {
    LOGGER.debug("Unable to obtain current release");
    return false;
  }
  final DependencyVersion running = new DependencyVersion(currentVersion);
  final DependencyVersion released = new DependencyVersion(updateToVersion);
  if (running.compareTo(released) < 0) {
    LOGGER.debug("Upgrade recommended");
    return true;
  }
  LOGGER.debug("Upgrade not needed");
  return false;
}
// Walks shouldUpdate() through several (updateToVersion, currentVersion, timestamps)
// combinations: equal versions, stale checks, newer/older known releases, and an
// unknown ("") release both inside and outside the check window.
@Test
public void testShouldUpdate() throws Exception {
  new MockUp<DatabaseProperties>() {
    private final Properties properties = new Properties();
    @Mock
    public void $init(CveDB db) {
      //empty
    }
    @Mock
    public void save(String key, String value) throws UpdateException {
      properties.setProperty(key, value);
    }
    @Mock
    public String getProperty(String key) {
      return properties.getProperty(key);
    }
  };
  // Same version, checked today -> no update.
  String updateToVersion = "1.2.6";
  String currentVersion = "1.2.6";
  long lastChecked = dateToSeconds("2014-12-01");
  long now = dateToSeconds("2014-12-01");
  EngineVersionCheck instance = new EngineVersionCheck(getSettings());
  boolean expResult = false;
  instance.setUpdateToVersion(updateToVersion);
  boolean result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
  // Stale check (outside the window) forces a web lookup that finds a newer release.
  updateToVersion = "1.2.5";
  currentVersion = "1.2.5";
  lastChecked = dateToSeconds("2014-10-01");
  now = dateToSeconds("2014-12-01");
  expResult = true;
  instance.setUpdateToVersion(updateToVersion);
  result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
  //System.out.println(properties.getProperty(CURRENT_ENGINE_RELEASE));
  // Recent check, equal versions -> no update.
  updateToVersion = "1.2.5";
  currentVersion = "1.2.5";
  lastChecked = dateToSeconds("2014-12-01");
  now = dateToSeconds("2014-12-03");
  expResult = false;
  instance.setUpdateToVersion(updateToVersion);
  result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
  // Known release is newer than the running version -> update.
  updateToVersion = "1.2.6";
  currentVersion = "1.2.5";
  lastChecked = dateToSeconds("2014-12-01");
  now = dateToSeconds("2014-12-03");
  expResult = true;
  instance.setUpdateToVersion(updateToVersion);
  result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
  // Running version is ahead of the known release -> no update.
  updateToVersion = "1.2.5";
  currentVersion = "1.2.6";
  lastChecked = dateToSeconds("2014-12-01");
  now = dateToSeconds("2014-12-08");
  expResult = false;
  instance.setUpdateToVersion(updateToVersion);
  result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
  // Unknown release, recent check -> no update.
  updateToVersion = "";
  currentVersion = "1.2.5";
  lastChecked = dateToSeconds("2014-12-01");
  now = dateToSeconds("2014-12-03");
  expResult = false;
  instance.setUpdateToVersion(updateToVersion);
  result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
  // Unknown release but check window expired -> web lookup finds a newer release.
  updateToVersion = "";
  currentVersion = "1.2.5";
  lastChecked = dateToSeconds("2014-12-01");
  now = dateToSeconds("2015-12-08");
  expResult = true;
  instance.setUpdateToVersion(updateToVersion);
  result = instance.shouldUpdate(lastChecked, now, dbProperties, currentVersion);
  assertEquals(expResult, result);
}
/**
 * Parses a schema from the given file.
 *
 * @param file the file to read the schema definition from
 * @return the parsed {@link Schema}
 * @throws IOException if the file cannot be read
 * @deprecated use an explicit {@code new Schema.Parser().parse(file)} instead
 */
@Deprecated
public static Schema parse(File file) throws IOException {
  return new Parser().parse(file);
}
// Parsing an empty string is not a valid schema and must fail fast.
@Test
void parseEmptySchema() {
  assertThrows(SchemaParseException.class, () -> new Schema.Parser().parse(""));
}
public FEELFnResult<Boolean> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } boolean result = true; boolean containsNull = false; // Spec. definition: return false if any item is false, else true if all items are true, else null for ( final Object element : list ) { if (element != null && !(element instanceof Boolean)) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not a Boolean")); } else { if (element != null) { result &= (Boolean) element; } else if (!containsNull) { containsNull = true; } } } if (containsNull && result) { return FEELFnResult.ofResult( null ); } else { return FEELFnResult.ofResult( result ); } }
// A null list argument must yield an InvalidParametersEvent error, never a result.
@Test
void invokeListParamNull() {
  FunctionTestUtil.assertResultError(allFunction.invoke((List) null), InvalidParametersEvent.class);
}
/**
 * Stores a key/value configuration entry under the given namespace.
 * Delegates to the underlying {@code defaultMQAdminExtImpl}.
 */
@Override
public void putKVConfig(String namespace, String key, String value) {
  defaultMQAdminExtImpl.putKVConfig(namespace, key, value);
}
// NOTE(review): despite its name, this test only reads KV config (getKVConfig /
// getKVListByNamespace) — presumably the corresponding put happens in shared
// fixture setup or mocks; verify against the test class setup.
@Test
public void testPutKVConfig() throws RemotingException, MQClientException, InterruptedException {
  String topicConfig =
      defaultMQAdminExt.getKVConfig(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG, "UnitTest");
  assertThat(topicConfig).isEqualTo("topicListConfig");
  KVTable kvs =
      defaultMQAdminExt.getKVListByNamespace(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG);
  assertThat(kvs.getTable().get("broker-name")).isEqualTo("broker-one");
  assertThat(kvs.getTable().get("cluster-name")).isEqualTo("default-cluster");
}
/**
 * Extracts a single parameter value from the raw {@code key=value&key=value}
 * response string.
 *
 * @param parameter the parameter name to look up
 * @return the trimmed value, or {@code null} when the parameter is absent or empty
 */
public String getParameter(String parameter) {
  for (String pair : rawResponse.split("&")) {
    // Match only pairs that start with "<parameter>=".
    if (!pair.startsWith(parameter + '=')) {
      continue;
    }
    final String[] parts = pair.split("=");
    // "key=" splits to a single element: treat an empty value as absent (null).
    return parts.length > 1 ? parts[1].trim() : null;
  }
  return null;
}
// getParameter must return the trimmed value for present keys, and null for empty
// values, absent keys, and a null key (which simply never matches).
@Test
public void shouldReturnUrlParam() {
  final Token actual =
      new OAuth1AccessToken("acccess", "secret",
          "user_id=3107154759&screen_name=someuser&empty=&=");
  assertEquals("someuser", actual.getParameter("screen_name"));
  assertEquals("3107154759", actual.getParameter("user_id"));
  assertEquals(null, actual.getParameter("empty"));
  assertEquals(null, actual.getParameter(null));
}
/**
 * Routes slot offers depending on whether the offering task manager is blocked:
 * blocked task managers go through the special blocked-offer path, all others use
 * the normal pool behavior.
 */
@Override
public Collection<SlotOffer> offerSlots(
    Collection<? extends SlotOffer> offers,
    TaskManagerLocation taskManagerLocation,
    TaskManagerGateway taskManagerGateway,
    long currentTime) {
  if (isBlockedTaskManager(taskManagerLocation.getResourceID())) {
    // Blocked task managers may only re-offer already-accepted slots.
    return internalOfferSlotsFromBlockedTaskManager(offers, taskManagerLocation);
  }
  return super.offerSlots(offers, taskManagerLocation, taskManagerGateway, currentTime);
}
// A blocked task manager may re-offer a slot that was already accepted (slot1), but a
// brand-new offer from it (slot2) must be rejected.
@TestTemplate
void testOfferDuplicateSlots() {
  final TaskManagerLocation taskManager = new LocalTaskManagerLocation();
  final List<ResourceID> blockedTaskManagers = new ArrayList<>();
  final BlocklistDeclarativeSlotPool slotPool =
      BlocklistDeclarativeSlotPoolBuilder.builder()
          .setBlockedTaskManagerChecker(blockedTaskManagers::contains)
          .setSlotRequestMaxInterval(slotRequestMaxInterval)
          .setMainThreadExecutor(componentMainThreadExecutor)
          .build();
  final ResourceCounter resourceRequirements =
      ResourceCounter.withResource(RESOURCE_PROFILE, 2);
  slotPool.increaseResourceRequirementsBy(resourceRequirements);
  slotPool.tryWaitSlotRequestIsDone();
  SlotOffer slot1 = new SlotOffer(new AllocationID(), 1, RESOURCE_PROFILE);
  SlotOffer slot2 = new SlotOffer(new AllocationID(), 1, RESOURCE_PROFILE);
  // offer and accept slot1
  assertThat(
          SlotPoolTestUtils.offerSlots(
              slotPool, Collections.singleton(slot1), taskManager))
      .containsExactly(slot1);
  // block the task manager.
  blockedTaskManagers.add(taskManager.getResourceID());
  // offer slot1 and slot2, accept slot1, reject slot2
  assertThat(SlotPoolTestUtils.offerSlots(slotPool, Arrays.asList(slot1, slot2), taskManager))
      .containsExactly(slot1);
}
/**
 * Removes unused import statements from the given Java source.
 *
 * @param contents the full source text
 * @return the source with unused imports removed; the input unchanged if it does
 *     not parse (parse errors are reported during formatting instead)
 * @throws FormatterException if applying the computed replacements fails
 */
public static String removeUnusedImports(final String contents) throws FormatterException {
  Context context = new Context();
  JCCompilationUnit unit = parse(context, contents);
  if (unit == null) {
    // error handling is done during formatting
    return contents;
  }
  // Collect all simple names referenced in code and in Javadoc.
  UnusedImportScanner scanner = new UnusedImportScanner(JavacTrees.instance(context));
  scanner.scan(unit, null);
  return applyReplacements(
      contents, buildReplacements(contents, unit, scanner.usedNames, scanner.usedInJavadoc));
}
// Golden-file style check: the fixture input must transform into the expected output.
@Test
public void removeUnused() throws FormatterException {
  assertThat(removeUnusedImports(input)).isEqualTo(expected);
}
/**
 * Loads properties from the single configuration file given on the command line and
 * deletes the file afterwards (it may contain secrets and must not linger on disk).
 *
 * @param args command-line arguments; must contain exactly one element, the absolute
 *     path to the configuration file
 * @return the properties wrapped in a {@code Props}
 * @throws IllegalArgumentException if not exactly one argument is given
 * @throws IllegalStateException if the file cannot be read
 */
static Props loadPropsFromCommandLineArgs(String[] args) {
  if (args.length != 1) {
    throw new IllegalArgumentException("Only a single command-line argument is accepted "
      + "(absolute path to configuration file)");
  }

  File propertyFile = new File(args[0]);
  Properties properties = new Properties();
  // try-with-resources replaces the manual IOUtils.closeQuietly(reader) pattern;
  // the reader is closed automatically even when loading fails.
  try (Reader reader =
      new InputStreamReader(new FileInputStream(propertyFile), StandardCharsets.UTF_8)) {
    properties.load(reader);
  } catch (Exception e) {
    throw new IllegalStateException("Could not read properties from file: " + args[0], e);
  } finally {
    // Always remove the file, read or not.
    deleteQuietly(propertyFile);
  }
  return new Props(properties);
}
// A path to a non-existent file must fail with an IllegalStateException whose
// message names the missing file.
@Test
public void loadPropsFromCommandLineArgs_file_does_not_exist() throws Exception {
  File propsFile = temp.newFile();
  FileUtils.deleteQuietly(propsFile);
  try {
    ConfigurationUtils.loadPropsFromCommandLineArgs(new String[] {propsFile.getAbsolutePath()});
    fail();
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage("Could not read properties from file: " + propsFile.getAbsolutePath());
  }
}
/**
 * Returns the post jobs to run, sorted by their declared phase ordering, wrapped,
 * and filtered down to those that should actually execute.
 */
public Collection<PostJobWrapper> selectPostJobs() {
  // Sort first so the wrapper list preserves the phase ordering.
  return sort(getFilteredExtensions(PostJob.class, null)).stream()
      .map(postJob -> new PostJobWrapper(postJob, postJobContext, postJobOptimizer))
      .filter(wrapper -> wrapper.shouldExecute())
      .toList();
}
// Post jobs must be ordered by phase: the pre-phase job comes before the normal one.
@Test
public void dependsUponPhaseForPostJob() {
  PrePostJob pre = new PrePostJob();
  NormalPostJob normal = new NormalPostJob();
  ExtensionContainer iocContainer = mock(ExtensionContainer.class);
  when(iocContainer.getComponentsByType(PostJob.class)).thenReturn(List.of(pre, normal));
  PostJobExtensionDictionary selector =
      new PostJobExtensionDictionary(iocContainer, postJobOptimizer, mock(PostJobContext.class));
  assertThat(selector.selectPostJobs()).extracting("wrappedPostJob").containsExactly(pre, normal);
}
/**
 * Extracts the configured fields/properties from {@code input} and packs their
 * values into a {@link Tuple}, in the configured order.
 *
 * @param input the object to read values from; must not be null
 * @return a tuple of the extracted values
 */
@Override
public Tuple apply(Object input) {
  // Validate configuration and input up front, preserving the original message order.
  checkArgument(fieldsOrProperties != null,
      "The names of the fields/properties to read should not be null");
  checkArgument(fieldsOrProperties.length > 0,
      "The names of the fields/properties to read should not be empty");
  checkArgument(input != null,
      "The object to extract fields/properties from should not be null");
  final List<Object> extracted = extractValues(input, buildExtractors());
  return new Tuple(extracted.toArray());
}
// Requesting a non-existing name ("bad key") must surface as an IntrospectionError.
// NOTE(review): the extractor is applied to YODA (an Employee), not to the map built
// above — presumably the map is illustrative; verify intent against sibling tests.
@Test
void should_throw_error_with_map_when_non_existing_key_is_given() {
  // GIVEN
  Employee luke = new Employee(2L, new Name("Luke"), 22);
  Map<String, Employee> map = mapOf(entry("key1", YODA), entry("key2", luke));
  ByNameMultipleExtractor underTest = new ByNameMultipleExtractor("key1", "key2", "bad key");
  // WHEN
  Throwable thrown = catchThrowable(() -> underTest.apply(YODA));
  // THEN
  then(thrown).isInstanceOf(IntrospectionError.class);
}
public static void setEnvFromInputProperty(Map<String, String> env, String propName, String defaultPropValue, Configuration conf, String classPathSeparator) { String envString = conf.get(propName, defaultPropValue); // Get k,v pairs from string into a tmp env. Note that we don't want // to expand the env var values, because we will do that below - // don't want to do it twice. Map<String, String> tmpEnv = new HashMap<String, String>(); Apps.setEnvFromInputStringNoExpand(tmpEnv, envString, classPathSeparator); // Get map of props with prefix propName. // (e.g., map.reduce.env.ENV_VAR_NAME=value) Map<String, String> inputMap = conf.getPropsWithPrefix(propName + "."); // Entries from map should override entries from input string. tmpEnv.putAll(inputMap); // Add them to the environment setEnvFromInputStringMap(env, tmpEnv, classPathSeparator); }
// The string-form property supplies base values; per-variable properties add new
// variables and override string-form ones; default-property values must be ignored
// because the primary property is set.
@Test
void testSetEnvFromInputProperty() {
  Configuration conf = new Configuration(false);
  Map<String, String> env = new HashMap<>();
  String propName = "mapreduce.map.env";
  String defaultPropName = "mapreduce.child.env";
  // Setup environment input properties
  conf.set(propName, "env1=env1_val,env2=env2_val,env3=env3_val");
  conf.set(propName + ".env4", "env4_val");
  conf.set(propName + ".env2", "new_env2_val");
  // Setup some default values - we shouldn't see these values
  conf.set(defaultPropName, "env1=def1_val,env2=def2_val,env3=def3_val");
  String defaultPropValue = conf.get(defaultPropName);
  // These should never be referenced.
  conf.set(defaultPropName + ".env4", "def4_val");
  conf.set(defaultPropName + ".env2", "new_def2_val");
  Apps.setEnvFromInputProperty(env, propName, defaultPropValue, conf, File.pathSeparator);
  // Check values from string
  assertEquals("env1_val", env.get("env1"));
  assertEquals("env3_val", env.get("env3"));
  // Check individual value
  assertEquals("env4_val", env.get("env4"));
  // Check individual value that eclipses one in string
  assertEquals("new_env2_val", env.get("env2"));
}
/**
 * Creates a builder that decorates the given propagation factory with baggage support.
 *
 * @param delegate the propagation factory to wrap
 */
public static FactoryBuilder newFactoryBuilder(Propagation.Factory delegate) {
  return new FactoryBuilder(delegate);
}
// A baggage field configured with multiple key names must be extracted regardless of
// which of its key spellings appears in the carrier (case-insensitive, like HTTP).
@Test
void extract_field_multiple_key_names() {
  // switch to case insensitive as this example is about http :P
  request = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
  BaggageField userId = BaggageField.create("userId");
  BaggageField sessionId = BaggageField.create("sessionId");
  SingleBaggageField userIdConfig = SingleBaggageField.newBuilder(userId)
      .addKeyName("baggage-userId")
      .addKeyName("baggage_userId")
      .build();
  SingleBaggageField sessionIdConfig = SingleBaggageField.newBuilder(sessionId)
      .addKeyName("baggage-sessionId")
      .addKeyName("baggage_sessionId")
      .build();
  factory = newFactoryBuilder(B3Propagation.FACTORY)
      .add(userIdConfig)
      .add(sessionIdConfig)
      .build();
  initialize();
  // Inject trace headers first, then add baggage entries to the same carrier.
  injector.inject(context, request);
  request.put("baggage-userId", "bob");
  request.put("baggage-sessionId", "12345");
  context = extractor.extract(request).context();
  assertThat(userId.getValue(context)).isEqualTo("bob");
  assertThat(sessionId.getValue(context)).isEqualTo("12345");
}
/** Returns a deep copy of this mapping via the copy constructor. */
public FactMapping cloneFactMapping() {
  return new FactMapping(this);
}
// A fully-populated mapping must clone to an equal (but distinct) instance.
@Test
public void cloneFactMapping() {
  original = new FactMapping("FACT_ALIAS",
      FactIdentifier.create("FI_TEST", "com.test.Foo"),
      new ExpressionIdentifier("EI_TEST", GIVEN));
  original.addExpressionElement("FIRST_STEP", String.class.getName());
  original.setExpressionAlias("EA_TEST");
  original.setGenericTypes(new ArrayList<>());
  assertThat(original.cloneFactMapping()).isEqualTo(original);
}
/**
 * Returns {@code true} when buffered bytes precede the start-of-block marker
 * (i.e. the buffer is non-empty and either has no marker at all, or the marker
 * is not at position 0).
 *
 * <p>Collapses the original nested-if/boolean-return into a single expression;
 * the logic is unchanged.
 */
public synchronized boolean hasLeadingOutOfBandData() {
  return size() > 0 && (!hasStartOfBlock() || startOfBlockIndex > 0);
}
// Leading out-of-band data exists only when plain bytes are written BEFORE a
// start-of-block marker; data written after a marker does not count.
@Test
public void testHasLeadingOutOfBandData() throws Exception {
  assertFalse(instance.hasLeadingOutOfBandData(), "Unexpected initial value");
  instance.write(buildTestBytes(true, true, true));
  assertFalse(instance.hasLeadingOutOfBandData());
  instance.write("BLAH".getBytes());
  assertFalse(instance.hasLeadingOutOfBandData());
  instance.reset();
  assertFalse(instance.hasLeadingOutOfBandData());
  // Bytes before the marker -> leading out-of-band data.
  instance.write("BLAH".getBytes());
  instance.write(buildTestBytes(true, true, true));
  assertTrue(instance.hasLeadingOutOfBandData());
}
/**
 * Sets a property and returns {@code this}, narrowing the superclass return type
 * to {@link MapSettings} so calls can be chained fluently.
 */
@Override
public MapSettings setProperty(String key, String value) {
  return (MapSettings) super.setProperty(key, value);
}
// An int property must be readable back as int and as its string form; it is not a
// boolean, so getBoolean falls back to false.
@Test
public void set_property_int() {
  Settings settings = new MapSettings();
  settings.setProperty("foo", 123);
  assertThat(settings.getInt("foo")).isEqualTo(123);
  assertThat(settings.getString("foo")).isEqualTo("123");
  assertThat(settings.getBoolean("foo")).isFalse();
}
public String getXML() throws KettleException { // See PDI-15781 boolean sendResultXmlWithStatus = EnvUtil.getSystemProperty( "KETTLE_COMPATIBILITY_SEND_RESULT_XML_WITH_FULL_STATUS", "N" ).equalsIgnoreCase( "Y" ); return getXML( sendResultXmlWithStatus ); }
// By default (PDI-15781) the XML omits result row data; passing true (PDI-17061)
// includes it.
@Test
public void testGetXML() throws KettleException {
  SlaveServerTransStatus transStatus = new SlaveServerTransStatus();
  RowMetaAndData rowMetaAndData = new RowMetaAndData();
  String testData = "testData";
  rowMetaAndData.addValue( new ValueMetaString(), testData );
  List<RowMetaAndData> rows = new ArrayList<>();
  rows.add( rowMetaAndData );
  Result result = new Result();
  result.setRows( rows );
  transStatus.setResult( result );
  //PDI-15781
  Assert.assertFalse( transStatus.getXML().contains( testData ) );
  //PDI-17061
  Assert.assertTrue( transStatus.getXML( true ).contains( testData ) );
}
/**
 * Collects resources for {@code name} from the sources dictated by this name's
 * class-loading strategy, in strategy order: parent class loader (APPLICATION),
 * this plugin (PLUGIN), and the plugin's dependencies (DEPENDENCIES).
 */
@Override
public Enumeration<URL> getResources(String name) throws IOException {
  List<URL> found = new ArrayList<>();
  ClassLoadingStrategy strategy = getClassLoadingStrategy(name);
  log.trace("Received request to load resources '{}'", name);
  for (ClassLoadingStrategy.Source source : strategy.getSources()) {
    if (source == ClassLoadingStrategy.Source.APPLICATION) {
      // The parent may be null for detached loaders; skip it then.
      if (getParent() != null) {
        found.addAll(Collections.list(getParent().getResources(name)));
      }
    } else if (source == ClassLoadingStrategy.Source.PLUGIN) {
      found.addAll(Collections.list(findResources(name)));
    } else if (source == ClassLoadingStrategy.Source.DEPENDENCIES) {
      found.addAll(findResourcesFromDependencies(name));
    }
  }
  return Collections.enumeration(found);
}
// Parent-last ordering: when the resource exists in parent, dependency, and plugin,
// all three are returned and the plugin's copy comes first.
@Test
void parentLastGetResourcesExistsInParentAndDependencyAndPlugin() throws URISyntaxException, IOException {
  Enumeration<URL> resources =
      parentLastPluginClassLoader.getResources("META-INF/file-in-both-parent-and-dependency-and-plugin");
  assertNumberOfResourcesAndFirstLineOfFirstElement(3, "plugin", resources);
}
/** Returns whether this checkpoint was performed as an unaligned checkpoint. */
public boolean getUnalignedCheckpoint() {
  return unalignedCheckpoint;
}
// Without bytesPersistedDuringAlignment set, the metrics must not report an unaligned
// checkpoint; the helper then checks the flag for several persisted-byte counts.
@Test
void testUnalignedCheckpointType() {
  CheckpointMetricsBuilder metricsBuilder = new CheckpointMetricsBuilder();
  metricsBuilder.setBytesProcessedDuringAlignment(0L);
  metricsBuilder.setAlignmentDurationNanos(0L);
  metricsBuilder.setBytesPersistedOfThisCheckpoint(0L);
  metricsBuilder.setTotalBytesPersisted(0L);
  // The checkpoint shouldn't be unaligned checkpoint when builder doesn't set
  // bytesPersistedDuringAlignment
  assertThat(metricsBuilder.build().getUnalignedCheckpoint()).isFalse();
  assertThat(metricsBuilder.buildIncomplete().getUnalignedCheckpoint()).isFalse();
  assertUnalignedCheckpointType(metricsBuilder, 0L);
  assertUnalignedCheckpointType(metricsBuilder, 1L);
  assertUnalignedCheckpointType(metricsBuilder, 5L);
  assertUnalignedCheckpointType(metricsBuilder, 10L);
  assertUnalignedCheckpointType(metricsBuilder, 100L);
}
/**
 * Renders the release-highlights page for {@code version} from {@code template}.
 *
 * <p>Builds links to all prior minor versions of the same major, then splits the
 * changelog highlights (sorted by PR number) into notable and non-notable groups
 * and passes all three as template bindings.
 *
 * @param version the version being released
 * @param template the template source to render
 * @param entries changelog entries; entries without a highlight are skipped
 * @return the rendered page content
 * @throws IOException if template rendering fails
 */
@VisibleForTesting
static String generateFile(QualifiedVersion version, String template, List<ChangelogEntry> entries)
    throws IOException {
  final List<String> priorVersions = new ArrayList<>();
  // For x.y with y > 0, link back to highlights of x.(y-1) ... x.0, newest first.
  if (version.minor() > 0) {
    final int major = version.major();
    for (int minor = version.minor() - 1; minor >= 0; minor--) {
      String majorMinor = major + "." + minor;
      priorVersions.add("{ref-bare}/" + majorMinor + "/release-highlights.html[" + majorMinor + "]");
    }
  }
  // Partition highlights by "notable" flag, keeping each group ordered by PR number.
  final Map<Boolean, List<ChangelogEntry.Highlight>> groupedHighlights = entries.stream()
      .map(ChangelogEntry::getHighlight)
      .filter(Objects::nonNull)
      .sorted(Comparator.comparingInt(ChangelogEntry.Highlight::getPr))
      .collect(Collectors.groupingBy(ChangelogEntry.Highlight::isNotable, Collectors.toList()));
  final List<ChangelogEntry.Highlight> notableHighlights = groupedHighlights.getOrDefault(true, List.of());
  final List<ChangelogEntry.Highlight> nonNotableHighlights = groupedHighlights.getOrDefault(false, List.of());
  final Map<String, Object> bindings = new HashMap<>();
  bindings.put("priorVersions", priorVersions);
  bindings.put("notableHighlights", notableHighlights);
  bindings.put("nonNotableHighlights", nonNotableHighlights);
  return TemplateUtils.render(template, bindings);
}
// Golden-file test: rendering the template for 8.4.0-SNAPSHOT with the fixture
// entries must reproduce the expected asciidoc byte-for-byte.
@Test
public void generateFile_rendersCorrectMarkup() throws Exception {
  // given:
  final String template = getResource("/templates/release-highlights.asciidoc");
  final String expectedOutput = getResource(
      "/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc"
  );
  final List<ChangelogEntry> entries = getEntries();
  // when:
  final String actualOutput =
      ReleaseHighlightsGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries);
  // then:
  assertThat(actualOutput, equalTo(expectedOutput));
}
/**
 * Validates the "sub" (subject) claim value; delegates to the generic string
 * validation (non-null, non-empty, non-blank).
 *
 * @throws ValidateException if the value is null, empty, or whitespace-only
 */
public static String validateSubject(String claimName, String claimValue) throws ValidateException {
  return validateString(claimName, claimValue);
}
// Empty, null, and whitespace-only subjects must all be rejected.
@Test
public void testValidateSubjectDisallowsEmptyNullAndWhitespace() {
  assertThrows(ValidateException.class, () -> ClaimValidationUtils.validateSubject("sub", ""));
  assertThrows(ValidateException.class, () -> ClaimValidationUtils.validateSubject("sub", null));
  assertThrows(ValidateException.class, () -> ClaimValidationUtils.validateSubject("sub", "    "));
}
@Override public GcsResourceId resolve(String other, ResolveOptions resolveOptions) { checkState( isDirectory(), String.format("Expected the gcsPath is a directory, but had [%s].", gcsPath)); checkArgument( resolveOptions.equals(StandardResolveOptions.RESOLVE_FILE) || resolveOptions.equals(StandardResolveOptions.RESOLVE_DIRECTORY), String.format("ResolveOptions: [%s] is not supported.", resolveOptions)); if (resolveOptions.equals(StandardResolveOptions.RESOLVE_FILE)) { checkArgument( !other.endsWith("/"), "The resolved file: [%s] should not end with '/'.", other); return fromGcsPath(gcsPath.resolve(other)); } else { // StandardResolveOptions.RESOLVE_DIRECTORY if (other.endsWith("/")) { // other already contains the delimiter for gcs. // It is not recommended for callers to set the delimiter. // However, we consider it as a valid input. return fromGcsPath(gcsPath.resolve(other)); } else { return fromGcsPath(gcsPath.resolve(other + "/")); } } }
// Resolving against a FILE resource (not a directory) must fail with an
// IllegalStateException that names the offending path.
@Test
public void testResolveInvalidNotDirectory() {
  ResourceId tmpDir =
      toResourceIdentifier("gs://my_bucket/")
          .resolve("tmp dir", StandardResolveOptions.RESOLVE_FILE);
  thrown.expect(IllegalStateException.class);
  thrown.expectMessage("Expected the gcsPath is a directory, but had [gs://my_bucket/tmp dir].");
  tmpDir.resolve("aa", StandardResolveOptions.RESOLVE_FILE);
}
/**
 * Returns the root component.
 *
 * @throws IllegalStateException if the holder has not been initialized yet
 */
@Override
public Component getRoot() {
  checkInitialized();
  return this.root;
}
// Reading the root before initialization must fail fast with a clear message.
@Test
public void getRoot_throws_ISE_if_root_has_not_been_set_yet() {
  assertThatThrownBy(() -> underTest.getRoot())
      .isInstanceOf(IllegalStateException.class)
      .hasMessage("Holder has not been initialized yet");
}
/**
 * Applies the requested file attributes from {@code srcFileStatus} onto {@code path}
 * on {@code targetFS}, only issuing filesystem calls for attributes that actually
 * differ (to minimize RPCs).
 *
 * @param targetFS target filesystem
 * @param path target path to update
 * @param srcFileStatus source attributes to copy
 * @param attributes which attributes to preserve; BLOCKSIZE/CHECKSUMTYPE are ignored
 * @param preserveRawXattrs whether raw-namespace xattrs are copied even when XATTR
 *     preservation is off
 * @throws IOException on filesystem errors
 */
public static void preserve(FileSystem targetFS, Path path,
    CopyListingFileStatus srcFileStatus,
    EnumSet<FileAttribute> attributes,
    boolean preserveRawXattrs) throws IOException {

  // strip out those attributes we don't need any more
  attributes.remove(FileAttribute.BLOCKSIZE);
  attributes.remove(FileAttribute.CHECKSUMTYPE);
  // If not preserving anything from FileStatus, don't bother fetching it.
  FileStatus targetFileStatus = attributes.isEmpty() ? null : targetFS.getFileStatus(path);
  String group = targetFileStatus == null ? null : targetFileStatus.getGroup();
  String user = targetFileStatus == null ? null : targetFileStatus.getOwner();
  boolean chown = false;

  if (attributes.contains(FileAttribute.ACL)) {
    List<AclEntry> srcAcl = srcFileStatus.getAclEntries();
    List<AclEntry> targetAcl = getAcl(targetFS, targetFileStatus);
    if (!srcAcl.equals(targetAcl)) {
      targetFS.removeAcl(path);
      targetFS.setAcl(path, srcAcl);
    }
    // setAcl doesn't preserve sticky bit, so also call setPermission if needed.
    if (srcFileStatus.getPermission().getStickyBit() !=
        targetFileStatus.getPermission().getStickyBit()) {
      targetFS.setPermission(path, srcFileStatus.getPermission());
    }
  } else if (attributes.contains(FileAttribute.PERMISSION) &&
    !srcFileStatus.getPermission().equals(targetFileStatus.getPermission())) {
    targetFS.setPermission(path, srcFileStatus.getPermission());
  }

  final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
  if (preserveXAttrs || preserveRawXattrs) {
    final String rawNS = StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
    Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
    Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
    if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
      for (Entry<String, byte[]> entry : srcXAttrs.entrySet()) {
        String xattrName = entry.getKey();
        // Raw-namespace xattrs are copied whenever preserveRawXattrs is set,
        // even if regular xattr preservation is off.
        if (xattrName.startsWith(rawNS) || preserveXAttrs) {
          targetFS.setXAttr(path, xattrName, entry.getValue());
        }
      }
    }
  }

  // The replication factor can only be preserved for replicated files.
  // It is ignored when either the source or target file are erasure coded.
  if (attributes.contains(FileAttribute.REPLICATION) &&
      !targetFileStatus.isDirectory() &&
      !targetFileStatus.isErasureCoded() &&
      !srcFileStatus.isErasureCoded() &&
      srcFileStatus.getReplication() != targetFileStatus.getReplication()) {
    targetFS.setReplication(path, srcFileStatus.getReplication());
  }

  if (attributes.contains(FileAttribute.GROUP) &&
      !group.equals(srcFileStatus.getGroup())) {
    group = srcFileStatus.getGroup();
    chown = true;
  }

  if (attributes.contains(FileAttribute.USER) &&
      !user.equals(srcFileStatus.getOwner())) {
    user = srcFileStatus.getOwner();
    chown = true;
  }

  // Batch owner and group changes into a single setOwner call.
  if (chown) {
    targetFS.setOwner(path, user, group);
  }

  if (attributes.contains(FileAttribute.TIMES)) {
    targetFS.setTimes(path,
        srcFileStatus.getModificationTime(),
        srcFileStatus.getAccessTime());
  }
}
// Only REPLICATION is requested, and replication does not apply to directories, so
// no attribute of the target directory should change.
@Test
public void testPreserveReplicationOnDirectory() throws IOException {
  FileSystem fs = FileSystem.get(config);
  EnumSet<FileAttribute> attributes = EnumSet.of(FileAttribute.REPLICATION);
  Path dst = new Path("/tmp/abc");
  Path src = new Path("/tmp/src");
  createDirectory(fs, src);
  createDirectory(fs, dst);
  fs.setPermission(src, fullPerm);
  fs.setOwner(src, "somebody", "somebody-group");
  fs.setReplication(src, (short) 1);
  fs.setPermission(dst, noPerm);
  fs.setOwner(dst, "nobody", "nobody-group");
  fs.setReplication(dst, (short) 2);
  CopyListingFileStatus srcStatus = new CopyListingFileStatus(fs.getFileStatus(src));
  DistCpUtils.preserve(fs, dst, srcStatus, attributes, false);
  CopyListingFileStatus dstStatus = new CopyListingFileStatus(fs.getFileStatus(dst));
  // FileStatus.equals only compares path field, must explicitly compare all fields
  Assert.assertFalse(srcStatus.getPermission().equals(dstStatus.getPermission()));
  Assert.assertFalse(srcStatus.getOwner().equals(dstStatus.getOwner()));
  Assert.assertFalse(srcStatus.getGroup().equals(dstStatus.getGroup()));
  // Replication shouldn't apply to dirs so this should still be 0 == 0
  Assert.assertTrue(srcStatus.getReplication() == dstStatus.getReplication());
}
/**
 * Converts a scalar operator tree into Kudu predicates via the visitor pattern.
 *
 * @param operator the operator to convert; may be null
 * @return the resulting predicates, or {@code null} for a null input
 */
public List<KuduPredicate> convert(ScalarOperator operator) {
  return operator == null ? null : operator.accept(this, null);
}
// A GE binary predicate on an int column must convert to the Kudu predicate `f0 >= 5`.
@Test
public void testGe() {
  ConstantOperator value = ConstantOperator.createInt(5);
  ScalarOperator op = new BinaryPredicateOperator(BinaryType.GE, F0, value);
  List<KuduPredicate> result = CONVERTER.convert(op);
  Assert.assertEquals(result.get(0).toString(), "`f0` >= 5");
}
/**
 * Visits each child in order, short-circuiting when a child yields a control-flow
 * result (BREAK, CONTINUE, or RETURN); otherwise returns the last child's result
 * (NONE when there are no children).
 */
@Override
public SelResult childrenAccept(SelParserVisitor visitor, Object data) {
  SelResult result = SelResult.NONE;
  if (children == null) {
    return result;
  }
  for (int idx = 0; idx < children.length; ++idx) {
    result = (SelResult) children[idx].jjtAccept(visitor, data);
    // Control-flow results propagate immediately to the caller.
    if (result == SelResult.BREAK
        || result == SelResult.CONTINUE
        || result == SelResult.RETURN) {
      return result;
    }
  }
  return result;
}
// The first child returning CONTINUE must short-circuit traversal: only one child is
// visited (visited counter array reflects a single visit).
@Test
public void testVisitedContinueNode() {
  root.jjtAddChild(continueNode, 2);
  root.jjtAddChild(continueNode, 1);
  root.jjtAddChild(continueNode, 0);
  SelResult res = root.childrenAccept(null, null);
  assertEquals(SelResult.CONTINUE, res);
  assertArrayEquals(new int[] {0, 1, 0, 0, 0}, visited);
}
@Override // mappedStatementId 参数,暂时没有用。以后,可以基于 mappedStatementId + DataPermission 进行缓存 public List<DataPermissionRule> getDataPermissionRule(String mappedStatementId) { // 1. 无数据权限 if (CollUtil.isEmpty(rules)) { return Collections.emptyList(); } // 2. 未配置,则默认开启 DataPermission dataPermission = DataPermissionContextHolder.get(); if (dataPermission == null) { return rules; } // 3. 已配置,但禁用 if (!dataPermission.enable()) { return Collections.emptyList(); } // 4. 已配置,只选择部分规则 if (ArrayUtil.isNotEmpty(dataPermission.includeRules())) { return rules.stream().filter(rule -> ArrayUtil.contains(dataPermission.includeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 5. 已配置,只排除部分规则 if (ArrayUtil.isNotEmpty(dataPermission.excludeRules())) { return rules.stream().filter(rule -> !ArrayUtil.contains(dataPermission.excludeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 6. 已配置,全部规则 return rules; }
@Test
public void testGetDataPermissionRule_03() {
    // Arbitrary statement id — currently ignored by the factory.
    String mappedStatementId = randomString();
    // Put TestClass03's @DataPermission annotation into the context; presumably
    // it is configured with enable = false — TODO confirm against TestClass03.
    DataPermissionContextHolder.add(AnnotationUtils.findAnnotation(TestClass03.class, DataPermission.class));
    // Invoke the factory.
    List<DataPermissionRule> result = dataPermissionRuleFactory.getDataPermissionRule(mappedStatementId);
    // A disabled annotation must yield no rules.
    assertTrue(result.isEmpty());
}
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
    // Populate the per-request context before the rest of the chain runs, and
    // always clear it afterwards so state never leaks between pooled requests.
    RequestContext requestContext = RequestContextHolder.getContext();
    try {
        requestContext.getBasicContext().setRequestProtocol(BasicContext.HTTP_PROTOCOL);
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        setRequestTarget(request, requestContext);
        setEncoding(request, requestContext);
        setAddressContext(request, requestContext);
        setOtherBasicContext(request, requestContext);
        filterChain.doFilter(servletRequest, servletResponse);
    } finally {
        RequestContextHolder.removeContext();
    }
}
@Test
public void testDoFilterSetsCorrectContextValues() throws Exception {
    // The mock "next" filter inspects the request context inside the chain and
    // records any assertion failure; rethrow it here so the test fails properly.
    MockNextFilter nextFilter = new MockNextFilter("testApp", "GBK");
    filter.doFilter(servletRequest, servletResponse, new MockFilterChain(servlet, nextFilter));
    if (null != nextFilter.error) {
        throw nextFilter.error;
    }
}
@Override
public JType apply(String nodeName, JsonNode schemaNode, JsonNode parent, JClassContainer generatableType, Schema schema) {
    // Resolve $ref indirections first, recursing until a concrete schema is
    // reached; an already-generated schema short-circuits to its existing type.
    if (schemaNode.has("$ref")) {
        final String nameFromRef = nameFromRef(schemaNode.get("$ref").asText());
        schema = ruleFactory.getSchemaStore().create(schema, schemaNode.get("$ref").asText(), ruleFactory.getGenerationConfig().getRefFragmentPathDelimiters());
        schemaNode = schema.getContent();
        if (schema.isGenerated()) {
            return schema.getJavaType();
        }
        // Prefer the name derived from the $ref fragment over the property name.
        return apply(nameFromRef != null ? nameFromRef : nodeName, schemaNode, parent, generatableType, schema);
    }
    JType javaType;
    // Enums get a dedicated rule; everything else goes through the type rule.
    if (schemaNode.has("enum")) {
        javaType = ruleFactory.getEnumRule().apply(nodeName, schemaNode, parent, generatableType, schema);
    } else {
        javaType = ruleFactory.getTypeRule().apply(nodeName, schemaNode, parent, generatableType.getPackage(), schema);
    }
    // Record the produced type on the schema so later $refs reuse it.
    schema.setJavaTypeIfEmpty(javaType);
    return javaType;
}
@Test
public void existingTypeIsUsedWhenTypeIsAlreadyGenerated() throws URISyntaxException {
    // A schema that already carries a generated Java type must be returned
    // as-is when resolved again through a $ref.
    JType previouslyGeneratedType = mock(JType.class);
    URI schemaUri = getClass().getResource("/schema/address.json").toURI();
    SchemaStore schemaStore = new SchemaStore();
    Schema schema = schemaStore.create(schemaUri, "#/.");
    schema.setJavaType(previouslyGeneratedType);
    final GenerationConfig mockGenerationConfig = mock(GenerationConfig.class);
    when(mockGenerationConfig.getRefFragmentPathDelimiters()).thenReturn("#/.");
    when(mockRuleFactory.getSchemaStore()).thenReturn(schemaStore);
    when(mockRuleFactory.getGenerationConfig()).thenReturn(mockGenerationConfig);
    // The node under test is a pure $ref to the pre-generated schema.
    ObjectNode schemaNode = new ObjectMapper().createObjectNode();
    schemaNode.put("$ref", schemaUri.toString());
    JType result = rule.apply(NODE_NAME, schemaNode, null, null, schema);
    assertThat(result, is(sameInstance(previouslyGeneratedType)));
}
/**
 * Builds a BootstrapMetadata from the given records. All records are scanned
 * and the last FeatureLevelRecord carrying metadata.version wins; the absence
 * of any such record is a fatal error naming the source for diagnostics.
 */
public static BootstrapMetadata fromRecords(List<ApiMessageAndVersion> records, String source) {
    MetadataVersion metadataVersion = null;
    for (ApiMessageAndVersion record : records) {
        // Keep the most recent version seen; non-version records leave it untouched.
        metadataVersion = recordToMetadataVersion(record.message()).orElse(metadataVersion);
    }
    if (metadataVersion == null) {
        throw new RuntimeException("No FeatureLevelRecord for " + MetadataVersion.FEATURE_NAME +
                " was found in the bootstrap metadata from " + source);
    }
    return new BootstrapMetadata(records, metadataVersion, source);
}
@Test
public void testFromRecordsListWithOldMetadataVersion() {
    // Bootstrap records carrying a pre-3.3-IV0 metadata.version must be
    // rejected, and the error message must name the source ("quux").
    RuntimeException exception = assertThrows(RuntimeException.class,
        () -> BootstrapMetadata.fromRecords(RECORDS_WITH_OLD_METADATA_VERSION, "quux"));
    assertEquals("Bootstrap metadata.version before 3.3-IV0 are not supported. Can't load " +
        "metadata from quux", exception.getMessage());
}
// Pure delegation: forwards descendingIterator() to the wrapped underlying set.
@Override
public Iterator<E> descendingIterator() {
    return underlying().descendingIterator();
}
@Test
public void testDelegationOfDescendingIterator() {
    // Verifies the wrapper delegates descendingIterator() to the underlying
    // TreePSet and hands back the delegate's iterator unchanged (identity).
    TreePSet<Integer> testSet = TreePSet.from(Arrays.asList(2, 3, 4));
    new PCollectionsTreeSetWrapperDelegationChecker<>()
        .defineMockConfigurationForFunctionInvocation(TreePSet::descendingIterator, testSet.descendingIterator())
        .defineWrapperFunctionInvocationAndMockReturnValueTransformation(PCollectionsImmutableNavigableSet::descendingIterator, identity())
        .doFunctionDelegationCheck();
}
@Override @Transactional(rollbackFor = Exception.class) @CacheEvict(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST, allEntries = true) // allEntries 清空所有缓存,因为此时不知道 id 对应的 permission 是多少。直接清理,简单有效 public void deleteMenu(Long id) { // 校验是否还有子菜单 if (menuMapper.selectCountByParentId(id) > 0) { throw exception(MENU_EXISTS_CHILDREN); } // 校验删除的菜单是否存在 if (menuMapper.selectById(id) == null) { throw exception(MENU_NOT_EXISTS); } // 标记删除 menuMapper.deleteById(id); // 删除授予给角色的权限 permissionService.processMenuDeleted(id); }
@Test public void testDeleteMenu_existChildren() { // mock 数据(构造父子菜单) MenuDO sonMenu = createParentAndSonMenu(); // 准备参数 Long parentId = sonMenu.getParentId(); // 调用并断言异常 assertServiceException(() -> menuService.deleteMenu(parentId), MENU_EXISTS_CHILDREN); }
/**
 * Writes one record as a Parquet message. On failure the offending message is
 * logged (builders are materialized first) and the exception is rethrown.
 */
@Override
public void write(T record) {
    recordConsumer.startMessage();
    try {
        messageWriter.writeTopLevelMessage(record);
    } catch (RuntimeException e) {
        // Builders must be built before they can be rendered in the log.
        Message m = (record instanceof Message.Builder) ? ((Message.Builder) record).build() : (Message) record;
        // Fix: the placeholders were filled in the wrong order — the template
        // "Cannot write message {}: {}" expects the message first, then the cause.
        LOG.error("Cannot write message {}: {}", m, e.getMessage());
        throw e;
    }
    recordConsumer.endMessage();
}
@Test
public void testProto3RepeatedIntMessageEmpty() throws Exception {
    // An empty repeated field must still emit a start/end message pair and
    // nothing else on the record consumer.
    RecordConsumer readConsumerMock = Mockito.mock(RecordConsumer.class);
    ProtoWriteSupport<TestProto3.RepeatedIntMessage> instance = createReadConsumerInstance(TestProto3.RepeatedIntMessage.class, readConsumerMock);
    TestProto3.RepeatedIntMessage.Builder msg = TestProto3.RepeatedIntMessage.newBuilder();
    instance.write(msg.build());
    // Verify ordering: startMessage strictly before endMessage, no other calls.
    InOrder inOrder = Mockito.inOrder(readConsumerMock);
    inOrder.verify(readConsumerMock).startMessage();
    inOrder.verify(readConsumerMock).endMessage();
    Mockito.verifyNoMoreInteractions(readConsumerMock);
}
/**
 * Converts a Joda Period into a RelativeRange measured in seconds.
 * Calendar units are approximated: a year counts as 365 days and a month as
 * 30 days. A null period maps to null.
 */
@Override
public RelativeRange apply(final Period period) {
    if (period == null) {
        return null;
    }
    final int approximateDays = period.getYears() * 365 + period.getMonths() * 30;
    final int seconds = period.withYears(0).withMonths(0)
            .plusDays(approximateDays)
            .toStandardSeconds()
            .getSeconds();
    return RelativeRange.Builder.builder().from(seconds).build();
}
@Test
void testYearsMonthsMixedPeriodConversion() {
    // 5 years + 1 month + 1h + 10m + 7s, with years approximated as 365 days
    // and months as 30 days by the converter under test.
    final RelativeRange result = converter.apply(Period.years(5).plusMonths(1).plusHours(1).plusMinutes(10).plusSeconds(7));
    verifyResult(result, ((5 * 365 + 1 * 30) * 24 * 60 * 60) + (60 * 60) + (10 * 60) + 7);
}
/**
 * Renders the partition state for diagnostics: a header with the stamp, one
 * line per replica, then the completed migrations.
 */
@Override
public String toString() {
    final String newline = System.lineSeparator();
    final StringBuilder text = new StringBuilder();
    text.append("PartitionRuntimeState [").append(stamp).append("]{").append(newline);
    for (PartitionReplica replica : allReplicas) {
        text.append(replica).append(newline);
    }
    text.append(", completedMigrations=").append(completedMigrations).append('}');
    return text.toString();
}
@Test
public void toString_whenConstructed() throws UnknownHostException {
    // toString() must include the address of every replica.
    PartitionRuntimeState state = createPartitionState(0,
        replica("127.0.0.1", 5701),
        replica("127.0.0.2", 5702)
    );
    assertContains(state.toString(), "127.0.0.1");
    assertContains(state.toString(), "127.0.0.2");
}
// BIGINT modulus. Java's % on longs throws ArithmeticException when the right
// operand is zero; that is translated into Presto's DIVISION_BY_ZERO error.
@ScalarOperator(MODULUS)
@SqlType(StandardTypes.BIGINT)
public static long modulus(@SqlType(StandardTypes.BIGINT) long left, @SqlType(StandardTypes.BIGINT) long right)
{
    try {
        return left % right;
    }
    catch (ArithmeticException e) {
        throw new PrestoException(DIVISION_BY_ZERO, e);
    }
}
@Test
public void testModulus() {
    // Cross-checks SQL modulus against Java's % for operands beyond int range.
    assertFunction("100000000037 % 37", BIGINT, 100000000037L % 37L);
    assertFunction("37 % 100000000017", BIGINT, 37 % 100000000017L);
    assertFunction("100000000017 % 37", BIGINT, 100000000017L % 37L);
    assertFunction("100000000017 % 100000000017", BIGINT, 100000000017L % 100000000017L);
}
// Returns the service name this event refers to.
public String getServiceName() {
    return serviceName;
}
@Test
void testGetServiceName() {
    String eventScope = "scope-001";
    String serviceName = "a";
    String groupName = "b";
    String clusters = "c";
    List<Instance> hosts = new ArrayList<>();
    Instance ins = new Instance();
    hosts.add(ins);
    InstancesDiff diff = new InstancesDiff();
    diff.setAddedInstances(hosts);
    InstancesChangeEvent event = new InstancesChangeEvent(eventScope, serviceName, groupName, clusters, hosts, diff);
    // The event must echo back every constructor argument unchanged.
    assertEquals(eventScope, event.scope());
    assertEquals(serviceName, event.getServiceName());
    assertEquals(clusters, event.getClusters());
    assertEquals(groupName, event.getGroupName());
    List<Instance> hosts1 = event.getHosts();
    assertEquals(hosts.size(), hosts1.size());
    assertEquals(hosts.get(0), hosts1.get(0));
    InstancesDiff diff1 = event.getInstancesDiff();
    assertTrue(diff1.hasDifferent());
    assertEquals(diff.getAddedInstances().size(), diff1.getAddedInstances().size());
    // Fix: this assertion previously compared diff against itself
    // (diff.getAddedInstances().get(0) on both sides), so it could never fail.
    assertEquals(diff.getAddedInstances().get(0), diff1.getAddedInstances().get(0));
    assertEquals(diff.getRemovedInstances().size(), diff1.getRemovedInstances().size());
    assertEquals(diff.getModifiedInstances().size(), diff1.getModifiedInstances().size());
}
// Sets the request character encoding (e.g. "GBK"); no validation is done here.
public void setEncoding(String encoding) {
    this.encoding = encoding;
}
@Test
void testSetEncoding() {
    // The default encoding comes from Constants.ENCODE; after the setter the
    // context must report the new value verbatim.
    assertEquals(Constants.ENCODE, basicContext.getEncoding());
    basicContext.setEncoding("GBK");
    assertEquals("GBK", basicContext.getEncoding());
}
// Submits the action to the current thread's Continuation (CONTINUATION is a
// thread-local holder — presumably per-thread executor; confirm semantics).
void doSubmit(final Runnable action) {
    CONTINUATION.get().submit(action);
}
@Test
public void testActionCalled() {
    // The submitted runnable must run and flip the flag before doSubmit returns.
    final AtomicBoolean executed = new AtomicBoolean(false);
    final Continuations continuations = new Continuations();
    continuations.doSubmit(() -> executed.set(true));
    assertTrue(executed.get());
}
// Creates an output format that writes savepoint metadata under the given path.
// NOTE(review): savepointPath is not null-checked here — presumably validated
// by callers; confirm before relying on it.
public SavepointOutputFormat(Path savepointPath) {
    this.savepointPath = savepointPath;
}
@Test
public void testSavepointOutputFormat() throws Exception {
    // Round-trip: write savepoint metadata through the format, then reload it
    // from disk and compare checkpoint id and operator states.
    Path path = new Path(temporaryFolder.newFolder().getAbsolutePath());
    SavepointOutputFormat format = createSavepointOutputFormat(path);
    CheckpointMetadata metadata = createSavepoint();
    format.open(0, 1);
    format.writeRecord(metadata);
    format.close();
    CheckpointMetadata metadataOnDisk = SavepointLoader.loadSavepointMetadata(path.getPath());
    Assert.assertEquals(
        "Incorrect checkpoint id",
        metadata.getCheckpointId(),
        metadataOnDisk.getCheckpointId());
    Assert.assertEquals(
        "Incorrect number of operator states in savepoint",
        metadata.getOperatorStates().size(),
        metadataOnDisk.getOperatorStates().size());
    Assert.assertEquals(
        "Incorrect operator state in savepoint",
        metadata.getOperatorStates().iterator().next(),
        metadataOnDisk.getOperatorStates().iterator().next());
}
// Lists the names of all objects of type TABLE in the given data source.
public static List<String> getTables(DataSource ds) {
    return getTables(ds, TableType.TABLE);
}
@Test
public void getTablesTest() {
    // Presumably the fixture datasource contains a "user" table first — confirm.
    final List<String> tables = MetaUtil.getTables(ds);
    assertEquals("user", tables.get(0));
}
// Assigns the NODE_FUNCTIONS variable inside 'toPopulate':
//   no nested nodes -> Collections.emptyList()
//   otherwise       -> Arrays.asList(<evaluateNode method reference per nested node class>)
static void populateEvaluateNodeWithNodeFunctions(final BlockStmt toPopulate, final List<String> nestedNodesFullClasses) {
    final MethodCallExpr valuesInit = new MethodCallExpr();
    if (nestedNodesFullClasses.isEmpty()) {
        valuesInit.setScope(new TypeExpr(parseClassOrInterfaceType(Collections.class.getName())));
        valuesInit.setName(EMPTY_LIST);
    } else {
        // Build one method-reference expression per nested node class.
        final NodeList<Expression> methodReferenceExprs = NodeList.nodeList(nestedNodesFullClasses.stream()
                .map(KiePMMLNodeFactory::getEvaluateNodeMethodReference)
                .collect(Collectors.toList()));
        valuesInit.setScope(new TypeExpr(parseClassOrInterfaceType(Arrays.class.getName())));
        valuesInit.setName(AS_LIST);
        valuesInit.setArguments(methodReferenceExprs);
    }
    CommonCodegenUtils.setVariableDeclaratorValue(toPopulate, NODE_FUNCTIONS, valuesInit);
}
@Test
void populateEvaluateNodeWithNodeFunctions() {
    // Build a block with a declared-but-uninitialized NODE_FUNCTIONS variable.
    final BlockStmt toPopulate = new BlockStmt();
    final VariableDeclarator variableDeclarator = new VariableDeclarator();
    variableDeclarator.setType("Object");
    variableDeclarator.setName(NODE_FUNCTIONS);
    toPopulate.addStatement(new VariableDeclarationExpr(variableDeclarator));
    assertThat(variableDeclarator.getInitializer()).isNotPresent();
    // Empty class list -> expect Collections.emptyList() initializer.
    List<String> nestedNodesFullClasses = Collections.emptyList();
    KiePMMLNodeFactory.populateEvaluateNodeWithNodeFunctions(toPopulate, nestedNodesFullClasses);
    commonVerifyEvaluateNodeWithNodeFunctions(variableDeclarator, nestedNodesFullClasses);
    // Populated class list -> expect Arrays.asList(...) of method references.
    nestedNodesFullClasses = IntStream.range(0, 2)
        .mapToObj(i -> "full.node.NodeClassName" + i)
        .collect(Collectors.toList());
    KiePMMLNodeFactory.populateEvaluateNodeWithNodeFunctions(toPopulate, nestedNodesFullClasses);
    commonVerifyEvaluateNodeWithNodeFunctions(variableDeclarator, nestedNodesFullClasses);
}
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
    String propertyTypeName = getTypeName(node);
    JType type;
    // Order matters: an explicit "object" type or a non-empty "properties" block
    // wins over everything else, then an explicit existing Java type, then the
    // JSON primitive types; anything unrecognized falls back to Object.
    if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
        type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else if (node.has("existingJavaType")) {
        String typeName = node.path("existingJavaType").asText();
        // Primitive names ("int", "boolean", ...) resolve differently from class names.
        if (isPrimitive(typeName, jClassContainer.owner())) {
            type = primitiveType(typeName, jClassContainer.owner());
        } else {
            type = resolveType(jClassContainer, typeName);
        }
    } else if (propertyTypeName.equals("string")) {
        type = jClassContainer.owner().ref(String.class);
    } else if (propertyTypeName.equals("number")) {
        type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("integer")) {
        type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("boolean")) {
        type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("array")) {
        type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else {
        type = jClassContainer.owner().ref(Object.class);
    }
    // "format" / string "media" refinements only apply when no explicit Java type
    // was requested via javaType/existingJavaType.
    if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
        type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
    } else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
        type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
    }
    return type;
}
@Test
public void applyChoosesObjectOnUnrecognizedType() {
    // A "type" value the rule does not recognize must fall back to java.lang.Object.
    JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName());
    ObjectNode schemaNode = new ObjectMapper().createObjectNode();
    schemaNode.put("type", "unknown");
    JType result = rule.apply("fooBar", schemaNode, null, jpackage, null);
    assertThat(result.fullName(), is(Object.class.getName()));
}
// Delegates to the superclass without behavioral change — presumably the
// override exists for documentation or to anchor subclass-specific behavior
// later; TODO confirm why it is kept.
@Override
protected TableRecords getUndoRows() {
    return super.getUndoRows();
}
@Test
public void getUndoRows() {
    // For this executor the undo rows are expected to equal the before image.
    Assertions.assertEquals(executor.getUndoRows(), executor.getSqlUndoLog().getBeforeImage());
}
// Registers 'serializer' for 'type' after wrapping it in an adapter; delegates
// to safeRegister(Class, SerializerAdapter), which presumably rejects duplicate
// registrations with IllegalStateException (see tests) — confirm.
protected final void safeRegister(final Class type, final Serializer serializer) {
    safeRegister(type, createSerializerAdapter(serializer));
}
// Registering a second serializer for a type id that is already taken must fail fast.
@Test(expected = IllegalStateException.class)
public void testSafeRegister_alreadyRegisteredTypeId() {
    abstractSerializationService.safeRegister(StringBuffer.class, new StringBufferSerializer(true));
    abstractSerializationService.safeRegister(StringBuilder.class, new TheOtherGlobalSerializer(true));
}
/**
 * Shifts {@code orientation} by ±2π, if necessary, so that it lies within π of
 * {@code baseOrientation}; otherwise returns it unchanged.
 */
public double alignOrientation(double baseOrientation, double orientation) {
    // Base in the upper half-range: wrap orientations that are too far below.
    if (baseOrientation >= 0 && orientation < -Math.PI + baseOrientation) {
        return orientation + 2 * Math.PI;
    }
    // Base in the lower half-range: wrap orientations that are too far above.
    if (baseOrientation < 0 && orientation > Math.PI + baseOrientation) {
        return orientation - 2 * Math.PI;
    }
    return orientation;
}
@Test
public void testAlignOrientation() {
    // Same angle: unchanged.
    assertEquals(90.0, Math.toDegrees(AC.alignOrientation(Math.toRadians(90), Math.toRadians(90))), 0.001);
    // -135 deg is more than pi below 90 deg, so it wraps up to 225 deg.
    assertEquals(225.0, Math.toDegrees(AC.alignOrientation(Math.toRadians(90), Math.toRadians(-135))), 0.001);
    // -45 deg already lies within pi of -135 deg: unchanged.
    assertEquals(-45.0, Math.toDegrees(AC.alignOrientation(Math.toRadians(-135), Math.toRadians(-45))), 0.001);
    // 90 deg is more than pi above -135 deg, so it wraps down to -270 deg.
    assertEquals(-270.0, Math.toDegrees(AC.alignOrientation(Math.toRadians(-135), Math.toRadians(90))), 0.001);
}
/**
 * Decodes a raw GELF payload into a {@link Message}: parses and validates the
 * JSON, maps the well-known GELF fields, then copies all remaining additional
 * fields with suitable Java representations.
 *
 * @throws IllegalStateException    if the payload is not parseable JSON
 * @throws IllegalArgumentException if the JSON is not a valid GELF message
 */
@Nullable
@Override
public Message decode(@Nonnull final RawMessage rawMessage) {
    final GELFMessage gelfMessage = new GELFMessage(rawMessage.getPayload(), rawMessage.getRemoteAddress());
    final String json = gelfMessage.getJSON(decompressSizeLimit, charset);
    final JsonNode node;
    try {
        node = objectMapper.readTree(json);
        if (node == null) {
            throw new IOException("null result");
        }
    } catch (final Exception e) {
        log.error("Could not parse JSON, first 400 characters: " + StringUtils.abbreviate(json, 403), e);
        throw new IllegalStateException("JSON is null/could not be parsed (invalid JSON)", e);
    }
    try {
        validateGELFMessage(node, rawMessage.getId(), rawMessage.getRemoteAddress());
    } catch (IllegalArgumentException e) {
        log.trace("Invalid GELF message <{}>", node);
        throw e;
    }
    // Timestamp. Non-positive/absent values fall back to the raw message's own timestamp.
    final double messageTimestamp = timestampValue(node);
    final DateTime timestamp;
    if (messageTimestamp <= 0) {
        timestamp = rawMessage.getTimestamp();
    } else {
        // we treat this as a unix timestamp
        timestamp = Tools.dateTimeFromDouble(messageTimestamp);
    }
    final Message message = messageFactory.createMessage(
            stringValue(node, "short_message"),
            stringValue(node, "host"),
            timestamp
    );
    message.addField(Message.FIELD_FULL_MESSAGE, stringValue(node, "full_message"));
    final String file = stringValue(node, "file");
    if (file != null && !file.isEmpty()) {
        message.addField("file", file);
    }
    final long line = longValue(node, "line");
    if (line > -1) {
        message.addField("line", line);
    }
    // Level is set by server if not specified by client.
    final int level = intValue(node, "level");
    if (level > -1) {
        message.addField("level", level);
    }
    // Facility is set by server if not specified by client.
    final String facility = stringValue(node, "facility");
    if (facility != null && !facility.isEmpty()) {
        message.addField("facility", facility);
    }
    // Add additional data if there is some.
    final Iterator<Map.Entry<String, JsonNode>> fields = node.fields();
    while (fields.hasNext()) {
        final Map.Entry<String, JsonNode> entry = fields.next();
        String key = entry.getKey();
        // Do not index useless GELF "version" field.
        if ("version".equals(key)) {
            continue;
        }
        // Don't include GELF syntax underscore in message field key.
        if (key.startsWith("_") && key.length() > 1) {
            key = key.substring(1);
        }
        // We already set short_message and host as message and source. Do not add as fields again.
        if ("short_message".equals(key) || "host".equals(key)) {
            continue;
        }
        // Skip standard or already set fields.
        if (message.getField(key) != null || Message.RESERVED_FIELDS.contains(key) && !Message.RESERVED_SETTABLE_FIELDS.contains(key)) {
            continue;
        }
        // Convert JSON containers to Strings, and pick a suitable number representation.
        final JsonNode value = entry.getValue();
        final Object fieldValue;
        if (value.isContainerNode()) {
            fieldValue = value.toString();
        } else if (value.isFloatingPointNumber()) {
            fieldValue = value.asDouble();
        } else if (value.isIntegralNumber()) {
            fieldValue = value.asLong();
        } else if (value.isNull()) {
            log.debug("Field [{}] is NULL. Skipping.", key);
            continue;
        } else if (value.isTextual()) {
            fieldValue = value.asText();
        } else {
            log.debug("Field [{}] has unknown value type. Skipping.", key);
            continue;
        }
        message.addField(key, fieldValue);
    }
    return message;
}
@Test
public void decodeSucceedsWithEmptyShortMessageButWithMessage() throws Exception {
    // A GELF payload with an empty short_message but a populated "message"
    // field must still decode into a non-null Message.
    final String json = "{" +
        "\"version\": \"1.1\"," +
        "\"host\": \"example.org\"," +
        "\"short_message\": \"\"," +
        "\"message\": \"A short message that helps you identify what is going on\"" +
        "}";
    final RawMessage rawMessage = new RawMessage(json.getBytes(StandardCharsets.UTF_8));
    final Message message = codec.decode(rawMessage);
    assertThat(message).isNotNull();
}
/**
 * Records one duty-cycle duration: proposes it as a new max and, when it
 * exceeds the configured threshold, bumps the exceeded counter. Measurements
 * arriving after the counters are closed are dropped.
 */
public void reportMeasurement(final long durationNs) {
    if (maxCycleTime.isClosed()) {
        return;
    }
    maxCycleTime.proposeMaxOrdered(durationNs);
    if (durationNs > cycleTimeThresholdNs) {
        cycleTimeThresholdExceededCount.incrementOrdered();
    }
}
@Test
void reportMeasurementOnlyUpdatesThresholdCounterWhenExceeded() {
    // Every measurement proposes a new max, but only durations strictly above
    // the threshold bump the exceeded counter (1001 > 1000; 1000 does not count).
    final AtomicCounter maxCycleTime = mock(AtomicCounter.class);
    when(maxCycleTime.isClosed()).thenReturn(false);
    final AtomicCounter cycleTimeThresholdExceededCount = mock(AtomicCounter.class);
    final int cycleTimeThresholdNs = 1000;
    final DutyCycleStallTracker dutyCycleStallTracker = new DutyCycleStallTracker(maxCycleTime, cycleTimeThresholdExceededCount, cycleTimeThresholdNs);
    dutyCycleStallTracker.reportMeasurement(555);
    dutyCycleStallTracker.reportMeasurement(1000);
    dutyCycleStallTracker.reportMeasurement(1001);
    verify(maxCycleTime, times(3)).isClosed();
    verify(maxCycleTime).proposeMaxOrdered(555L);
    verify(maxCycleTime).proposeMaxOrdered(1000L);
    verify(maxCycleTime).proposeMaxOrdered(1001L);
    verify(cycleTimeThresholdExceededCount).incrementOrdered();
    verifyNoMoreInteractions(maxCycleTime, cycleTimeThresholdExceededCount);
}
// REST endpoint returning the cluster information as UTF-8 JSON or XML.
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
    return getClusterInfo();
}
@Test
public void testInfo() throws JSONException, Exception {
    // GET /ws/v1/cluster/info must answer with UTF-8 JSON describing the cluster.
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("cluster")
        .path("info").accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    JSONObject json = response.getEntity(JSONObject.class);
    verifyClusterInfo(json);
}
// Bounded offer: acquiring a permit from the per-queue semaphore enforces the
// capacity; waits up to 'timeout' and returns false if no permit was obtained.
// Presumably createSemaphore(e) also stages the element for insertion — confirm.
@Override
public boolean offer(V e, long timeout, TimeUnit unit) throws InterruptedException {
    RedissonQueueSemaphore semaphore = createSemaphore(e);
    return semaphore.tryAcquire(timeout, unit);
}
@Test
public void testOffer() {
    // With capacity 2, the first two offers succeed and all further offers fail.
    RBoundedBlockingQueue<Integer> queue = redisson.getBoundedBlockingQueue("blocking:queue");
    assertThat(queue.trySetCapacity(2)).isTrue();
    assertThat(queue.offer(1)).isTrue();
    assertThat(queue.offer(2)).isTrue();
    assertThat(queue.offer(3)).isFalse();
    assertThat(queue.offer(4)).isFalse();
}
public static Set<X509Certificate> filterValid( X509Certificate... certificates ) { final Set<X509Certificate> results = new HashSet<>(); if (certificates != null) { for ( X509Certificate certificate : certificates ) { if ( certificate == null ) { continue; } try { certificate.checkValidity(); } catch ( CertificateExpiredException | CertificateNotYetValidException e ) { // Not yet or no longer valid. Don't include in result. continue; } results.add( certificate ); } } return results; }
@Test
public void testFilterValidNull() throws Exception {
    // Setup fixture: a null input collection.
    final Collection<X509Certificate> input = null;
    // Execute system under test.
    final Collection<X509Certificate> result = CertificateUtils.filterValid( input );
    // Verify results: null input must yield an empty (non-null) result.
    assertTrue( result.isEmpty() );
}
// Maps each supported dialect to its large-binary column type; any other
// dialect id is rejected with IllegalArgumentException.
@Override
public String generateSqlType(Dialect dialect) {
    return switch (dialect.getId()) {
        case MsSql.ID -> "VARBINARY(MAX)";
        case Oracle.ID, H2.ID -> "BLOB";
        case PostgreSql.ID -> "BYTEA";
        default -> throw new IllegalArgumentException("Unsupported dialect id " + dialect.getId());
    };
}
// MS SQL has no BLOB type; VARBINARY(MAX) is its large-binary equivalent.
@Test
public void generateSqlType_for_MsSql() {
    assertThat(underTest.generateSqlType(new MsSql())).isEqualTo("VARBINARY(MAX)");
}
// Builds the DataHub REST emitter, in order of precedence:
// 1) a user-supplied DataHubEmitterSupplier class, 2) an explicit server URL
// with an optional token, 3) the library defaults.
public RestEmitter getRestEmitter() {
    if (contains(META_SYNC_DATAHUB_EMITTER_SUPPLIER_CLASS)) {
        return ((DataHubEmitterSupplier) ReflectionUtils.loadClass(getString(META_SYNC_DATAHUB_EMITTER_SUPPLIER_CLASS))).get();
    } else if (contains(META_SYNC_DATAHUB_EMITTER_SERVER)) {
        return RestEmitter.create(b -> b.server(getString(META_SYNC_DATAHUB_EMITTER_SERVER)).token(getStringOrDefault(META_SYNC_DATAHUB_EMITTER_TOKEN, null)));
    } else {
        return RestEmitter.createWithDefaults();
    }
}
@Test
void testGetEmitterFromSupplier() {
    // Configuring an emitter supplier class must take precedence and yield an emitter.
    Properties props = new Properties();
    props.setProperty(META_SYNC_DATAHUB_EMITTER_SUPPLIER_CLASS.key(), DummySupplier.class.getName());
    DataHubSyncConfig syncConfig = new DataHubSyncConfig(props);
    assertNotNull(syncConfig.getRestEmitter());
}
// Appends a segment of dictionary indexes, storing them at the narrowest width
// (byte/short/int) that can represent maxIndex. Callers must pass maxIndex
// values that never decrease across calls, so earlier segments never need
// widening.
public void addIndexes(int maxIndex, int[] dictionaryIndexes, int indexCount) {
    if (indexCount == 0 && indexRetainedBytes > 0) {
        // Ignore empty segment, since there are other segments present.
        return;
    }
    checkState(maxIndex >= lastMaxIndex, "LastMax is greater than the current max");
    lastMaxIndex = maxIndex;
    if (maxIndex <= Byte.MAX_VALUE) {
        byte[] byteIndexes = new byte[indexCount];
        for (int i = 0; i < indexCount; i++) {
            byteIndexes[i] = (byte) dictionaryIndexes[i];
        }
        appendByteIndexes(byteIndexes);
    } else if (maxIndex <= Short.MAX_VALUE) {
        short[] shortIndexes = new short[indexCount];
        for (int i = 0; i < indexCount; i++) {
            shortIndexes[i] = (short) dictionaryIndexes[i];
        }
        appendShortIndexes(shortIndexes);
    } else {
        // Copy only the first indexCount entries; the input array may be larger.
        int[] intIndexes = Arrays.copyOf(dictionaryIndexes, indexCount);
        appendIntegerIndexes(intIndexes);
    }
}
@Test
public void testIntegerIndexes() {
    // Indexes whose max exceeds Short.MAX_VALUE must be stored as ints; verify
    // the round-trip for empty, partial and full segment lengths.
    int[] dictionaryIndexes = createIndexArray(Integer.MAX_VALUE, MAX_DICTIONARY_INDEX);
    for (int length : ImmutableList.of(0, 10, dictionaryIndexes.length)) {
        DictionaryRowGroupBuilder rowGroupBuilder = new DictionaryRowGroupBuilder();
        rowGroupBuilder.addIndexes(Integer.MAX_VALUE, dictionaryIndexes, length);
        int[] intIndexes = getIntegerIndexes(rowGroupBuilder);
        assertEquals(length, intIndexes.length);
        for (int i = 0; i < length; i++) {
            assertEquals(dictionaryIndexes[i], intIndexes[i]);
        }
    }
}
/**
 * Returns a short description of the indicator, e.g. "AroonDownIndicator barCount: 5".
 */
@Override
public String toString() {
    final StringBuilder description = new StringBuilder(getClass().getSimpleName());
    description.append(" barCount: ").append(barCount);
    return description.toString();
}
@Test
public void onlyNaNValues() {
    // A series made entirely of NaN bars must propagate NaN through the
    // indicator at every index instead of throwing.
    BarSeries series = new BaseBarSeriesBuilder().withNumTypeOf(numFunction).withName("NaN test").build();
    for (long i = 0; i <= 1000; i++) {
        series.addBar(ZonedDateTime.now().plusDays(i), NaN, NaN, NaN, NaN, NaN);
    }
    AroonDownIndicator aroonDownIndicator = new AroonDownIndicator(series, 5);
    for (int i = series.getBeginIndex(); i <= series.getEndIndex(); i++) {
        assertEquals(NaN.toString(), aroonDownIndicator.getValue(i).toString());
    }
}
// Schedules 'command' with context propagation: captures the caller's MDC map
// now, restores it on the worker thread for the duration of the task, and
// always clears the MDC afterwards so pooled threads do not leak entries.
@Override
public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
    Map<String, String> mdcContextMap = getMdcContextMap();
    return super.schedule(ContextPropagator.decorateRunnable(contextPropagators, () -> {
        try {
            setMDCContext(mdcContextMap);
            command.run();
        } finally {
            MDC.clear();
        }
    }), delay, unit);
}
@Test
public void testScheduleCallablePropagatesContext() {
    // A value placed in the caller's thread-local context must be visible to
    // the scheduled task when it runs on the scheduler thread.
    TestThreadLocalContextHolder.put("ValueShouldCrossThreadBoundary");
    final ScheduledFuture<?> schedule = schedulerService.schedule(() -> TestThreadLocalContextHolder.get().orElse(null), 0, TimeUnit.MILLISECONDS);
    waitAtMost(1, TimeUnit.SECONDS).until(matches(() -> assertThat(schedule.get()).isEqualTo("ValueShouldCrossThreadBoundary")));
}
/**
 * Applies the given {@link OpenAPISpecFilter} to the specification and returns
 * a filtered copy: top-level metadata is carried over, every path/webhook item
 * is filtered operation by operation, tags that only appeared on removed
 * operations are dropped, and components are copied (schemas filtered).
 * Returns null when the filter removes the whole specification.
 */
public OpenAPI filter(OpenAPI openAPI, OpenAPISpecFilter filter, Map<String, List<String>> params, Map<String, String> cookies, Map<String, List<String>> headers) {
    OpenAPI filteredOpenAPI = filterOpenAPI(filter, openAPI, params, cookies, headers);
    if (filteredOpenAPI == null) {
        return filteredOpenAPI;
    }
    OpenAPI clone = new OpenAPI();
    clone.info(filteredOpenAPI.getInfo());
    clone.openapi(filteredOpenAPI.getOpenapi());
    clone.jsonSchemaDialect(filteredOpenAPI.getJsonSchemaDialect());
    clone.setSpecVersion(filteredOpenAPI.getSpecVersion());
    clone.setExtensions(filteredOpenAPI.getExtensions());
    clone.setExternalDocs(filteredOpenAPI.getExternalDocs());
    clone.setSecurity(filteredOpenAPI.getSecurity());
    clone.setServers(filteredOpenAPI.getServers());
    // NOTE(review): tags are copied from the unfiltered openAPI, not from
    // filteredOpenAPI — presumably filterOpenAPI never changes the tag list;
    // confirm before changing this.
    clone.tags(filteredOpenAPI.getTags() == null ? null : new ArrayList<>(openAPI.getTags()));
    final Set<String> allowedTags = new HashSet<>();
    final Set<String> filteredTags = new HashSet<>();
    Paths clonedPaths = new Paths();
    if (filteredOpenAPI.getPaths() != null) {
        for (String resourcePath : filteredOpenAPI.getPaths().keySet()) {
            PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null) {
                // Path items with no surviving operations are dropped entirely.
                if (!clonedPathItem.readOperations().isEmpty()) {
                    clonedPaths.addPathItem(resourcePath, clonedPathItem);
                }
            }
        }
        clone.paths(clonedPaths);
    }
    // Remove tags that only appeared on filtered-out operations.
    filteredTags.removeAll(allowedTags);
    final List<Tag> tags = clone.getTags();
    if (tags != null && !filteredTags.isEmpty()) {
        tags.removeIf(tag -> filteredTags.contains(tag.getName()));
        if (clone.getTags().isEmpty()) {
            clone.setTags(null);
        }
    }
    if (filteredOpenAPI.getWebhooks() != null) {
        for (String resourcePath : filteredOpenAPI.getWebhooks().keySet()) {
            // Fix: the webhook item was previously looked up in getPaths()
            // instead of getWebhooks(), returning the wrong item (or throwing
            // an NPE when the spec had webhooks but no paths).
            PathItem pathItem = filteredOpenAPI.getWebhooks().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null) {
                if (!clonedPathItem.readOperations().isEmpty()) {
                    clone.addWebhooks(resourcePath, clonedPathItem);
                }
            }
        }
    }
    if (filteredOpenAPI.getComponents() != null) {
        clone.components(new Components());
        // Only schemas go through the filter; the remaining component maps are
        // carried over unchanged.
        clone.getComponents().setSchemas(filterComponentsSchema(filter, filteredOpenAPI.getComponents().getSchemas(), params, cookies, headers));
        clone.getComponents().setSecuritySchemes(filteredOpenAPI.getComponents().getSecuritySchemes());
        clone.getComponents().setCallbacks(filteredOpenAPI.getComponents().getCallbacks());
        clone.getComponents().setExamples(filteredOpenAPI.getComponents().getExamples());
        clone.getComponents().setExtensions(filteredOpenAPI.getComponents().getExtensions());
        clone.getComponents().setHeaders(filteredOpenAPI.getComponents().getHeaders());
        clone.getComponents().setLinks(filteredOpenAPI.getComponents().getLinks());
        clone.getComponents().setParameters(filteredOpenAPI.getComponents().getParameters());
        clone.getComponents().setRequestBodies(filteredOpenAPI.getComponents().getRequestBodies());
        clone.getComponents().setResponses(filteredOpenAPI.getComponents().getResponses());
        clone.getComponents().setPathItems(filteredOpenAPI.getComponents().getPathItems());
    }
    if (filter.isRemovingUnreferencedDefinitions()) {
        clone = removeBrokenReferenceDefinitions(clone);
    }
    return clone;
}
@Test(description = "it should filter away the pet resource")
public void filterAwayPetResource() throws IOException {
    // After filtering, no HTTP operation may survive on any remaining path.
    final OpenAPI openAPI = getOpenAPI(RESOURCE_PATH);
    final NoPetOperationsFilter filter = new NoPetOperationsFilter();
    final OpenAPI filtered = new SpecFilter().filter(openAPI, filter, null, null, null);
    if (filtered.getPaths() != null) {
        for (Map.Entry<String, PathItem> entry : filtered.getPaths().entrySet()) {
            assertNull(entry.getValue().getDelete());
            assertNull(entry.getValue().getPost());
            assertNull(entry.getValue().getPut());
            assertNull(entry.getValue().getGet());
            assertNull(entry.getValue().getHead());
            assertNull(entry.getValue().getOptions());
        }
    } else {
        fail("paths should not be null");
    }
}
// Resolves the session for the current request. Token auth yields a token
// session; JWT marks the session as an authenticated browser session; a GitHub
// webhook gets a dedicated session type; otherwise the session is anonymous.
@Override
public UserSession authenticate(HttpRequest request, HttpResponse response) {
    UserAuthResult userAuthResult = loadUser(request, response);
    if (nonNull(userAuthResult.getUserDto())) {
        if (TOKEN.equals(userAuthResult.getAuthType())) {
            return userSessionFactory.create(userAuthResult.getUserDto(), userAuthResult.getTokenDto());
        }
        boolean isAuthenticatedBrowserSession = JWT.equals(userAuthResult.getAuthType());
        return userSessionFactory.create(userAuthResult.getUserDto(), isAuthenticatedBrowserSession);
    } else if (GITHUB_WEBHOOK.equals(userAuthResult.getAuthType())) {
        return userSessionFactory.createGithubWebhookUserSession();
    }
    return userSessionFactory.createAnonymous();
}
@Test
public void authenticate_from_jwt_token() {
    // No credentials in the HTTP headers, but the JWT cookie resolves to A_USER.
    when(httpHeadersAuthentication.authenticate(request, response)).thenReturn(Optional.empty());
    when(jwtHttpHandler.validateToken(request, response)).thenReturn(Optional.of(A_USER));

    UserSession userSession = underTest.authenticate(request, response);

    assertThat(userSession.getUuid()).isEqualTo(A_USER.getUuid());
    // JWT authentication marks the session as an authenticated browser session.
    assertThat(userSession.isAuthenticatedBrowserSession()).isTrue();
    // Authentication succeeded, so no error status is written to the response.
    verify(response, never()).setStatus(anyInt());
}
/**
 * Returns the cell transformer backing this definition.
 *
 * @return the {@code TableCellByTypeTransformer} held by this definition
 */
@Override
public TableCellByTypeTransformer tableCellByTypeTransformer() {
    return transformer;
}
@Test
void can_transform_string_to_empty() throws Throwable {
    // Build a definition whose "[empty]" token stands in for the empty string.
    Method method = JavaDefaultDataTableCellTransformerDefinitionTest.class.getMethod("transform_string_to_type", String.class, Type.class);
    JavaDefaultDataTableCellTransformerDefinition definition = new JavaDefaultDataTableCellTransformerDefinition(
        method, lookup, new String[] { "[empty]" });

    Object transformed = definition.tableCellByTypeTransformer().transform("[empty]", String.class);

    // The token is replaced by "" before being handed to the transform method,
    // so the echoed value after '=' is empty.
    assertThat(transformed, is("transform_string_to_type="));
}
/**
 * Estimates the size, in bytes, of a single mutation.
 *
 * <p>Delete mutations carry no column values, so their size is that of the
 * key set alone. For write mutations the size is the sum of the estimated
 * sizes of the column values. Struct values are rejected because mutations
 * do not support them.
 *
 * @param m the mutation to measure
 * @return the estimated size in bytes
 * @throws IllegalArgumentException if the mutation contains a STRUCT value
 */
static long sizeOf(Mutation m) {
    if (m.getOperation() == Mutation.Op.DELETE) {
        return sizeOf(m.getKeySet());
    }
    long result = 0;
    for (Value v : m.getValues()) {
        switch (v.getType().getCode()) {
            case ARRAY:
                result += estimateArrayValue(v);
                break;
            case STRUCT:
                throw new IllegalArgumentException("Structs are not supported in mutation.");
            default:
                result += estimatePrimitiveValue(v);
        }
    }
    return result;
}
@Test
public void group() throws Exception {
    // One single-column mutation per primitive type.
    Mutation int64 = Mutation.newInsertOrUpdateBuilder("test").set("one").to(1).build();
    Mutation float32 = Mutation.newInsertOrUpdateBuilder("test").set("one").to(1.3f).build();
    Mutation float64 = Mutation.newInsertOrUpdateBuilder("test").set("one").to(2.9).build();
    Mutation bool = Mutation.newInsertOrUpdateBuilder("test").set("one").to(false).build();

    MutationGroup group = MutationGroup.create(int64, float32, float64, bool);

    // Group size is the sum of the member sizes — presumably 8 + 4 + 8 + 1 = 21
    // bytes for INT64/FLOAT32/FLOAT64/BOOL; confirm against the estimator's
    // per-type constants.
    assertThat(MutationSizeEstimator.sizeOf(group), is(21L));
}
/**
 * Returns the time these statistics were created, in milliseconds since the epoch.
 */
@Override
public long getCreationTime() {
    return creationTime;
}
@Test
public void testCreationTime() {
    // Sample the clock on either side of construction so the creation time
    // can be asserted to fall within the [before, after] window.
    long beforeCreationTime = Clock.currentTimeMillis();
    AbstractLocalCollectionStats localListStats = createTestStats();
    long afterCreationTime = Clock.currentTimeMillis();

    assertBetween("creationTime", localListStats.getCreationTime(), beforeCreationTime, afterCreationTime);
}
@Override public TimeValue getRetryInterval(HttpResponse response, int execCount, HttpContext context) { // a server may send a 429 / 503 with a Retry-After header // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After Header header = response.getFirstHeader(HttpHeaders.RETRY_AFTER); TimeValue retryAfter = null; if (header != null) { String value = header.getValue(); try { retryAfter = TimeValue.ofSeconds(Long.parseLong(value)); } catch (NumberFormatException ignore) { Instant retryAfterDate = DateUtils.parseStandardDate(value); if (retryAfterDate != null) { retryAfter = TimeValue.ofMilliseconds(retryAfterDate.toEpochMilli() - System.currentTimeMillis()); } } if (TimeValue.isPositive(retryAfter)) { return retryAfter; } } int delayMillis = 1000 * (int) Math.min(Math.pow(2.0, (long) execCount - 1.0), 64.0); int jitter = ThreadLocalRandom.current().nextInt(Math.max(1, (int) (delayMillis * 0.1))); return TimeValue.ofMilliseconds(delayMillis + jitter); }
@Test
public void retryAfterHeaderAsLong() {
    // A numeric Retry-After value is interpreted as a delay in seconds and
    // takes precedence over the strategy's exponential backoff.
    HttpResponse response = new BasicHttpResponse(503, "Oopsie");
    response.setHeader(HttpHeaders.RETRY_AFTER, "321");

    assertThat(retryStrategy.getRetryInterval(response, 3, null).toSeconds()).isEqualTo(321L);
}
/**
 * Validates and executes a single state transition of the confirm-session flow.
 *
 * <p>The requested action is checked against the session's current state; a
 * disallowed transition is logged and rejected. A successful step advances the
 * session state and persists the authenticator and session.
 *
 * @param flowType selects the flow factory to use
 * @param action the action the client is attempting
 * @param request the incoming app request handed to the flow step
 * @param appSession the session whose state drives the transition
 * @return the flow step's response, or a {@code NokResponse} when the
 *         transition is not allowed
 */
@Override
public AppResponse processAction(String flowType, BaseAction action, AppRequest request, AppSession appSession) throws FlowStateNotDefinedException, FlowNotDefinedException, SharedServiceClientException, NoSuchAlgorithmException, IOException {
    Flow flow = flowFactoryFactory.getFactory(flowType).getFlow(ConfirmSessionFlow.NAME);
    // Reject the action if it is not a legal transition from the current state.
    AbstractFlowStep flowStep = flow.validateStateTransition(stateValueOf(appSession.getState().toUpperCase()), action);
    if (flowStep == null) {
        logger.error("nl.logius.digid.app.domain.shared.flow transition not allowed:{} - {} -> {}", flow.getClass(), appSession.getState(), action);
        return new NokResponse("nl.logius.digid.app.domain.shared.flow transition not allowed");
    }
    flowStep.setAppSession(appSession);
    // Some steps need the device's authenticator; load it only when required.
    if (flowStep.expectAppAuthenticator()) {
        flowStep.setAppAuthenticator(getAppAuthenticator(appSession));
    }
    AppResponse appResponse = flow.processState(flowStep, request);
    // A failed or invalid step must not advance the state or persist anything.
    if (appResponse instanceof NokResponse || !flowStep.isValid()) {
        return appResponse;
    }
    appSession.setState(getStateName(flow.getNextState(stateValueOf(appSession.getState().toUpperCase()), action)));
    if (flowStep.getAppAuthenticator() != null) {
        appAuthenticatorService.save(flowStep.getAppAuthenticator());
        // Copy device details onto the session the first time they become known.
        if (appSession.getDeviceName() == null) {
            appSession.setDeviceName(flowStep.getAppAuthenticator().getDeviceName());
            appSession.setAppCode(flowStep.getAppAuthenticator().getAppCode());
        }
    }
    appSessionService.save(appSession);
    return appResponse;
}
@Test
public void processActionNokResponseTest() throws FlowNotDefinedException, SharedServiceClientException, IOException, NoSuchAlgorithmException, FlowStateNotDefinedException {
    // given: the flow rejects the state transition (no step is returned)
    when(flow.validateStateTransition(any(), any())).thenReturn(null);

    // when
    AppResponse appResponse = confirmationFlowService.processAction("confirm", Action.CONFIRM, confirmRequest, appSession);

    // then: a disallowed transition yields a NOK response
    assertTrue(appResponse instanceof NokResponse);
}
/**
 * Decrypts the given cipher text using the key supplied by the configured
 * cipher provider.
 *
 * @param cipherText the encrypted text to decrypt
 * @return the decrypted plain text
 * @throws CryptoException if the cipher text cannot be decoded or decrypted
 */
@Override
public String decrypt(String cipherText) throws CryptoException {
    return decrypt(cipherProvider.getKey(), cipherText);
}
@Test
public void shouldErrorOutWhenCipherTextIsTamperedWith() {
    // Input that is not valid base64 must surface as a CryptoException
    // wrapping the decoder's IllegalArgumentException (char 0x20 = space).
    assertThatCode(() -> desEncrypter.decrypt("some bad junk"))
        .hasMessageContaining("Illegal base64 character 20")
        .hasCauseInstanceOf(IllegalArgumentException.class)
        .isInstanceOf(CryptoException.class);
}
/**
 * Returns a combine function that computes the maximum of its {@code long} inputs.
 */
public static Combine.BinaryCombineLongFn ofLongs() {
    return new Max.MaxLongFn();
}
@Test
public void testMaxLongFn() {
    // The maximum of [1, 2, 3, 4] is 4.
    testCombineFn(Max.ofLongs(), Lists.newArrayList(1L, 2L, 3L, 4L), 4L);
}
public static Map<String, Object> beanToMap(Object bean, String... properties) { int mapSize = 16; Editor<String> keyEditor = null; if (ArrayUtil.isNotEmpty(properties)) { mapSize = properties.length; final Set<String> propertiesSet = CollUtil.set(false, properties); keyEditor = property -> propertiesSet.contains(property) ? property : null; } // 指明了要复制的属性 所以不忽略null值 return beanToMap(bean, new LinkedHashMap<>(mapSize, 1), false, keyEditor); }
@Test
public void beanToMapTest() {
    final SubPerson person = new SubPerson();
    person.setAge(14);
    person.setOpenid("11213232");
    person.setName("测试A11");
    person.setSubName("sub名字");

    final Map<String, Object> map = BeanUtil.beanToMap(person);
    assertEquals("测试A11", map.get("name"));
    assertEquals(14, map.get("age"));
    assertEquals("11213232", map.get("openid"));
    // static properties should be ignored
    assertFalse(map.containsKey("SUBNAME"));
}
/**
 * Fits independent component analysis, extracting {@code p} independent
 * components using default options.
 *
 * @param data the training data. NOTE(review): passed straight to the
 *             {@code Properties} overload; callers appear to put signals in
 *             rows (the test transposes first) — confirm against that overload.
 * @param p the number of independent components to extract
 * @return the fitted ICA model
 */
public static ICA fit(double[][] data, int p) {
    return fit(data, p, new Properties());
}
@Test
public void test() throws Exception {
    System.out.println("ICA");
    MathEx.setSeed(19650218); // to get repeatable results.

    CSVFormat format = CSVFormat.Builder.create().build();
    CSV csv = new CSV(format);
    double[][] data = csv.read(Paths.getTestData("ica/ica.csv")).toArray(false, CategoricalEncoder.DUMMY);

    // Fit 2 independent components; the data is transposed before fitting.
    ICA ica = ICA.fit(MathEx.transpose(data), 2);
    assertEquals(2, ica.components.length);
    assertEquals(data.length, ica.components[0].length);

    // Spot-check a few component coefficients against known-good values
    // (stable because the seed above makes the run deterministic).
    assertEquals( 0.02003, ica.components[0][0], 1E-5);
    assertEquals(-0.03275, ica.components[1][0], 1E-5);
    assertEquals(-0.01140, ica.components[0][1], 1E-5);
    assertEquals(-0.01084, ica.components[1][1], 1E-5);
}
/**
 * Sets the name of this cache configuration.
 *
 * @param name the cache name
 * @return this configuration, for call chaining
 */
public CacheConfig<K, V> setName(String name) {
    this.name = name;
    return this;
}
@Test
public void testGetPreConfiguredCache() {
    // A cache declared in the member Config should be retrievable through the
    // JCache CacheManager on every instance of the cluster.
    Config config = new Config();
    config.addCacheConfig(new CacheSimpleConfig().setName("test"));

    int count = 4;
    TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory(count);
    for (int i = 0; i < count; i++) {
        HazelcastInstance instance = factory.newHazelcastInstance(config);
        CachingProvider provider = createServerCachingProvider(instance);
        CacheManager cacheManager = provider.getCacheManager();

        Cache<Object, Object> cache = cacheManager.getCache("test");
        assertNotNull("Pre-configured cache cannot be retrieved on instance: " + i, cache);
    }
}
/**
 * Resolves a location string to a URL.
 *
 * <p>A location without a URL scheme, or with the explicit
 * {@code classpath:} scheme, is looked up on the classpath; anything else is
 * treated as a regular URL.
 *
 * @param location the resource location; must not be null
 * @return the resolved URL, never null
 * @throws MalformedURLException if the location is an invalid URL or a bare
 *         {@code classpath:} with no path
 * @throws FileNotFoundException if the classpath lookup finds nothing
 */
public static URL urlForResource(String location) throws MalformedURLException, FileNotFoundException {
    if (location == null) {
        throw new NullPointerException("location is required");
    }

    final URL url;
    if (!location.matches(SCHEME_PATTERN)) {
        // No scheme at all: resolve against this class loader's classpath.
        url = Loader.getResourceBySelfClassLoader(location);
    } else if (location.startsWith(CLASSPATH_SCHEME)) {
        // Explicit "classpath:" scheme: strip it (and any leading slash) first.
        String path = location.substring(CLASSPATH_SCHEME.length());
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        if (path.isEmpty()) {
            throw new MalformedURLException("path is required");
        }
        url = Loader.getResourceBySelfClassLoader(path);
    } else {
        url = new URL(location);
    }

    if (url == null) {
        throw new FileNotFoundException(location);
    }
    return url;
}
@Test(expected = MalformedURLException.class)
public void testExplicitClasspathUrlEmptyPath() throws Exception {
    // A bare "classpath:" with nothing after the scheme has no path and must be rejected.
    LocationUtil.urlForResource(LocationUtil.CLASSPATH_SCHEME);
}
@Override public double distanceBtw(Point p1, Point p2) { numCalls++; confirmRequiredDataIsPresent(p1); confirmRequiredDataIsPresent(p2); Duration timeDelta = Duration.between(p1.time(), p2.time()); //can be positive of negative timeDelta = timeDelta.abs(); Double horizontalDistanceInNm = p1.distanceInNmTo(p2); Double horizontalDistanceInFeet = horizontalDistanceInNm * Spherical.feetPerNM(); Double altitudeDifferenceInFeet = Math.abs(p1.altitude().inFeet() - p2.altitude().inFeet()); Double distInFeet = hypot(horizontalDistanceInFeet, altitudeDifferenceInFeet); return (distanceCoef * distInFeet) + (timeCoef * timeDelta.toMillis()); }
@Test
public void testTimeComputation() {
    // Two points at the same position and altitude, one second apart: the
    // spatial term is zero, so the distance is purely the time coefficient
    // times 1000 ms, and must be symmetric in the argument order.
    PointDistanceMetric metric = new PointDistanceMetric(1.0, 1.0);
    PointDistanceMetric metric2 = new PointDistanceMetric(2.0, 1.0);

    Instant time1 = Instant.EPOCH;
    Instant time2 = time1.plusSeconds(1L);

    Point p1 = new PointBuilder()
        .latLong(0.0, 0.0)
        .altitude(Distance.ofFeet(0.0))
        .time(time1)
        .build();
    Point p2 = new PointBuilder()
        .latLong(0.0, 0.0)
        .altitude(Distance.ofFeet(0.0))
        .time(time2)
        .build();

    double TOLERANCE = 0.00001;
    assertEquals(
        1000.0, metric.distanceBtw(p1, p2), TOLERANCE,
        "A 1 second time difference should produce a distance of 1000.0 (when coef = 1.0)"
    );
    assertEquals(
        1000.0, metric.distanceBtw(p2, p1), TOLERANCE,
        "Switching the points shouldn't change the distance measurement"
    );
    assertEquals(
        2000.0, metric2.distanceBtw(p1, p2), TOLERANCE,
        "A 1 second time difference should produce a distance of 2000.0 (when coef = 2.0)"
    );
    assertEquals(
        2000.0, metric2.distanceBtw(p2, p1), TOLERANCE,
        "Switching the points shouldn't change the distance measurement"
    );
}
@VisibleForTesting JobMeta filterPrivateDatabases( JobMeta jobMeta ) { Set<String> privateDatabases = jobMeta.getPrivateDatabases(); if ( privateDatabases != null ) { // keep only private transformation databases for ( Iterator<DatabaseMeta> it = jobMeta.getDatabases().iterator(); it.hasNext(); ) { DatabaseMeta databaseMeta = it.next(); String databaseName = databaseMeta.getName(); if ( !privateDatabases.contains( databaseName ) && !jobMeta.isDatabaseConnectionUsed( databaseMeta ) ) { it.remove(); } } } return jobMeta; }
@Test
public void filterPrivateDatabasesWithOnePrivateDatabaseAndOneInUseTest() {
    IUnifiedRepository purMock = mock( IUnifiedRepository.class );
    JobMeta jobMeta = spy( JobMeta.class );
    jobMeta.setDatabases( getDummyDatabases() );
    // Declare "database2" as private; the first dummy database is reported as in use.
    Set<String> privateDatabases = new HashSet<>( );
    privateDatabases.add( "database2" );
    jobMeta.setPrivateDatabases( privateDatabases );
    when( jobMeta.isDatabaseConnectionUsed( getDummyDatabases().get( 0 ) ) ).thenReturn( true );

    StreamToJobNodeConverter jobConverter = new StreamToJobNodeConverter( purMock );
    // Both the private database and the in-use one must survive the filter.
    assertEquals( 2, jobConverter.filterPrivateDatabases( jobMeta ).getDatabases().size() );
}