language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/RemotingCommand.java
|
{
"start": 1921,
"end": 22936
}
|
class ____ {
    public static final String SERIALIZE_TYPE_PROPERTY = "rocketmq.serialize.type";
    public static final String SERIALIZE_TYPE_ENV = "ROCKETMQ_SERIALIZE_TYPE";
    public static final String REMOTING_VERSION_KEY = "rocketmq.remoting.version";
    static final Logger log = LoggerFactory.getLogger(LoggerName.ROCKETMQ_REMOTING_NAME);
    // Bit positions inside 'flag': bit 0 distinguishes request (0) from response (1);
    // bit 1 marks a oneway RPC (no response expected).
    private static final int RPC_TYPE = 0;
    private static final int RPC_ONEWAY = 1;
    // Reflection caches shared across threads. They are plain HashMaps, so every
    // read AND write must hold the map's own monitor (see getClazzFields,
    // isFieldNullable, getCanonicalName). The original code synchronized only the
    // writes, which is unsafe for java.util.HashMap.
    private static final Map<Class<? extends CommandCustomHeader>, Field[]> CLASS_HASH_MAP =
        new HashMap<>();
    private static final Map<Class, String> CANONICAL_NAME_CACHE = new HashMap<>();
    private static final Map<Field, Boolean> NULLABLE_FIELD_CACHE = new HashMap<>();
    // Canonical names of the header field types supported by the reflective decoder.
    private static final String STRING_CANONICAL_NAME = String.class.getCanonicalName();
    private static final String DOUBLE_CANONICAL_NAME_1 = Double.class.getCanonicalName();
    private static final String DOUBLE_CANONICAL_NAME_2 = double.class.getCanonicalName();
    private static final String INTEGER_CANONICAL_NAME_1 = Integer.class.getCanonicalName();
    private static final String INTEGER_CANONICAL_NAME_2 = int.class.getCanonicalName();
    private static final String LONG_CANONICAL_NAME_1 = Long.class.getCanonicalName();
    private static final String LONG_CANONICAL_NAME_2 = long.class.getCanonicalName();
    private static final String BOOLEAN_CANONICAL_NAME_1 = Boolean.class.getCanonicalName();
    private static final String BOOLEAN_CANONICAL_NAME_2 = boolean.class.getCanonicalName();
    private static final String BOUNDARY_TYPE_CANONICAL_NAME = BoundaryType.class.getCanonicalName();
    // Cached value of the "rocketmq.remoting.version" system property; -1 means "not read yet".
    private static volatile int configVersion = -1;
    private static AtomicInteger requestId = new AtomicInteger(0);
    private static SerializeType serializeTypeConfigInThisServer = SerializeType.JSON;

    static {
        // Wire protocol may be forced via system property or environment variable;
        // an unknown value is a configuration error and aborts class loading.
        final String protocol = System.getProperty(SERIALIZE_TYPE_PROPERTY, System.getenv(SERIALIZE_TYPE_ENV));
        if (!StringUtils.isBlank(protocol)) {
            try {
                serializeTypeConfigInThisServer = SerializeType.valueOf(protocol);
            } catch (IllegalArgumentException e) {
                throw new RuntimeException("parser specified protocol error. protocol=" + protocol, e);
            }
        }
    }

    private int code;
    private LanguageCode language = LanguageCode.JAVA;
    private int version = 0;
    private int opaque = requestId.getAndIncrement();
    private int flag = 0;
    private String remark;
    private HashMap<String, String> extFields;
    // The typed header object; serialized into extFields, never sent directly.
    private transient CommandCustomHeader customHeader;
    // Memoized result of decodeCommandCustomHeader(..., true).
    private transient CommandCustomHeader cachedHeader;
    private SerializeType serializeTypeCurrentRPC = serializeTypeConfigInThisServer;
    private transient byte[] body;
    private boolean suspended;
    private transient Stopwatch processTimer;
    private transient List<CommandCallback> callbackList;

    protected RemotingCommand() {
    }

    /** Creates a request command carrying the given code and typed header. */
    public static RemotingCommand createRequestCommand(int code, CommandCustomHeader customHeader) {
        RemotingCommand cmd = new RemotingCommand();
        cmd.setCode(code);
        cmd.customHeader = customHeader;
        setCmdVersion(cmd);
        return cmd;
    }

    /** Creates a response command (response bit set) carrying the given code and typed header. */
    public static RemotingCommand createResponseCommandWithHeader(int code, CommandCustomHeader customHeader) {
        RemotingCommand cmd = new RemotingCommand();
        cmd.setCode(code);
        cmd.markResponseType();
        cmd.customHeader = customHeader;
        setCmdVersion(cmd);
        return cmd;
    }

    /**
     * Stamps the remoting version onto {@code cmd}, reading (and caching) the
     * "rocketmq.remoting.version" system property on first use.
     */
    protected static void setCmdVersion(RemotingCommand cmd) {
        if (configVersion >= 0) {
            cmd.setVersion(configVersion);
        } else {
            String v = System.getProperty(REMOTING_VERSION_KEY);
            if (v != null) {
                int value = Integer.parseInt(v);
                cmd.setVersion(value);
                configVersion = value;
            }
        }
    }

    /** Creates a SYSTEM_ERROR response with a placeholder remark; callers overwrite code/remark. */
    public static RemotingCommand createResponseCommand(Class<? extends CommandCustomHeader> classHeader) {
        return createResponseCommand(RemotingSysResponseCode.SYSTEM_ERROR, "not set any response code", classHeader);
    }

    public static RemotingCommand buildErrorResponse(int code, String remark,
        Class<? extends CommandCustomHeader> classHeader) {
        final RemotingCommand response = RemotingCommand.createResponseCommand(classHeader);
        response.setCode(code);
        response.setRemark(remark);
        return response;
    }

    public static RemotingCommand buildErrorResponse(int code, String remark) {
        return buildErrorResponse(code, remark, null);
    }

    /**
     * Creates a response command, instantiating {@code classHeader} via its
     * no-arg constructor when given.
     *
     * @return the command, or {@code null} if the header class could not be
     *     instantiated reflectively (historical contract relied on by callers).
     */
    public static RemotingCommand createResponseCommand(int code, String remark,
        Class<? extends CommandCustomHeader> classHeader) {
        RemotingCommand cmd = new RemotingCommand();
        cmd.markResponseType();
        cmd.setCode(code);
        cmd.setRemark(remark);
        setCmdVersion(cmd);
        if (classHeader != null) {
            try {
                CommandCustomHeader objectHeader = classHeader.getDeclaredConstructor().newInstance();
                cmd.customHeader = objectHeader;
            } catch (ReflectiveOperationException e) {
                // Same outcome as the four individual catch clauses it replaces:
                // any reflective failure yields null.
                return null;
            }
        }
        return cmd;
    }

    public static RemotingCommand createResponseCommand(int code, String remark) {
        return createResponseCommand(code, remark, null);
    }

    public static RemotingCommand decode(final byte[] array) throws RemotingCommandException {
        ByteBuffer byteBuffer = ByteBuffer.wrap(array);
        return decode(byteBuffer);
    }

    public static RemotingCommand decode(final ByteBuffer byteBuffer) throws RemotingCommandException {
        return decode(Unpooled.wrappedBuffer(byteBuffer));
    }

    /**
     * Decodes a full frame: [4-byte header-length word][header][body]. The top
     * byte of the length word carries the serialize type (see markProtocolType).
     *
     * @throws RemotingCommandException if the declared header length exceeds the frame
     */
    public static RemotingCommand decode(final ByteBuf byteBuffer) throws RemotingCommandException {
        int length = byteBuffer.readableBytes();
        int oriHeaderLen = byteBuffer.readInt();
        int headerLength = getHeaderLength(oriHeaderLen);
        if (headerLength > length - 4) {
            throw new RemotingCommandException("decode error, bad header length: " + headerLength);
        }
        RemotingCommand cmd = headerDecode(byteBuffer, headerLength, getProtocolType(oriHeaderLen));
        int bodyLength = length - 4 - headerLength;
        byte[] bodyData = null;
        if (bodyLength > 0) {
            bodyData = new byte[bodyLength];
            byteBuffer.readBytes(bodyData);
        }
        cmd.body = bodyData;
        return cmd;
    }

    /** Lower 24 bits of the header-length word are the actual header length. */
    public static int getHeaderLength(int length) {
        return length & 0xFFFFFF;
    }

    private static RemotingCommand headerDecode(ByteBuf byteBuffer, int len,
        SerializeType type) throws RemotingCommandException {
        switch (type) {
            case JSON:
                byte[] headerData = new byte[len];
                byteBuffer.readBytes(headerData);
                RemotingCommand resultJson = RemotingSerializable.decode(headerData, RemotingCommand.class);
                resultJson.setSerializeTypeCurrentRPC(type);
                return resultJson;
            case ROCKETMQ:
                RemotingCommand resultRMQ = RocketMQSerializable.rocketMQProtocolDecode(byteBuffer, len);
                resultRMQ.setSerializeTypeCurrentRPC(type);
                return resultRMQ;
            default:
                break;
        }
        return null;
    }

    /** The serialize type is stored in the top byte of the header-length word. */
    public static SerializeType getProtocolType(int source) {
        return SerializeType.valueOf((byte) ((source >> 24) & 0xFF));
    }

    public static int createNewRequestId() {
        return requestId.getAndIncrement();
    }

    public static SerializeType getSerializeTypeConfigInThisServer() {
        return serializeTypeConfigInThisServer;
    }

    /** Packs the serialize type into the top byte of a 24-bit header length. */
    public static int markProtocolType(int source, SerializeType type) {
        return (type.getCode() << 24) | (source & 0x00FFFFFF);
    }

    public void markResponseType() {
        int bits = 1 << RPC_TYPE;
        this.flag |= bits;
    }

    public CommandCustomHeader readCustomHeader() {
        return customHeader;
    }

    public void writeCustomHeader(CommandCustomHeader customHeader) {
        this.customHeader = customHeader;
    }

    public <T extends CommandCustomHeader> T decodeCommandCustomHeader(
        Class<T> classHeader) throws RemotingCommandException {
        return decodeCommandCustomHeader(classHeader, false);
    }

    /**
     * Decodes the header from extFields, optionally reusing a previously decoded
     * instance when {@code isCached} is true.
     */
    public <T extends CommandCustomHeader> T decodeCommandCustomHeader(
        Class<T> classHeader, boolean isCached) throws RemotingCommandException {
        if (isCached && cachedHeader != null) {
            return classHeader.cast(cachedHeader);
        }
        cachedHeader = decodeCommandCustomHeaderDirectly(classHeader, true);
        if (cachedHeader == null) {
            return null;
        }
        return classHeader.cast(cachedHeader);
    }

    /**
     * Reflectively populates a new {@code classHeader} instance from extFields.
     * Supports String, int/Integer, long/Long, boolean/Boolean, double/Double and
     * BoundaryType fields; a FastCodesHeader may decode itself when
     * {@code useFastEncode} is set.
     *
     * @return the populated header, or {@code null} if it could not be instantiated
     * @throws RemotingCommandException if a non-nullable field is absent
     */
    public <T extends CommandCustomHeader> T decodeCommandCustomHeaderDirectly(Class<T> classHeader,
        boolean useFastEncode) throws RemotingCommandException {
        T objectHeader;
        try {
            objectHeader = classHeader.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            return null;
        }
        if (this.extFields != null) {
            if (objectHeader instanceof FastCodesHeader && useFastEncode) {
                ((FastCodesHeader) objectHeader).decode(this.extFields);
                objectHeader.checkFields();
                return objectHeader;
            }
            Field[] fields = getClazzFields(classHeader);
            for (Field field : fields) {
                if (!Modifier.isStatic(field.getModifiers())) {
                    String fieldName = field.getName();
                    // "this$..." synthetic fields of inner classes are skipped.
                    if (!fieldName.startsWith("this")) {
                        try {
                            String value = this.extFields.get(fieldName);
                            if (null == value) {
                                if (!isFieldNullable(field)) {
                                    throw new RemotingCommandException("the custom field <" + fieldName + "> is null");
                                }
                                continue;
                            }
                            field.setAccessible(true);
                            String type = getCanonicalName(field.getType());
                            Object valueParsed;
                            if (type.equals(STRING_CANONICAL_NAME)) {
                                valueParsed = value;
                            } else if (type.equals(INTEGER_CANONICAL_NAME_1) || type.equals(INTEGER_CANONICAL_NAME_2)) {
                                valueParsed = Integer.parseInt(value);
                            } else if (type.equals(LONG_CANONICAL_NAME_1) || type.equals(LONG_CANONICAL_NAME_2)) {
                                valueParsed = Long.parseLong(value);
                            } else if (type.equals(BOOLEAN_CANONICAL_NAME_1) || type.equals(BOOLEAN_CANONICAL_NAME_2)) {
                                valueParsed = Boolean.parseBoolean(value);
                            } else if (type.equals(DOUBLE_CANONICAL_NAME_1) || type.equals(DOUBLE_CANONICAL_NAME_2)) {
                                valueParsed = Double.parseDouble(value);
                            } else if (type.equals(BOUNDARY_TYPE_CANONICAL_NAME)) {
                                valueParsed = BoundaryType.getType(value);
                            } else {
                                throw new RemotingCommandException("the custom field <" + fieldName + "> type is not supported");
                            }
                            field.set(objectHeader, valueParsed);
                        } catch (Throwable e) {
                            // Best-effort decoding: a single bad field is logged, not fatal.
                            log.error("Failed field [{}] decoding", fieldName, e);
                        }
                    }
                }
            }
            objectHeader.checkFields();
        }
        return objectHeader;
    }

    //make it able to test
    /**
     * Returns the non-inherited-from-Object fields of {@code classHeader},
     * cached. Fix: the cache read is now synchronized like the write — plain
     * HashMap reads concurrent with writes are undefined.
     */
    Field[] getClazzFields(Class<? extends CommandCustomHeader> classHeader) {
        Field[] field;
        synchronized (CLASS_HASH_MAP) {
            field = CLASS_HASH_MAP.get(classHeader);
        }
        if (field == null) {
            Set<Field> fieldList = new HashSet<>();
            for (Class className = classHeader; className != Object.class; className = className.getSuperclass()) {
                Field[] fields = className.getDeclaredFields();
                fieldList.addAll(Arrays.asList(fields));
            }
            field = fieldList.toArray(new Field[0]);
            synchronized (CLASS_HASH_MAP) {
                CLASS_HASH_MAP.put(classHeader, field);
            }
        }
        return field;
    }

    /**
     * True when the field lacks @CFNotNull, cached per Field. Fix: reads are now
     * synchronized; the original containsKey/get pair raced with concurrent puts.
     */
    private boolean isFieldNullable(Field field) {
        Boolean nullable;
        synchronized (NULLABLE_FIELD_CACHE) {
            nullable = NULLABLE_FIELD_CACHE.get(field);
        }
        if (nullable == null) {
            Annotation annotation = field.getAnnotation(CFNotNull.class);
            nullable = annotation == null;
            synchronized (NULLABLE_FIELD_CACHE) {
                NULLABLE_FIELD_CACHE.put(field, nullable);
            }
        }
        return nullable;
    }

    /** Canonical name of {@code clazz}, cached; read/write both synchronized (see above). */
    private String getCanonicalName(Class clazz) {
        String name;
        synchronized (CANONICAL_NAME_CACHE) {
            name = CANONICAL_NAME_CACHE.get(clazz);
        }
        if (name == null) {
            name = clazz.getCanonicalName();
            synchronized (CANONICAL_NAME_CACHE) {
                CANONICAL_NAME_CACHE.put(clazz, name);
            }
        }
        return name;
    }

    /** Encodes the whole frame (length word, header-length word, header, body) into a heap buffer. */
    public ByteBuffer encode() {
        // 1> header length size
        int length = 4;
        // 2> header data length
        byte[] headerData = this.headerEncode();
        length += headerData.length;
        // 3> body data length
        if (this.body != null) {
            length += body.length;
        }
        ByteBuffer result = ByteBuffer.allocate(4 + length);
        // length
        result.putInt(length);
        // header length (top byte = serialize type)
        result.putInt(markProtocolType(headerData.length, serializeTypeCurrentRPC));
        // header data
        result.put(headerData);
        // body data
        if (this.body != null) {
            result.put(this.body);
        }
        result.flip();
        return result;
    }

    private byte[] headerEncode() {
        this.makeCustomHeaderToNet();
        if (SerializeType.ROCKETMQ == serializeTypeCurrentRPC) {
            return RocketMQSerializable.rocketMQProtocolEncode(this);
        } else {
            return RemotingSerializable.encode(this);
        }
    }

    /** Flattens the typed customHeader into extFields (String->String) for the wire. */
    public void makeCustomHeaderToNet() {
        if (this.customHeader != null) {
            Field[] fields = getClazzFields(customHeader.getClass());
            if (null == this.extFields) {
                this.extFields = new HashMap<>();
            }
            for (Field field : fields) {
                if (!Modifier.isStatic(field.getModifiers())) {
                    String name = field.getName();
                    if (!name.startsWith("this")) {
                        Object value = null;
                        try {
                            field.setAccessible(true);
                            value = field.get(this.customHeader);
                        } catch (Exception e) {
                            log.error("Failed to access field [{}]", name, e);
                        }
                        if (value != null) {
                            this.extFields.put(name, value.toString());
                        }
                    }
                }
            }
        }
    }

    /** Encodes the frame preamble and header straight into {@code out}, backfilling the two length words. */
    public void fastEncodeHeader(ByteBuf out) {
        int bodySize = this.body != null ? this.body.length : 0;
        int beginIndex = out.writerIndex();
        // Reserve 8 bytes for the frame-length and header-length words.
        out.writeLong(0);
        int headerSize;
        if (SerializeType.ROCKETMQ == serializeTypeCurrentRPC) {
            if (customHeader != null && !(customHeader instanceof FastCodesHeader)) {
                this.makeCustomHeaderToNet();
            }
            headerSize = RocketMQSerializable.rocketMQProtocolEncode(this, out);
        } else {
            this.makeCustomHeaderToNet();
            byte[] header = RemotingSerializable.encode(this);
            headerSize = header.length;
            out.writeBytes(header);
        }
        out.setInt(beginIndex, 4 + headerSize + bodySize);
        out.setInt(beginIndex + 4, markProtocolType(headerSize, serializeTypeCurrentRPC));
    }

    public ByteBuffer encodeHeader() {
        return encodeHeader(this.body != null ? this.body.length : 0);
    }

    /**
     * Encodes the frame preamble and header only; the length word still counts
     * {@code bodyLength} so the caller can append the body separately.
     */
    public ByteBuffer encodeHeader(final int bodyLength) {
        // 1> header length size
        int length = 4;
        // 2> header data length
        byte[] headerData;
        headerData = this.headerEncode();
        length += headerData.length;
        // 3> body data length
        length += bodyLength;
        ByteBuffer result = ByteBuffer.allocate(4 + length - bodyLength);
        // length
        result.putInt(length);
        // header length
        result.putInt(markProtocolType(headerData.length, serializeTypeCurrentRPC));
        // header data
        result.put(headerData);
        // Cast keeps compatibility when compiled on JDK9+ and run on JDK8.
        ((Buffer) result).flip();
        return result;
    }

    public void markOnewayRPC() {
        int bits = 1 << RPC_ONEWAY;
        this.flag |= bits;
    }

    @JSONField(serialize = false)
    public boolean isOnewayRPC() {
        int bits = 1 << RPC_ONEWAY;
        return (this.flag & bits) == bits;
    }

    public int getCode() {
        return code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    @JSONField(serialize = false)
    public RemotingCommandType getType() {
        if (this.isResponseType()) {
            return RemotingCommandType.RESPONSE_COMMAND;
        }
        return RemotingCommandType.REQUEST_COMMAND;
    }

    @JSONField(serialize = false)
    public boolean isResponseType() {
        int bits = 1 << RPC_TYPE;
        return (this.flag & bits) == bits;
    }

    public LanguageCode getLanguage() {
        return language;
    }

    public void setLanguage(LanguageCode language) {
        this.language = language;
    }

    public int getVersion() {
        return version;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    public int getOpaque() {
        return opaque;
    }

    public void setOpaque(int opaque) {
        this.opaque = opaque;
    }

    public int getFlag() {
        return flag;
    }

    public void setFlag(int flag) {
        this.flag = flag;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    public byte[] getBody() {
        return body;
    }

    public void setBody(byte[] body) {
        this.body = body;
    }

    @JSONField(serialize = false)
    public boolean isSuspended() {
        return suspended;
    }

    @JSONField(serialize = false)
    public void setSuspended(boolean suspended) {
        this.suspended = suspended;
    }

    public HashMap<String, String> getExtFields() {
        return extFields;
    }

    public void setExtFields(HashMap<String, String> extFields) {
        this.extFields = extFields;
    }

    public void addExtField(String key, String value) {
        if (null == extFields) {
            extFields = new HashMap<>(256);
        }
        extFields.put(key, value);
    }

    /**
     * Adds the mapping only if the key is absent. Fix: lazily creates extFields
     * like {@link #addExtField}; previously this threw NPE when extFields was
     * still null.
     */
    public void addExtFieldIfNotExist(String key, String value) {
        if (null == extFields) {
            extFields = new HashMap<>(256);
        }
        extFields.putIfAbsent(key, value);
    }

    @Override
    public String toString() {
        return "RemotingCommand [code=" + code + ", language=" + language + ", version=" + version + ", opaque=" + opaque + ", flag(B)="
            + Integer.toBinaryString(flag) + ", remark=" + remark + ", extFields=" + extFields + ", serializeTypeCurrentRPC="
            + serializeTypeCurrentRPC + "]";
    }

    public SerializeType getSerializeTypeCurrentRPC() {
        return serializeTypeCurrentRPC;
    }

    public void setSerializeTypeCurrentRPC(SerializeType serializeTypeCurrentRPC) {
        this.serializeTypeCurrentRPC = serializeTypeCurrentRPC;
    }

    public Stopwatch getProcessTimer() {
        return processTimer;
    }

    public void setProcessTimer(Stopwatch processTimer) {
        this.processTimer = processTimer;
    }

    public List<CommandCallback> getCallbackList() {
        return callbackList;
    }

    public void setCallbackList(List<CommandCallback> callbackList) {
        this.callbackList = callbackList;
    }
}
|
RemotingCommand
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/issue50/SecurityConfig.java
|
{
"start": 2141,
"end": 3540
}
|
class ____ {

    @Autowired
    private UserRepository myUserRepository;

    @Bean
    SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
        // Every request is permitted at the web layer; authentication itself is
        // performed by the provider registered below.
        // @formatter:off
        http
            .authorizeHttpRequests((requests) -> requests
                .requestMatchers(pathPattern("/*")).permitAll())
            .authenticationProvider(authenticationProvider());
        // @formatter:on
        return http.build();
    }

    @Bean
    AuthenticationManager authenticationManager() {
        // A method reference on the provider satisfies the single-method
        // AuthenticationManager contract directly.
        return authenticationProvider()::authenticate;
    }

    @Bean
    public AuthenticationProvider authenticationProvider() {
        Assert.notNull(this.myUserRepository, "myUserRepository cannot be null");
        // Anonymous provider that authenticates by exact password match against
        // the repository; supports() accepts every token type.
        return new AuthenticationProvider() {

            @Override
            public Authentication authenticate(Authentication authentication) throws AuthenticationException {
                Object principal = authentication.getPrincipal();
                String username = String.valueOf(principal);
                User found = SecurityConfig.this.myUserRepository.findByUsername(username);
                if (found == null) {
                    throw new UsernameNotFoundException("No user for principal " + principal);
                }
                if (!authentication.getCredentials().equals(found.getPassword())) {
                    throw new BadCredentialsException("Invalid password");
                }
                return new TestingAuthenticationToken(principal, null, "ROLE_USER");
            }

            @Override
            public boolean supports(Class<?> authentication) {
                return true;
            }

        };
    }

}
|
SecurityConfig
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/vertx/OpenApiStoreSchemaTestCase.java
|
{
"start": 413,
"end": 1286
}
|
class ____ {

    // Directory the extension is configured (below) to write the schema files
    // into. Fix: made final — it is a constant and was previously mutable.
    private static final String directory = "target/generated/vertx/";
    private static final String OPEN_API_DOT = "openapi.";

    @RegisterExtension
    static QuarkusUnitTest runner = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClasses(OpenApiRoute.class)
                    .addAsResource(new StringAsset("quarkus.smallrye-openapi.store-schema-directory=" + directory),
                            "application.properties"));

    /**
     * Verifies that starting the application stored both the JSON and the YAML
     * flavour of the OpenAPI document in the configured directory.
     */
    @Test
    public void testOpenApiPathAccessResource() {
        assertSchemaStored(Format.JSON);
        assertSchemaStored(Format.YAML);
    }

    /** Asserts that the stored schema file for the given format exists on disk. */
    private static void assertSchemaStored(Format format) {
        Path schema = Paths.get(directory, OPEN_API_DOT + format.toString().toLowerCase());
        Assertions.assertTrue(Files.exists(schema));
    }
}
|
OpenApiStoreSchemaTestCase
|
java
|
apache__kafka
|
connect/api/src/main/java/org/apache/kafka/connect/source/SourceConnector.java
|
{
"start": 1372,
"end": 6782
}
|
class ____ extends Connector {

    @Override
    protected SourceConnectorContext context() {
        // The base class stores the context untyped; narrow it for source connectors.
        return (SourceConnectorContext) context;
    }

    /**
     * Reports whether this connector can provide exactly-once semantics under the
     * proposed configuration. Worker-level exactly-once support is already enabled
     * whenever the runtime calls this.
     *
     * <p>The default returns {@code null} for backwards compatibility; authors
     * should override it to return {@link ExactlyOnceSupport#SUPPORTED SUPPORTED}
     * or {@link ExactlyOnceSupport#UNSUPPORTED UNSUPPORTED} explicitly.
     *
     * <p>Like {@link #validate(Map) validate}, the runtime may invoke this before
     * {@link #start(Map) start} when the connector will run with exactly-once
     * support.
     *
     * @param connectorConfig the configuration that will be used for the connector.
     * @return {@link ExactlyOnceSupport#SUPPORTED} when exactly-once can be
     * provided with this configuration, {@link ExactlyOnceSupport#UNSUPPORTED}
     * when it cannot; a {@code null} result is treated as "cannot".
     * @since 3.3
     */
    public ExactlyOnceSupport exactlyOnceSupport(Map<String, String> connectorConfig) {
        return null;
    }

    /**
     * Reports whether this implementation can define its own transaction
     * boundaries under the given configuration. Called before
     * {@link #start(Map)}, and only when the runtime supports exactly-once and
     * the configuration sets {@code transaction.boundary=connector}.
     *
     * <p>Connectors that never define their own boundaries need not override this.
     *
     * @param connectorConfig the configuration that will be used for the connector
     * @return {@link ConnectorTransactionBoundaries#SUPPORTED} when the connector
     * defines its own boundaries, otherwise
     * {@link ConnectorTransactionBoundaries#UNSUPPORTED} (the default); never
     * {@code null}.
     * @since 3.3
     * @see TransactionContext
     */
    public ConnectorTransactionBoundaries canDefineTransactionBoundaries(Map<String, String> connectorConfig) {
        return ConnectorTransactionBoundaries.UNSUPPORTED;
    }

    /**
     * Invoked when a user manually alters or resets this connector's offsets via
     * the worker's REST API. Connectors that keep offsets in an external system
     * can propagate the change here, and may validate that the supplied
     * partitions/offsets are in a shape they recognize.
     * <p>
     * Implementations that neither manage offsets externally nor need custom
     * validation can simply return {@code true}.
     * <p>
     * The runtime applies the requested changes itself; they become visible
     * through any {@link org.apache.kafka.connect.storage.OffsetStorageReader
     * OffsetStorageReader instances} handed to this connector and its tasks.
     * <p>
     * This operation must be idempotent: the method can be called repeatedly
     * with identical arguments, e.g. when a failed offset-store write is retried.
     * <p>
     * Like {@link #validate(Map) validate}, the runtime may invoke this before
     * {@link #start(Map) start}.
     *
     * @param connectorConfig the configuration of the connector
     * @param offsets a map from source partition to source offset holding the
     *                requested changes; a {@code null} offset value means that
     *                partition is being reset rather than altered. The map may be
     *                empty (offsets already reset, or none committed yet) but is
     *                never null.
     * @return whether the connector overrides this method; the default returns
     * {@code false}, and overriding implementations (that do not unconditionally
     * throw) should return {@code true}
     * @throws UnsupportedOperationException if offsets cannot be altered/reset at all
     * @throws org.apache.kafka.connect.errors.ConnectException if the change is
     * rejected for any other reason (e.g. connector-specific validation failed)
     * @since 3.6
     */
    public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) {
        return false;
    }
}
|
SourceConnector
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/read/ParserLinefeedsTest.java
|
{
"start": 391,
"end": 2081
}
|
class ____
    extends JacksonCoreTestBase
{
    @Test
    void cr() throws Exception
    {
        _testLinefeeds("\r", true);
        _testLinefeeds("\r", false);
    }

    @Test
    void lf() throws Exception
    {
        _testLinefeeds("\n", true);
        _testLinefeeds("\n", false);
    }

    @Test
    void crlf() throws Exception
    {
        _testLinefeeds("\r\n", true);
        _testLinefeeds("\r\n", false);
    }

    /*
    /**********************************************************
    /* Helper methods
    /**********************************************************
     */

    /**
     * Parses a small array document whose "@" placeholders are replaced by the
     * given linefeed sequence, and asserts that the reported line number advances
     * after every linefeed — for both a byte-stream-backed and a reader-backed
     * parser.
     * Fix: the parser is closed via try-with-resources, so it is released even
     * when an assertion fails (the original only closed it on success).
     */
    private void _testLinefeeds(String lf, boolean useStream)
        throws IOException
    {
        String doc = "[1,@2,@-478@]".replaceAll("@", lf);
        try (JsonParser jp = useStream
                ? createParserUsingStream(doc, "UTF-8")
                : createParserUsingReader(doc)) {
            assertToken(JsonToken.START_ARRAY, jp.nextToken());
            assertEquals(1, jp.currentLocation().getLineNr());

            assertToken(JsonToken.VALUE_NUMBER_INT, jp.nextToken());
            assertEquals(1, jp.getIntValue());
            assertEquals(1, jp.currentLocation().getLineNr());

            assertToken(JsonToken.VALUE_NUMBER_INT, jp.nextToken());
            assertEquals(2, jp.getIntValue());
            assertEquals(2, jp.currentLocation().getLineNr());

            assertToken(JsonToken.VALUE_NUMBER_INT, jp.nextToken());
            assertEquals(-478, jp.getIntValue());
            assertEquals(3, jp.currentLocation().getLineNr());

            assertToken(JsonToken.END_ARRAY, jp.nextToken());
            assertEquals(4, jp.currentLocation().getLineNr());
        }
    }
}
|
ParserLinefeedsTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/response/IbmWatsonxEmbeddingsResponseEntityTests.java
|
{
"start": 805,
"end": 4101
}
|
class ____ extends ESTestCase {
public void testFromResponse_CreatesResultsForASingleItem() throws IOException {
String responseJson = """
{
"results": [
{
"embedding": [
-0.00606332,
0.058092743
],
"input": "abc"
}
]
}
""";
DenseEmbeddingFloatResults parsedResults = IbmWatsonxEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(parsedResults.embeddings(), is(List.of(DenseEmbeddingFloatResults.Embedding.of(List.of(-0.00606332F, 0.058092743F)))));
}
public void testFromResponse_CreatesResultsForMultipleItems() throws IOException {
String responseJson = """
{
"results": [
{
"embedding": [
-0.00606332,
0.058092743
],
"input": "abc"
},
{
"embedding": [
0.030681048,
0.01714732
],
"input": "efg"
}
]
}
""";
DenseEmbeddingFloatResults parsedResults = IbmWatsonxEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
DenseEmbeddingFloatResults.Embedding.of(List.of(-0.00606332F, 0.058092743F)),
DenseEmbeddingFloatResults.Embedding.of(List.of(0.030681048F, 0.01714732F))
)
)
);
}
public void testFromResponse_FailsWhenEmbeddingsFieldIsNotPresent() {
String responseJson = """
{
"non_results": [
{
"embedding": [
-0.00606332,
0.058092743
],
"input": "abc"
},
{
"embedding": [
0.030681048,
0.01714732
],
"input": "efg"
}
]
}
""";
var thrownException = expectThrows(
IllegalStateException.class,
() -> IbmWatsonxEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
)
);
assertThat(thrownException.getMessage(), is("Failed to find required field [results] in IBM watsonx embeddings response"));
}
}
|
IbmWatsonxEmbeddingsResponseEntityTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/javadoc/MissingSummaryTest.java
|
{
"start": 3438,
"end": 3764
}
|
class ____ {
/**
* @throws IllegalStateException
*/
private void test() {}
}
""")
.doTest();
}
@Test
public void effectivelyPrivateCase() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/trace/event/naming/DeregisterServiceTraceEvent.java
|
{
"start": 748,
"end": 1138
}
|
class ____ extends NamingTraceEvent {

    private static final long serialVersionUID = 7358195336881398548L;

    /**
     * Builds a trace event recording that a service was deregistered.
     *
     * @param eventTime        time of the deregistration (units as used by NamingTraceEvent;
     *                         presumably epoch millis — confirm against callers)
     * @param serviceNamespace namespace of the service
     * @param serviceGroup     group of the service
     * @param serviceName      name of the service
     */
    public DeregisterServiceTraceEvent(long eventTime, String serviceNamespace,
            String serviceGroup, String serviceName) {
        super("DEREGISTER_SERVICE_TRACE_EVENT", eventTime, serviceNamespace, serviceGroup, serviceName);
    }
}
|
DeregisterServiceTraceEvent
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/superbuilder/ChainedAccessorsCar.java
|
{
"start": 1251,
"end": 1657
}
|
class ____
    extends ChainedAccessorsCarBuilder<ChainedAccessorsCar, ChainedAccessorsCarBuilderImpl> {

    // Instances are obtained through the builder's factory, never constructed directly.
    private ChainedAccessorsCarBuilderImpl() {
    }

    // Hands back this builder with its concrete type so chained calls keep it.
    protected ChainedAccessorsCarBuilderImpl self() {
        return this;
    }

    // Materializes the car from the state accumulated in this builder.
    public ChainedAccessorsCar build() {
        return new ChainedAccessorsCar(this);
    }
}
}
|
ChainedAccessorsCarBuilderImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitor.java
|
{
"start": 893,
"end": 983
}
|
class ____ checking bounding box relations against a serialized triangle tree.
*
*/
|
supports
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/grant/MySqlGrantTest_1.java
|
{
"start": 969,
"end": 2363
}
|
class ____ extends MysqlTest {
    public void test_0() throws Exception {
        final String sql = "GRANT SELECT ON db2.invoice TO 'jeffrey'@'localhost';";

        // Parse and pick up the single resulting statement.
        List<SQLStatement> statementList = new MySqlStatementParser(sql).parseStatementList();
        SQLStatement stmt = statementList.get(0);
        assertEquals(1, statementList.size());

        // Collect schema-usage statistics from the parsed AST.
        MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
        stmt.accept(visitor);

        // Round-tripping through the MySQL printer must reproduce the input exactly.
        String output = SQLUtils.toMySqlString(stmt);
        assertEquals("GRANT SELECT ON db2.invoice TO 'jeffrey'@'localhost';", //
                output);

        // A GRANT touches one table and references no columns or conditions.
        assertEquals(1, visitor.getTables().size());
        assertEquals(0, visitor.getColumns().size());
        assertEquals(0, visitor.getConditions().size());
    }
}
|
MySqlGrantTest_1
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/windowing/MergingWindowSet.java
|
{
"start": 2361,
"end": 9315
}
|
class ____<W extends Window> {
private static final Logger LOG = LoggerFactory.getLogger(MergingWindowSet.class);
/**
* Mapping from window to the window that keeps the window state. When we are incrementally
* merging windows starting from some window we keep that starting window as the state window to
* prevent costly state juggling.
*/
private final Map<W, W> mapping;
/**
* Mapping when we created the {@code MergingWindowSet}. We use this to decide whether we need
* to persist any changes to state.
*/
private final Map<W, W> initialMapping;
private final ListState<Tuple2<W, W>> state;
/** Our window assigner. */
private final MergingWindowAssigner<?, W> windowAssigner;
/** Restores a {@link MergingWindowSet} from the given state. */
public MergingWindowSet(
MergingWindowAssigner<?, W> windowAssigner, ListState<Tuple2<W, W>> state)
throws Exception {
this.windowAssigner = windowAssigner;
mapping = new HashMap<>();
Iterable<Tuple2<W, W>> windowState = state.get();
if (windowState != null) {
for (Tuple2<W, W> window : windowState) {
mapping.put(window.f0, window.f1);
}
}
this.state = state;
initialMapping = new HashMap<>();
initialMapping.putAll(mapping);
}
/**
* Persist the updated mapping to the given state if the mapping changed since initialization.
*/
public void persist() throws Exception {
if (!mapping.equals(initialMapping)) {
state.update(
mapping.entrySet().stream()
.map((w) -> new Tuple2<>(w.getKey(), w.getValue()))
.collect(Collectors.toList()));
}
}
/**
* Returns the state window for the given in-flight {@code Window}. The state window is the
* {@code Window} in which we keep the actual state of a given in-flight window. Windows might
* expand but we keep to original state window for keeping the elements of the window to avoid
* costly state juggling.
*
* @param window The window for which to get the state window.
*/
public W getStateWindow(W window) {
return mapping.get(window);
}
/**
* Removes the given window from the set of in-flight windows.
*
* @param window The {@code Window} to remove.
*/
public void retireWindow(W window) {
W removed = this.mapping.remove(window);
if (removed == null) {
throw new IllegalStateException(
"Window " + window + " is not in in-flight window set.");
}
}
/**
* Adds a new {@code Window} to the set of in-flight windows. It might happen that this triggers
* merging of previously in-flight windows. In that case, the provided {@link MergeFunction} is
* called.
*
* <p>This returns the window that is the representative of the added window after adding. This
* can either be the new window itself, if no merge occurred, or the newly merged window. Adding
* an element to a window or calling trigger functions should only happen on the returned
* representative. This way, we never have to deal with a new window that is immediately
* swallowed up by another window.
*
* <p>If the new window is merged, the {@code MergeFunction} callback arguments also don't
* contain the new window as part of the list of merged windows.
*
* @param newWindow The new {@code Window} to add.
* @param mergeFunction The callback to be invoked in case a merge occurs.
* @return The {@code Window} that new new {@code Window} ended up in. This can also be the new
* {@code Window} itself in case no merge occurred.
* @throws Exception
*/
public W addWindow(W newWindow, MergeFunction<W> mergeFunction) throws Exception {
List<W> windows = new ArrayList<>();
windows.addAll(this.mapping.keySet());
windows.add(newWindow);
final Map<W, Collection<W>> mergeResults = new HashMap<>();
windowAssigner.mergeWindows(
windows,
new MergingWindowAssigner.MergeCallback<W>() {
@Override
public void merge(Collection<W> toBeMerged, W mergeResult) {
if (LOG.isDebugEnabled()) {
LOG.debug("Merging {} into {}", toBeMerged, mergeResult);
}
mergeResults.put(mergeResult, toBeMerged);
}
});
W resultWindow = newWindow;
boolean mergedNewWindow = false;
// perform the merge
for (Map.Entry<W, Collection<W>> c : mergeResults.entrySet()) {
W mergeResult = c.getKey();
Collection<W> mergedWindows = c.getValue();
// if our new window is in the merged windows make the merge result the
// result window
if (mergedWindows.remove(newWindow)) {
mergedNewWindow = true;
resultWindow = mergeResult;
}
// pick any of the merged windows and choose that window's state window
// as the state window for the merge result
W mergedStateWindow = this.mapping.get(mergedWindows.iterator().next());
// figure out the state windows that we are merging
List<W> mergedStateWindows = new ArrayList<>();
for (W mergedWindow : mergedWindows) {
W res = this.mapping.remove(mergedWindow);
if (res != null) {
mergedStateWindows.add(res);
}
}
this.mapping.put(mergeResult, mergedStateWindow);
// don't put the target state window into the merged windows
mergedStateWindows.remove(mergedStateWindow);
// don't merge the new window itself, it never had any state associated with it
// i.e. if we are only merging one pre-existing window into itself
// without extending the pre-existing window
if (!(mergedWindows.contains(mergeResult) && mergedWindows.size() == 1)) {
mergeFunction.merge(
mergeResult,
mergedWindows,
this.mapping.get(mergeResult),
mergedStateWindows);
}
}
// the new window created a new, self-contained window without merging
if (mergeResults.isEmpty() || (resultWindow.equals(newWindow) && !mergedNewWindow)) {
this.mapping.put(resultWindow, resultWindow);
}
return resultWindow;
}
/**
* Callback for {@link #addWindow(Window, MergeFunction)}.
*
* @param <W>
*/
public
|
MergingWindowSet
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/ConfigurableApplicationContext.java
|
{
"start": 1700,
"end": 4661
}
|
interface ____ extends ApplicationContext, Lifecycle, Closeable {
/**
* Any number of these characters are considered delimiters between multiple
* context config paths in a single {@code String} value: {@value}.
* @see org.springframework.context.support.AbstractXmlApplicationContext#setConfigLocation
* @see org.springframework.web.context.ContextLoader#CONFIG_LOCATION_PARAM
* @see org.springframework.web.servlet.FrameworkServlet#setContextConfigLocation
*/
String CONFIG_LOCATION_DELIMITERS = ",; \t\n";
/**
* The name of the {@linkplain java.util.concurrent.Executor bootstrap executor}
* bean in the context: {@value}.
* <p>If none is supplied, no background bootstrapping will be active.
* @since 6.2
* @see java.util.concurrent.Executor
* @see org.springframework.core.task.TaskExecutor
* @see org.springframework.beans.factory.support.DefaultListableBeanFactory#setBootstrapExecutor
*/
String BOOTSTRAP_EXECUTOR_BEAN_NAME = "bootstrapExecutor";
/**
* Name of the {@code ConversionService} bean in the factory: {@value}.
* <p>If none is supplied, default conversion rules apply.
* @since 3.0
* @see org.springframework.core.convert.ConversionService
*/
String CONVERSION_SERVICE_BEAN_NAME = "conversionService";
/**
* Name of the {@code LoadTimeWeaver} bean in the factory: {@value}.
* <p>If such a bean is supplied, the context will use a temporary {@link ClassLoader}
* for type matching, in order to allow the {@code LoadTimeWeaver} to process
* all actual bean classes.
* @since 2.5
* @see org.springframework.instrument.classloading.LoadTimeWeaver
*/
String LOAD_TIME_WEAVER_BEAN_NAME = "loadTimeWeaver";
/**
* Name of the {@link org.springframework.core.env.Environment Environment}
* bean in the factory: {@value}.
* @since 3.1
*/
String ENVIRONMENT_BEAN_NAME = "environment";
/**
* Name of the JVM System properties bean in the factory: {@value}.
* @see java.lang.System#getProperties()
*/
String SYSTEM_PROPERTIES_BEAN_NAME = "systemProperties";
/**
* Name of the Operating System environment bean in the factory: {@value}.
* @see java.lang.System#getenv()
*/
String SYSTEM_ENVIRONMENT_BEAN_NAME = "systemEnvironment";
/**
* Name of the {@link ApplicationStartup} bean in the factory: {@value}.
* @since 5.3
*/
String APPLICATION_STARTUP_BEAN_NAME = "applicationStartup";
/**
* {@linkplain Thread#getName() Name} of the {@linkplain #registerShutdownHook()
* shutdown hook} thread: {@value}.
* @since 5.2
* @see #registerShutdownHook()
*/
String SHUTDOWN_HOOK_THREAD_NAME = "SpringContextShutdownHook";
/**
* Set the unique ID of this application context.
* @since 3.0
*/
void setId(String id);
/**
* Set the parent of this application context.
* <p>Note that the parent shouldn't be changed: It should only be set outside
* a constructor if it isn't available when an object of this
|
ConfigurableApplicationContext
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/scenario/MaintenanceNotificationConnectionTest.java
|
{
"start": 39995,
"end": 47016
}
|
class ____ implements MaintenanceNotificationCapture {
private final TestCapture firstCapture;
private final RedisURI uri;
private final StatefulRedisConnection<String, String> firstConnection;
private final AtomicReference<TestCapture> secondCapture = new AtomicReference<>();
private final AtomicReference<RedisClient> secondClient = new AtomicReference<>();
private final AtomicReference<StatefulRedisConnection<String, String>> secondConnection = new AtomicReference<>();
private final CountDownLatch secondConnectionMovingLatch = new CountDownLatch(1);
private final AtomicBoolean testPhaseActive = new AtomicBoolean(true);
public DualConnectionCapture(TestCapture firstCapture, RedisURI uri, String bdbId,
StatefulRedisConnection<String, String> firstConnection) {
this.firstCapture = firstCapture;
this.uri = uri;
this.firstConnection = firstConnection;
}
@Override
public void captureNotification(String notification) {
if (!testPhaseActive.get()) {
log.debug("Ignoring notification during cleanup phase: {}", notification);
return;
}
firstCapture.captureNotification(notification);
// If this is a MIGRATED notification and we haven't created second connection yet, create it
// MIGRATED comes right after the bind is fired, before MOVING notification
if (notification.contains("MIGRATED") && secondConnection.get() == null) {
log.info("MIGRATED notification received - creating second connection right after bind");
createSecondConnection();
}
}
private void createSecondConnection() {
try {
log.info("Creating second connection for dual connection test...");
// Get the channel from the first connection to determine the actual IP address
Channel firstChannel = ConnectionTestUtil.getChannel(firstConnection);
String actualIpAddress = null;
int actualPort = -1;
if (firstChannel != null && firstChannel.remoteAddress() != null) {
String remoteAddress = firstChannel.remoteAddress().toString();
log.info("First connection remote address: {}", remoteAddress);
// Handle different address formats:
// Format 1: "/54.74.227.236:12000" (direct IP)
// Format 2: "redis-12000.ivo-somefdqn.com/54.74.227.236:12000" (FQDN with resolved
// IP)
String ipPortString = null;
if (remoteAddress.contains("/")) {
// Extract the part after the last slash (the actual IP:port)
int lastSlashIndex = remoteAddress.lastIndexOf('/');
ipPortString = remoteAddress.substring(lastSlashIndex + 1);
} else {
// Direct IP:port format
ipPortString = remoteAddress;
}
if (ipPortString != null) {
String[] parts = ipPortString.split(":");
if (parts.length == 2) {
actualIpAddress = parts[0];
actualPort = Integer.parseInt(parts[1]);
log.info("Extracted actual IP address: {}:{}", actualIpAddress, actualPort);
}
}
} else {
log.warn("Could not determine actual IP address from first connection, using original URI");
}
// Create URI for the second connection - use the same IP address as the first connection if available
RedisURI secondUri;
if (actualIpAddress != null && actualPort != -1) {
secondUri = RedisURI.builder().withHost(actualIpAddress).withPort(actualPort)
.withAuthentication(mStandard.getUsername(), mStandard.getPassword()).build();
log.info("Creating second connection to same IP address: {}:{}", actualIpAddress, actualPort);
} else {
log.warn("Could not extract actual IP address, falling back to original URI");
secondUri = uri;
}
RedisClient client = RedisClient.create(secondUri);
ClientOptions options = ClientOptions.builder().protocolVersion(ProtocolVersion.RESP3)
.maintNotificationsConfig(MaintNotificationsConfig.enabled(EndpointType.EXTERNAL_IP)).build();
client.setOptions(options);
StatefulRedisConnection<String, String> connection = client.connect();
TestCapture capture = new TestCapture() {
@Override
public void captureNotification(String notification) {
super.captureNotification(notification);
// Signal when second connection receives MOVING
if (notification.contains("MOVING")) {
log.info("Second connection received MOVING notification");
secondConnectionMovingLatch.countDown();
}
}
};
MaintenancePushNotificationMonitor.setupMonitoring(connection, capture);
secondClient.set(client);
secondConnection.set(connection);
secondCapture.set(capture);
log.info("Second connection created and monitoring setup completed");
} catch (Exception e) {
log.error("Failed to create second connection: {}", e.getMessage(), e);
}
}
public boolean waitForSecondConnectionMoving(Duration timeout) throws InterruptedException {
return secondConnectionMovingLatch.await(timeout.toMillis(), TimeUnit.MILLISECONDS);
}
public TestCapture getFirstCapture() {
return firstCapture;
}
public TestCapture getSecondCapture() {
return secondCapture.get();
}
public RedisClient getSecondClient() {
return secondClient.get();
}
public StatefulRedisConnection<String, String> getSecondConnection() {
return secondConnection.get();
}
public void endTestPhase() {
testPhaseActive.set(false);
firstCapture.endTestPhase();
if (secondCapture.get() != null) {
secondCapture.get().endTestPhase();
}
log.info("Dual connection test phase ended - notifications will be ignored during cleanup");
}
}
public static
|
DualConnectionCapture
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/htmlunit/server/MockWebResponseBuilder.java
|
{
"start": 1155,
"end": 2601
}
|
class ____ {
private final long startTime;
private final WebRequest webRequest;
private final FluxExchangeResult<String> exchangeResult;
MockWebResponseBuilder(long startTime, WebRequest webRequest, FluxExchangeResult<String> exchangeResult) {
Assert.notNull(webRequest, "WebRequest must not be null");
Assert.notNull(exchangeResult, "FluxExchangeResult must not be null");
this.startTime = startTime;
this.webRequest = webRequest;
this.exchangeResult = exchangeResult;
}
WebResponse build() throws IOException {
WebResponseData webResponseData = webResponseData();
long endTime = System.currentTimeMillis();
return new WebResponse(webResponseData, this.webRequest, endTime - this.startTime);
}
private WebResponseData webResponseData() {
List<NameValuePair> responseHeaders = responseHeaders();
HttpStatus status = HttpStatus.resolve(this.exchangeResult.getStatus().value());
return new WebResponseData(this.exchangeResult.getResponseBodyContent(), status.value(),
status.getReasonPhrase(), responseHeaders);
}
private List<NameValuePair> responseHeaders() {
HttpHeaders responseHeaders = this.exchangeResult.getResponseHeaders();
List<NameValuePair> result = new ArrayList<>(responseHeaders.size());
responseHeaders.forEach((headerName, headerValues) -> headerValues
.forEach((headerValue) -> result.add(new NameValuePair(headerName, headerValue))));
return result;
}
}
|
MockWebResponseBuilder
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/vector/RedisVectorSetResp2IntegrationTests.java
|
{
"start": 1121,
"end": 1358
}
|
class ____ extends RedisVectorSetIntegrationTests {
@Override
protected ClientOptions getOptions() {
return ClientOptions.builder().protocolVersion(ProtocolVersion.RESP2).build();
}
}
|
RedisVectorSetResp2IntegrationTests
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-jetty/src/main/java/smoketest/jetty/util/StringUtil.java
|
{
"start": 696,
"end": 889
}
|
class ____ {
private StringUtil() {
}
public static String repeat(char c, int length) {
char[] chars = new char[length];
Arrays.fill(chars, c);
return new String(chars);
}
}
|
StringUtil
|
java
|
spring-projects__spring-framework
|
spring-core-test/src/test/java/com/example/PackagePrivate.java
|
{
"start": 649,
"end": 737
}
|
class ____ {
String perform() {
return "Hello from PackagePrivate";
}
}
|
PackagePrivate
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/aot/QueriesFactoryUnitTests.java
|
{
"start": 3472,
"end": 3654
}
|
interface ____ extends Repository<MyEntity, Long> {
@Query("select t from #{#entityName} t")
Collection<MyEntity> someFind();
}
@Entity(name = "CustomNamed")
static
|
MyRepository
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/logout/Saml2LogoutRequest.java
|
{
"start": 1576,
"end": 6006
}
|
class ____ implements Serializable {
@Serial
private static final long serialVersionUID = -3588981995674761337L;
private static final Function<Map<String, String>, String> DEFAULT_ENCODER = (params) -> {
if (params.isEmpty()) {
return null;
}
UriComponentsBuilder builder = UriComponentsBuilder.newInstance();
for (Map.Entry<String, String> component : params.entrySet()) {
builder.queryParam(component.getKey(), UriUtils.encode(component.getValue(), StandardCharsets.ISO_8859_1));
}
return builder.build(true).toString().substring(1);
};
private final String location;
private final Saml2MessageBinding binding;
private final Map<String, String> parameters;
private final String id;
private final String relyingPartyRegistrationId;
private transient Function<Map<String, String>, String> encoder;
private Saml2LogoutRequest(String location, Saml2MessageBinding binding, Map<String, String> parameters, String id,
String relyingPartyRegistrationId) {
this(location, binding, parameters, id, relyingPartyRegistrationId, DEFAULT_ENCODER);
}
private Saml2LogoutRequest(String location, Saml2MessageBinding binding, Map<String, String> parameters, String id,
String relyingPartyRegistrationId, Function<Map<String, String>, String> encoder) {
this.location = location;
this.binding = binding;
this.parameters = Collections.unmodifiableMap(new LinkedHashMap<>(parameters));
this.id = id;
this.relyingPartyRegistrationId = relyingPartyRegistrationId;
this.encoder = encoder;
}
/**
* The unique identifier for this Logout Request
* @return the Logout Request identifier
*/
public String getId() {
return this.id;
}
/**
* Get the location of the asserting party's <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService</a>
* @return the SingleLogoutService location
*/
public String getLocation() {
return this.location;
}
/**
* Get the binding for the asserting party's <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService</a>
* @return the SingleLogoutService binding
*/
public Saml2MessageBinding getBinding() {
return this.binding;
}
/**
* Get the signed and serialized <saml2:LogoutRequest> payload
* @return the signed and serialized <saml2:LogoutRequest> payload
*/
public String getSamlRequest() {
return this.parameters.get(Saml2ParameterNames.SAML_REQUEST);
}
/**
* The relay state associated with this Logout Request
* @return the relay state
*/
public String getRelayState() {
return this.parameters.get(Saml2ParameterNames.RELAY_STATE);
}
/**
* Get the {@code name} parameters, a short-hand for <code>
* getParameters().get(name)
* </code>
*
* Useful when specifying additional query parameters for the Logout Request
* @param name the parameter's name
* @return the parameter's value
*/
public String getParameter(String name) {
return this.parameters.get(name);
}
/**
* Get all parameters
*
* Useful when specifying additional query parameters for the Logout Request
* @return the Logout Request query parameters
*/
public Map<String, String> getParameters() {
return this.parameters;
}
/**
* Get an encoded query string of all parameters. Resulting query does not contain a
* leading question mark.
* @return an encoded string of all parameters
* @since 5.8
*/
public String getParametersQuery() {
return this.encoder.apply(this.parameters);
}
/**
* The identifier for the {@link RelyingPartyRegistration} associated with this Logout
* Request
* @return the {@link RelyingPartyRegistration} id
*/
public String getRelyingPartyRegistrationId() {
return this.relyingPartyRegistrationId;
}
/**
* Create a {@link Builder} instance from this {@link RelyingPartyRegistration}
*
* Specifically, this will pull the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService</a>
* location and binding from the {@link RelyingPartyRegistration}
* @param registration the {@link RelyingPartyRegistration} to use
* @return the {@link Builder} for further configurations
*/
public static Builder withRelyingPartyRegistration(RelyingPartyRegistration registration) {
return new Builder(registration);
}
public static final
|
Saml2LogoutRequest
|
java
|
processing__processing4
|
core/src/processing/core/PGraphics.java
|
{
"start": 7788,
"end": 61405
}
|
class ____ extends PImage implements PConstants {
/** width * height (useful for many calculations) */
@SuppressWarnings("unused")
public int pixelCount;
/** the anti-aliasing level for renderers that support it */
public int smooth;
// ........................................................
/** true if defaults() has been called a first time */
protected boolean settingsInited;
/** true if settings should be re-applied on next beginDraw() */
protected boolean reapplySettings;
/** set to a PGraphics object being used inside a beginRaw/endRaw() block */
protected PGraphics raw;
// ........................................................
/** path to the file being saved for this renderer (if any) */
protected String path;
/**
* True if this is the main graphics context for a sketch.
* False for offscreen buffers retrieved via createGraphics().
*/
protected boolean primaryGraphics;
// ........................................................
/**
* Array of hint[] items. These are hacks to get around various
* temporary workarounds inside the environment.
* <p/>
* Note that this array cannot be static, as a hint() may result in a
* runtime change specific to a renderer. For instance, calling
* hint(DISABLE_DEPTH_TEST) has to call glDisable() right away on an
* instance of PGraphicsOpenGL.
* <p/>
* The hints[] array is allocated early on because it might
* be used inside beginDraw(), allocate(), etc.
*/
protected boolean[] hints = new boolean[HINT_COUNT];
// ........................................................
/**
* Storage for renderer-specific image data. In 1.x, renderers wrote cache
* data into the image object. In 2.x, the renderer has a weak-referenced
* map that points at any of the images it has worked on already. When the
* images go out of scope, they will be properly garbage collected.
*/
protected WeakHashMap<PImage, Object> cacheMap =
new WeakHashMap<>();
////////////////////////////////////////////////////////////
// Vertex fields, moved from PConstants (after 2.0a8) because they're too
// general to show up in all sketches as defined variables.
// X, Y and Z are still stored in PConstants because of their general
// usefulness, and that X we'll always want to be 0, etc.
static public final int R = 3; // actual rgb, after lighting
static public final int G = 4; // fill stored here, transform in place
static public final int B = 5; // TODO don't do that anymore (?)
static public final int A = 6;
static public final int U = 7; // texture
static public final int V = 8;
static public final int NX = 9; // normal
static public final int NY = 10;
static public final int NZ = 11;
static public final int EDGE = 12;
// stroke
/** stroke argb values */
static public final int SR = 13;
static public final int SG = 14;
static public final int SB = 15;
static public final int SA = 16;
/** stroke weight */
static public final int SW = 17;
// transformations (2D and 3D)
@SuppressWarnings("unused")
static public final int TX = 18; // transformed xyzw
@SuppressWarnings("unused")
static public final int TY = 19;
@SuppressWarnings("unused")
static public final int TZ = 20;
@SuppressWarnings("unused")
static public final int VX = 21; // view space coords
@SuppressWarnings("unused")
static public final int VY = 22;
@SuppressWarnings("unused")
static public final int VZ = 23;
@SuppressWarnings("unused")
static public final int VW = 24;
// material properties
// Ambient color (usually to be kept the same as diffuse)
// fill(_) sets both ambient and diffuse.
static public final int AR = 25;
static public final int AG = 26;
static public final int AB = 27;
// Diffuse is shared with fill.
@SuppressWarnings("unused")
static public final int DR = 3; // TODO needs to not be shared, this is a material property
@SuppressWarnings("unused")
static public final int DG = 4;
@SuppressWarnings("unused")
static public final int DB = 5;
@SuppressWarnings("unused")
static public final int DA = 6;
// specular (by default kept white)
static public final int SPR = 28;
static public final int SPG = 29;
static public final int SPB = 30;
static public final int SHINE = 31;
// emissive (by default kept black)
static public final int ER = 32;
static public final int EG = 33;
static public final int EB = 34;
// has this vertex been lit yet
static public final int BEEN_LIT = 35;
// has this vertex been assigned a normal yet
static public final int HAS_NORMAL = 36;
static public final int VERTEX_FIELD_COUNT = 37;
////////////////////////////////////////////////////////////
// STYLE PROPERTIES
// Also inherits imageMode() and smooth() (among others) from PImage.
/** The current colorMode */
public int colorMode; // = RGB;
/** Max value for red (or hue) set by colorMode */
public float colorModeX; // = 255;
/** Max value for green (or saturation) set by colorMode */
public float colorModeY; // = 255;
/** Max value for blue (or value) set by colorMode */
public float colorModeZ; // = 255;
/** Max value for alpha set by colorMode */
public float colorModeA; // = 255;
/** True if colors are not in the range 0..1 */
boolean colorModeScale; // = true;
/**
* True if colorMode(RGB, 255). Defaults to true so that color()
* used as part of a field declaration will properly assign values.
*/
boolean colorModeDefault = true;
// ........................................................
// Tint color for images
/**
* True if tint() is enabled (read-only).
* <p/>
* Using tint/tintColor seems a better option for naming than
* tintEnabled/tint because the latter seems ugly, even though
* g.tint as the actual color seems a little more intuitive,
* it's just that g.tintEnabled is even more unintuitive.
* Same goes for fill and stroke, et al.
*/
public boolean tint;
/** tint that was last set (read-only) */
public int tintColor;
protected boolean tintAlpha;
protected float tintR, tintG, tintB, tintA;
protected int tintRi, tintGi, tintBi, tintAi;
// ........................................................
// Fill color
/** true if fill() is enabled, (read-only) */
public boolean fill;
/** fill that was last set (read-only) */
public int fillColor = 0xffFFFFFF;
protected boolean fillAlpha;
protected float fillR, fillG, fillB, fillA;
protected int fillRi, fillGi, fillBi, fillAi;
// ........................................................
// Stroke color
/** true if stroke() is enabled, (read-only) */
public boolean stroke;
/** stroke that was last set (read-only) */
public int strokeColor = 0xff000000;
protected boolean strokeAlpha;
protected float strokeR, strokeG, strokeB, strokeA;
protected int strokeRi, strokeGi, strokeBi, strokeAi;
// ........................................................
// Additional stroke properties
static protected final float DEFAULT_STROKE_WEIGHT = 1;
static protected final int DEFAULT_STROKE_JOIN = MITER;
static protected final int DEFAULT_STROKE_CAP = ROUND;
/**
* Last value set by strokeWeight() (read-only). This has a default
* setting, rather than fighting with renderers about whether that
* renderer supports thick lines.
*/
public float strokeWeight = DEFAULT_STROKE_WEIGHT;
/**
* Set by strokeJoin() (read-only). This has a default setting
* so that strokeJoin() need not be called by defaults,
* because subclasses may not implement it (i.e. PGraphicsGL)
*/
public int strokeJoin = DEFAULT_STROKE_JOIN;
/**
* Set by strokeCap() (read-only). This has a default setting
* so that strokeCap() need not be called by defaults,
* because subclasses may not implement it (i.e. PGraphicsGL)
*/
public int strokeCap = DEFAULT_STROKE_CAP;
// ........................................................
// Shape placement properties
// imageMode() is inherited from PImage
/** The current rect mode (read-only) */
public int rectMode;
/** The current ellipse mode (read-only) */
public int ellipseMode;
/** The current shape alignment mode (read-only) */
public int shapeMode;
/** The current image alignment (read-only) */
public int imageMode = CORNER;
// ........................................................
// Text and font properties
/** The current text font (read-only) */
public PFont textFont;
/** The current text align (read-only) */
public int textAlign = LEFT;
/** The current vertical text alignment (read-only) */
public int textAlignY = BASELINE;
/** The current text mode (read-only) */
public int textMode = MODEL;
/** The current text size (read-only) */
public float textSize;
/** The current text leading (read-only) */
public float textLeading;
/** Used internally to check whether still using the default font */
protected String defaultFontName;
static final protected String ERROR_TEXTFONT_NULL_PFONT =
"A null PFont was passed to textFont()";
// ........................................................
// Material properties
// PMaterial material;
// PMaterial[] materialStack;
// int materialStackPointer;
public int ambientColor;
public float ambientR, ambientG, ambientB;
public boolean setAmbient;
public int specularColor;
public float specularR, specularG, specularB;
public int emissiveColor;
public float emissiveR, emissiveG, emissiveB;
public float shininess;
// Style stack
static final int STYLE_STACK_DEPTH = 64;
PStyle[] styleStack = new PStyle[STYLE_STACK_DEPTH];
int styleStackDepth;
////////////////////////////////////////////////////////////
/** Last background color that was set, zero if an image */
public int backgroundColor = 0xffCCCCCC;
protected boolean backgroundAlpha;
protected float backgroundR, backgroundG, backgroundB, backgroundA;
protected int backgroundRi, backgroundGi, backgroundBi, backgroundAi;
static final protected String ERROR_BACKGROUND_IMAGE_SIZE =
"background image must be the same size as your application";
static final protected String ERROR_BACKGROUND_IMAGE_FORMAT =
"background images should be RGB or ARGB";
/** The current blending mode. */
protected int blendMode;
// ........................................................
/**
* Current model-view matrix transformation of the form m[row][column],
* which is a "column vector" (as opposed to "row vector") matrix.
*/
// PMatrix matrix;
// public float m00, m01, m02, m03;
// public float m10, m11, m12, m13;
// public float m20, m21, m22, m23;
// public float m30, m31, m32, m33;
// static final int MATRIX_STACK_DEPTH = 32;
// float[][] matrixStack = new float[MATRIX_STACK_DEPTH][16];
// float[][] matrixInvStack = new float[MATRIX_STACK_DEPTH][16];
// int matrixStackDepth;
static final protected int MATRIX_STACK_DEPTH = 32;
static final protected String ERROR_PUSHMATRIX_OVERFLOW =
"Too many calls to pushMatrix().";
static final protected String ERROR_PUSHMATRIX_UNDERFLOW =
"Too many calls to popMatrix(), and not enough to pushMatrix().";
// ........................................................
/**
* Java AWT Image object associated with this renderer. For the 1.0 version
* of P2D and P3D, this was associated with their MemoryImageSource.
* For PGraphicsJava2D, it will be the offscreen drawing buffer.
*/
public Image image;
/** Surface object that we're talking to */
protected PSurface surface;
// ........................................................
// internal color for setting/calculating
protected float calcR, calcG, calcB, calcA;
protected int calcRi, calcGi, calcBi, calcAi;
protected int calcColor;
protected boolean calcAlpha;
/** The last RGB value converted to HSB */
int cacheHsbKey;
/** Result of the last conversion to HSB */
float[] cacheHsbValue = new float[3];
// ........................................................
/**
* Type of shape passed to beginShape(),
* zero if no shape is currently being drawn.
*/
protected int shape;
// vertices
public static final int DEFAULT_VERTICES = 512;
protected float[][] vertices =
new float[DEFAULT_VERTICES][VERTEX_FIELD_COUNT];
protected int vertexCount; // total number of vertices
// ........................................................

protected boolean bezierInited = false;  // whether bezier matrices are ready
public int bezierDetail = 20;            // segments used per bezier

// used by both curve and bezier, so just init here
protected PMatrix3D bezierBasisMatrix =
  new PMatrix3D(-1, 3, -3, 1,
                3, -6, 3, 0,
                -3, 3, 0, 0,
                1, 0, 0, 0);

//protected PMatrix3D bezierForwardMatrix;
protected PMatrix3D bezierDrawMatrix;

// ........................................................

protected boolean curveInited = false;   // whether curve matrices are ready
public int curveDetail = 20;             // segments used per curve
public float curveTightness = 0;         // catmull-rom tension parameter

// catmull-rom basis matrix, perhaps with optional s parameter
protected PMatrix3D curveBasisMatrix;
protected PMatrix3D curveDrawMatrix;

protected PMatrix3D bezierBasisInverse;
protected PMatrix3D curveToBezierMatrix;

// ........................................................

// spline vertices

// Queued control points collected by curveVertex() calls.
protected float[][] curveVertices;
protected int curveVertexCount;

// ........................................................

// precalculate sin/cos lookup tables [toxi]
// circle resolution is determined from the actual used radii
// passed to ellipse() method. this will automatically take any
// scale transformations into account too
// [toxi 031031]
// changed table's precision to 0.5 degree steps
// introduced new vars for more flexible code

static final protected float[] sinLUT;
static final protected float[] cosLUT;
static final protected float SINCOS_PRECISION = 0.5f;  // degrees per entry
static final protected int SINCOS_LENGTH = (int) (360f / SINCOS_PRECISION);

// Fill both lookup tables once at class-load time.
static {
  sinLUT = new float[SINCOS_LENGTH];
  cosLUT = new float[SINCOS_LENGTH];
  for (int i = 0; i < SINCOS_LENGTH; i++) {
    sinLUT[i] = (float) Math.sin(i * DEG_TO_RAD * SINCOS_PRECISION);
    cosLUT[i] = (float) Math.cos(i * DEG_TO_RAD * SINCOS_PRECISION);
  }
}
// ........................................................

/**
 * Internal buffer used by the text() functions
 * because the String object is slow
 */
protected char[] textBuffer = new char[8 * 1024];
protected char[] textWidthBuffer = new char[8 * 1024];

// Line-break bookkeeping used when laying out multi-line text.
protected int textBreakCount;
protected int[] textBreakStart;
protected int[] textBreakStop;

// ........................................................

// Whether upcoming vertices belong to an edge; toggled via edge().
public boolean edge = true;

// ........................................................

/// normal calculated per triangle
static protected final int NORMAL_MODE_AUTO = 0;
/// one normal manually specified per shape
static protected final int NORMAL_MODE_SHAPE = 1;
/// normals specified for each shape vertex
static protected final int NORMAL_MODE_VERTEX = 2;

/// Current mode for normals, one of AUTO, SHAPE, or VERTEX
protected int normalMode;
protected boolean autoNormal;  // true when normals are handled automatically

/** Current normal vector. */
public float normalX, normalY, normalZ;

// ........................................................

/**
 * Sets whether texture coordinates passed to
 * vertex() calls will be based on coordinates that are
 * based on the IMAGE or NORMALIZED.
 */
public int textureMode = IMAGE;

/**
 * Current horizontal coordinate for texture, will always
 * be between 0 and 1, even if using textureMode(IMAGE).
 */
public float textureU;

/** Current vertical coordinate for texture, see above. */
public float textureV;

/** Current image being used as a texture */
public PImage textureImage;

// ........................................................

// [toxi031031] new & faster sphere code w/ support flexible resolutions
// will be set by sphereDetail() or 1st call to sphere()
protected float[] sphereX;
protected float[] sphereY;
protected float[] sphereZ;

/// Number of U steps (aka "theta") around longitudinally spanning 2*pi
public int sphereDetailU = 0;
/// Number of V steps (aka "phi") along latitudinally top-to-bottom spanning pi
public int sphereDetailV = 0;
//////////////////////////////////////////////////////////////
// INTERNAL
// Most renderers will only override the default implementation of one or
// two of the setXxxx() methods, so they're broken out here since the
// default implementations for each are simple, obvious, and common.
// They're also separate to avoid a monolithic and fragile constructor.
/**
 * Bare constructor; the real setup happens later through setParent(),
 * setPrimary(), setPath(), and setSize() to avoid a monolithic and
 * fragile constructor.
 */
public PGraphics() {
  // In 3.1.2, giving up on the async image saving as the default
  hints[DISABLE_ASYNC_SAVEFRAME] = true;
}
public void setParent(PApplet parent) { // ignore
  this.parent = parent;
  // Some renderers (OpenGL) must know the smoothing level and pixel
  // density before their rendering surface even exists, so read both
  // from the sketch immediately.
  this.smooth = parent.sketchSmooth();
  this.pixelDensity = parent.sketchPixelDensity();
}
/**
 * Set (or unset) this as the main drawing surface. Meaning that it can
 * safely be set to opaque (and given a default gray background), or anything
 * else that goes along with that.
 */
public void setPrimary(boolean primary) { // ignore
  primaryGraphics = primary;
  // The main surface must be opaque (for performance and general
  // headache reasons; a semi-transparent OpenGL surface is trouble).
  // Use createGraphics() if a transparent surface is needed.
  if (primaryGraphics) {
    format = RGB;
  }
}
/** Record the output path associated with this renderer. */
public void setPath(String path) { // ignore
  this.path = path;
}
// public void setQuality(int samples) { // ignore
// this.quality = samples;
// }
/**
 * The final step in setting up a renderer: set its size. This was formerly
 * handled by the constructor, but it's been broken out so that
 * setParent/setPrimary/setPath can be handled differently.
 * <p/>
 * Important: this is ignored by the Methods task because otherwise it will
 * override setSize() in PApplet/Applet/Component, which will 1) not call
 * super.setSize(), and 2) will cause the renderer to be resized from the
 * event thread (EDT), causing a nasty crash as it collides with the
 * animation thread.
 */
public void setSize(int w, int h) { // ignore
  width = w;
  height = h;
  // Pixel dimensions scale by the density for high-dpi displays.
  pixelWidth = w * pixelDensity;
  pixelHeight = h * pixelDensity;
  // A resize invalidates the current settings; re-push them on next draw.
  reapplySettings = true;
}
// /**
// * Allocate memory or an image buffer for this renderer.
// */
// protected void allocate() { }
/**
 * Handle any shutdown for this graphics context.
 * <p>
 * This is called when a sketch is shut down and this renderer was
 * specified using the size() command, or inside endRecord() and
 * endRaw(), in order to shut things off.
 */
public void dispose() { // ignore
  // Only the primary surface owns the async saver; tear it down once.
  if (primaryGraphics) {
    if (asyncImageSaver != null) {
      asyncImageSaver.dispose();
      asyncImageSaver = null;
    }
  }
}
/** Default surface factory; renderer subclasses supply their own. */
public PSurface createSurface() { // ignore
  return surface = new PSurfaceNone(this);
}
//////////////////////////////////////////////////////////////
// IMAGE METADATA FOR THIS RENDERER
/**
 * Store data of some kind for the renderer that requires extra metadata of
 * some kind. Usually this is a renderer-specific representation of the
 * image data, for instance a BufferedImage with tint() settings applied for
 * PGraphicsJava2D, or resized image data and OpenGL texture indices for
 * PGraphicsOpenGL.
 * @param image The image to be stored
 * @param storage The metadata required by the renderer
 */
public void setCache(PImage image, Object storage) { // ignore
  cacheMap.put(image, storage);
}
/**
 * Get cache storage data for the specified renderer. Because each renderer
 * will cache data in different formats, it's necessary to store cache data
 * keyed by the renderer object. Otherwise, attempting to draw the same
 * image to both a PGraphicsJava2D and a PGraphicsOpenGL will cause errors.
 * @param image The image whose metadata is requested
 * @return metadata stored for the specified renderer, or null if none
 */
public Object getCache(PImage image) { // ignore
  return cacheMap.get(image);
}
/**
 * Remove information associated with this renderer from the cache, if any.
 * @param image The image whose cache data should be removed
 */
public void removeCache(PImage image) { // ignore
  cacheMap.remove(image);
}
//////////////////////////////////////////////////////////////
// FRAME
// /**
// * Some renderers have requirements re: when they are ready to draw.
// */
// public boolean canDraw() { // ignore
// return true;
// }
// removing because renderers will have their own animation threads and
// can handle this however they wish
// /**
// * Try to draw, or put a draw request on the queue.
// */
// public void requestDraw() { // ignore
// parent.handleDraw();
// }
/**
 *
 * Sets the default properties for a <b>PGraphics</b> object. It should be called
 * before anything is drawn into the object.
 *
 * <h3>Advanced</h3>
 * When creating your own PGraphics, you should call this before
 * drawing anything.
 *
 * @webref pgraphics:method
 * @webBrief Sets the default properties for a <b>PGraphics</b> object
 */
public void beginDraw() { // ignore
  // Intentionally empty in the base class; renderers override as needed.
}
/**
 *
 * Finalizes the rendering of a <b>PGraphics</b> object so that it can be shown on screen.
 *
 * <h3>Advanced</h3>
 * <p/>
 * When creating your own PGraphics, you should call this when
 * you're finished drawing.
 *
 * @webref pgraphics:method
 * @webBrief Finalizes the rendering of a <b>PGraphics</b> object so that it can be shown on screen
 * @brief Finalizes the rendering of a PGraphics object
 */
public void endDraw() { // ignore
  // Intentionally empty in the base class; renderers override as needed.
}
/** Only meaningful for OpenGL renderers; the base class just warns. */
public PGL beginPGL() {
  showMethodWarning("beginGL");
  return null;
}

/** Companion to beginPGL(); base class only warns. */
public void endPGL() {
  showMethodWarning("endGL");
}

/** Flush buffered geometry to the surface. */
public void flush() {
  // no-op, mostly for P3D to write sorted stuff
}
/**
 * Make sure the drawing state is valid before use: install the default
 * settings on first use, and re-push any settings that were invalidated.
 */
protected void checkSettings() {
  if (!settingsInited) {
    defaultSettings();
  }
  if (reapplySettings) {
    reapplySettings();
  }
}
/**
 * Set engine's default values. This has to be called by PApplet,
 * somewhere inside setup() or draw() because it talks to the
 * graphics buffer, meaning that for subclasses like OpenGL, there
 * needs to be a valid graphics context to mess with otherwise
 * you'll get some good crashing action.
 * <p/>
 * This is currently called by checkSettings(), during beginDraw().
 */
protected void defaultSettings() { // ignore
  // System.out.println("PGraphics.defaultSettings() " + width + " " + height);
  // //smooth(); // 2.0a5
  // if (quality > 0) { // 2.0a5
  // smooth();
  // } else {
  // noSmooth();
  // }

  // Default colors: RGB mode (0..255), white fill, black stroke.
  colorMode(RGB, 255);
  fill(255);
  stroke(0);

  // as of 0178, no longer relying on local versions of the variables
  // being set, because subclasses may need to take extra action.
  strokeWeight(DEFAULT_STROKE_WEIGHT);
  strokeJoin(DEFAULT_STROKE_JOIN);
  strokeCap(DEFAULT_STROKE_CAP);

  // init shape stuff
  shape = 0;

  // Default primitive modes.
  rectMode(CORNER);
  ellipseMode(DIAMETER);

  autoNormal = true;

  // no current font
  textFont = null;
  textSize = 12;
  textLeading = 14;
  textAlign = LEFT;
  textMode = MODEL;

  // if this fella is associated with a component, then clear its background.
  // if it's been created by someone else through createGraphics,
  // they have to call background() themselves, otherwise everything gets
  // a gray background (when just a transparent surface or an empty pdf
  // is what's desired).
  // this background() call is for the Java 2D and OpenGL renderers.
  if (primaryGraphics) {
    //System.out.println("main drawing surface bg " + getClass().getName());
    background(backgroundColor);
  }

  blendMode(BLEND);

  settingsInited = true;
  // defaultSettings() overlaps reapplySettings(), don't do both
  reapplySettings = false;
}
/**
 * Re-apply current settings. Some methods, such as textFont(), require that
 * their methods be called (rather than simply setting the textFont variable)
 * because they affect the graphics context, or they require parameters from
 * the context (e.g. getting native fonts for text).
 * <p/>
 * This will only be called from an allocate(), which is only called from
 * size(), which is safely called from inside beginDraw(). And it cannot be
 * called before defaultSettings(), so we should be safe.
 */
protected void reapplySettings() {
  // This might be called by allocate... So if beginDraw() has never run,
  // we don't want to reapply here, we actually just need to let
  // defaultSettings() get called a little from inside beginDraw().
  if (!settingsInited) return; // if this is the initial setup, no need to reapply

  colorMode(colorMode, colorModeX, colorModeY, colorModeZ);
  if (fill) {
    // PApplet.println(" fill " + PApplet.hex(fillColor));
    fill(fillColor);
  } else {
    noFill();
  }
  if (stroke) {
    stroke(strokeColor);

    // The if() statements should be handled inside the functions,
    // otherwise an actual reset/revert won't work properly.
    //if (strokeWeight != DEFAULT_STROKE_WEIGHT) {
    strokeWeight(strokeWeight);
    //}
    // if (strokeCap != DEFAULT_STROKE_CAP) {
    strokeCap(strokeCap);
    // }
    // if (strokeJoin != DEFAULT_STROKE_JOIN) {
    strokeJoin(strokeJoin);
    // }
  } else {
    noStroke();
  }
  if (tint) {
    tint(tintColor);
  } else {
    noTint();
  }
  // if (smooth) {
  // smooth();
  // } else {
  // // Don't bother setting this, cuz it'll anger P3D.
  // noSmooth();
  // }
  if (textFont != null) {
    // System.out.println(" textFont in reapply is " + textFont);
    // textFont() resets the leading, so save it in case it's changed
    float saveLeading = textLeading;
    textFont(textFont, textSize);
    textLeading(saveLeading);
  }
  textMode(textMode);
  textAlign(textAlign, textAlignY);
  background(backgroundColor);

  blendMode(blendMode);

  reapplySettings = false;
}
// inherit from PImage
//public void resize(int wide, int high){ }
//////////////////////////////////////////////////////////////
// HINTS
/**
*
* Set various hints and hacks for the renderer. This is used to handle
* obscure rendering features that cannot be implemented in a consistent
* manner across renderers. Many options will often graduate to standard
* features instead of hints over time.
* <br/> <br/>
* <b>hint(ENABLE_OPENGL_4X_SMOOTH)</b>- Enable 4x anti-aliasing for P3D. This
* can help force anti-aliasing if it has not been enabled by the user. On
* some graphics cards, this can also be set by the graphics driver's
* control panel, however not all cards make this available. This hint must
* be called immediately after the <b>size()</b> command because it resets the
* renderer, obliterating any settings and anything drawn (and like <b>size()</b>,
* re-running the code that came before it again).
* <br/> <br/>
* <b>hint(DISABLE_OPENGL_2X_SMOOTH)</b> - In Processing 1.0, Processing always
* enables 2x smoothing when the P3D renderer is used. This hint disables
* the default 2x smoothing and returns the smoothing behavior found in
* earlier releases, where <b>smooth()</b> and <b>noSmooth()</b> could be used to enable
* and disable smoothing, though the quality was inferior.
* <br/> <br/>
* <b>hint(ENABLE_NATIVE_FONTS)</b> - Use the native version fonts when they are
* installed, rather than the bitmapped version from a .vlw file. This is
* useful with the default (or JAVA2D) renderer setting, as it will improve
* font rendering speed. This is not enabled by default, because it can be
* misleading while testing because the type will look great on your
* machine (because you have the font installed) but lousy on others'
* machines if the identical font is unavailable. This option can only be
* set per-sketch, and must be called before any use of <b>textFont()</b>.
* <br/> <br/>
* <b>hint(DISABLE_DEPTH_TEST)</b> - Disable the zbuffer, allowing you to draw on
* top of everything at will. When depth testing is disabled, items will be
* drawn to the screen sequentially, like a painting. This hint is most
* often used to draw in 3D, then draw in 2D on top of it (for instance, to
* draw GUI controls in 2D on top of a 3D interface). Starting in release
* 0149, this will also clear the depth buffer. Restore the default with
* <b>hint(ENABLE_DEPTH_TEST)</b>, but note that with the depth buffer cleared,
* any 3D drawing that happens later in <b>draw()</b> will ignore existing shapes
* on the screen.
* <br/> <br/>
* <b>hint(ENABLE_DEPTH_SORT)</b> - Enable primitive z-sorting of triangles and
* lines in P3D and OPENGL. This can slow performance considerably, and the
* algorithm is not yet perfect. Restore the default with <b>hint(DISABLE_DEPTH_SORT)</b>.
* <br/> <br/>
* <b>hint(DISABLE_OPENGL_ERROR_REPORT)</b> - Speeds up the P3D renderer setting
* by not checking for errors while running. Undo with <b>hint(ENABLE_OPENGL_ERROR_REPORT)</b>.
* <br/> <br/>
* <b>hint(ENABLE_BUFFER_READING)</b> - Depth and stencil buffers in P2D/P3D will be
* down-sampled to make PGL#readPixels work with multisampling. Enabling this
* introduces some overhead, so if you experience bad performance, disable
* multisampling with <b>noSmooth()</b> instead. This hint is not intended to be
* enabled and disabled repeatedly, so call this once in <b>setup()</b> or after
* creating your PGraphics2D/3D. You can restore the default with
* <b>hint(DISABLE_BUFFER_READING)</b> if you don't plan to read depth from
* this <b>PGraphics</b> anymore.
* <br/> <br/>
* <b>hint(ENABLE_KEY_REPEAT)</b> - Auto-repeating key events are discarded
* by default (works only in P2D/P3D); use this hint to get all the key events
* (including auto-repeated). Call <b>hint(DISABLE_KEY_REPEAT)</b> to get events
* only when the key goes physically up or down.
* <br/> <br/>
* <b>hint(DISABLE_ASYNC_SAVEFRAME)</b> - P2D/P3D only - <b>save()</b> and <b>saveFrame()</b>
* will not use separate threads for saving and will block until the image
* is written to the drive. This was the default behavior in 3.0b7 and before.
* To enable, call <b>hint(ENABLE_ASYNC_SAVEFRAME)</b>.
*
* @webref rendering
* @webBrief Set various hints and hacks for the renderer
* @param which name of the hint to be enabled or disabled
* @see PGraphics
* @see PApplet#createGraphics(int, int, String, String)
* @see PApplet#size(int, int)
*/
@SuppressWarnings("deprecation")
public void hint(int which) {
  // ENABLE/DISABLE_NATIVE_FONTS is retired; point users at createFont().
  if (which == ENABLE_NATIVE_FONTS || which == DISABLE_NATIVE_FONTS) {
    showWarning("hint(ENABLE_NATIVE_FONTS) no longer supported. " +
        "Use createFont() instead.");
  }
  // Key-repeat hints are forwarded to the sketch itself.
  if (which == ENABLE_KEY_REPEAT) {
    parent.keyRepeatEnabled = true;
  } else if (which == DISABLE_KEY_REPEAT) {
    parent.keyRepeatEnabled = false;
  }
  // Positive constants enable a hint, negative constants disable it;
  // the flag always lives at the absolute value of the constant.
  boolean enable = which > 0;
  hints[enable ? which : -which] = enable;
}
//////////////////////////////////////////////////////////////
// VERTEX SHAPES
/**
 * Start a new shape of type POLYGON
 */
public void beginShape() {
  // Delegate to the kinded variant with the default POLYGON type.
  beginShape(POLYGON);
}
/**
*
* Using the <b>beginShape()</b> and <b>endShape()</b> functions allow creating
* more complex forms. <b>beginShape()</b> begins recording vertices for a shape
* and <b>endShape()</b> stops recording. The value of the <b>kind</b> parameter
* tells it which types of shapes to create from the provided vertices. With no
* mode specified, the shape can be any irregular polygon. The parameters
* available for beginShape() are POINTS, LINES, TRIANGLES, TRIANGLE_FAN,
* TRIANGLE_STRIP, QUADS, and QUAD_STRIP. After calling the <b>beginShape()</b>
* function, a series of <b>vertex()</b> commands must follow. To stop drawing
* the shape, call <b>endShape()</b>. The <b>vertex()</b> function with two
* parameters specifies a position in 2D and the <b>vertex()</b> function with
* three parameters specifies a position in 3D. Each shape will be outlined with
* the current stroke color and filled with the fill color. <br />
* <br />
* Transformations such as <b>translate()</b>, <b>rotate()</b>, and
* <b>scale()</b> do not work within <b>beginShape()</b>. It is also not
* possible to use other shapes, such as <b>ellipse()</b> or <b>rect()</b>
* within <b>beginShape()</b>. <br />
* <br />
* The P2D and P3D renderers allow <b>stroke()</b> and <b>fill()</b> to be
* altered on a per-vertex basis, but the default renderer does not. Settings
* such as <b>strokeWeight()</b>, <b>strokeCap()</b>, and <b>strokeJoin()</b>
* cannot be changed while inside a <b>beginShape()</b>/<b>endShape()</b> block
* with any renderer.
*
* @webref shape:vertex
* @webBrief Using the <b>beginShape()</b> and <b>endShape()</b> functions allow
* creating more complex forms
* @param kind Either POINTS, LINES, TRIANGLES, TRIANGLE_FAN, TRIANGLE_STRIP,
* QUADS, or QUAD_STRIP
* @see PShape
* @see PGraphics#endShape()
* @see PGraphics#vertex(float, float, float, float, float)
* @see PGraphics#curveVertex(float, float, float)
* @see PGraphics#bezierVertex(float, float, float, float, float, float, float,
* float, float)
*/
public void beginShape(int kind) {
  // Record the shape type; a non-zero value marks that a shape is
  // currently being recorded. Subclasses collect the actual geometry.
  shape = kind;
}
/**
 * Sets whether the upcoming vertex is part of an edge.
 * Equivalent to glEdgeFlag(), for people familiar with OpenGL.
 * @param edge true if subsequent vertices lie on an edge
 */
public void edge(boolean edge) {
  this.edge = edge;
}
/**
*
* Sets the current normal vector. Used for drawing three-dimensional
* shapes and surfaces, <b>normal()</b> specifies a vector perpendicular
* to a shape's surface which, in turn, determines how lighting affects it.
* Processing attempts to automatically assign normals to shapes, but since
* that's imperfect, this is a better option when you want more control.
* This function is identical to <b>glNormal3f()</b> in OpenGL.
*
* @webref lights_camera:lights
* @webBrief Sets the current normal vector
* @param nx x direction
* @param ny y direction
* @param nz z direction
* @see PGraphics#beginShape(int)
* @see PGraphics#endShape(int)
* @see PGraphics#lights()
*/
public void normal(float nx, float ny, float nz) {
  normalX = nx;
  normalY = ny;
  normalZ = nz;
  // While a shape is being recorded, each call to normal() promotes the
  // normal mode one step: AUTO -> SHAPE on the first call (one normal for
  // the whole shape), then SHAPE -> VERTEX (a separate normal per vertex).
  if (shape != 0) {
    switch (normalMode) {
      case NORMAL_MODE_AUTO:
        normalMode = NORMAL_MODE_SHAPE;
        break;
      case NORMAL_MODE_SHAPE:
        normalMode = NORMAL_MODE_VERTEX;
        break;
      default:
        break;  // already per-vertex; nothing further to promote
    }
  }
}
// Generic per-vertex attribute setters. Not implemented in the base
// class; each stub warns that the current renderer lacks attrib support.

public void attribPosition(String name, float x, float y, float z) {
  showMissingWarning("attrib");
}

public void attribNormal(String name, float nx, float ny, float nz) {
  showMissingWarning("attrib");
}

public void attribColor(String name, int color) {
  showMissingWarning("attrib");
}

public void attrib(String name, float... values) {
  showMissingWarning("attrib");
}

public void attrib(String name, int... values) {
  showMissingWarning("attrib");
}

public void attrib(String name, boolean... values) {
  showMissingWarning("attrib");
}
/**
*
* Sets the coordinate space for texture mapping. The default mode is
* <b>IMAGE</b>, which refers to the actual coordinates of the image.
* <b>NORMAL</b> refers to a normalized space of values ranging from 0 to 1.
* This function only works with the P2D and P3D renderers.<br />
* <br />
* With <b>IMAGE</b>, if an image is 100 x 200 pixels, mapping the image onto
* the entire size of a quad would require the points (0,0) (100, 0) (100,200)
* (0,200). The same mapping in <b>NORMAL</b> is (0,0) (1,0) (1,1) (0,1).
*
* @webref image:textures
* @webBrief Sets the coordinate space for texture mapping
* @param mode either IMAGE or NORMAL
* @see PGraphics#texture(PImage)
* @see PGraphics#textureWrap(int)
*/
public void textureMode(int mode) {
  // Only the two texture coordinate spaces are accepted.
  boolean supported = (mode == IMAGE) || (mode == NORMAL);
  if (!supported) {
    throw new RuntimeException("textureMode() only supports IMAGE and NORMAL");
  }
  this.textureMode = mode;
}
/**
* Defines if textures repeat or draw once within a texture map.
* The two parameters are CLAMP (the default behavior) and REPEAT.
* This function only works with the P2D and P3D renderers.
*
* @webref image:textures
* @webBrief Defines if textures repeat or draw once within a texture map
* @param wrap Either CLAMP (default) or REPEAT
* @see PGraphics#texture(PImage)
* @see PGraphics#textureMode(int)
*/
public void textureWrap(int wrap) {
  // Base class stub; only P2D/P3D implement wrap modes per the javadoc.
  showMissingWarning("textureWrap");
}
/**
* Sets a texture to be applied to vertex points. The <b>texture()</b> function
* must be called between <b>beginShape()</b> and <b>endShape()</b> and before
* any calls to <b>vertex()</b>. This function only works with the P2D and P3D
* renderers.
* <p/>
* When textures are in use, the fill color is ignored. Instead, use
* <b>tint()</b> to specify the color of the texture as it is applied to the
* shape.
*
* @webref image:textures
* @webBrief Sets a texture to be applied to vertex points
* @param image reference to a PImage object
* @see PGraphics#textureMode(int)
* @see PGraphics#textureWrap(int)
* @see PGraphics#beginShape(int)
* @see PGraphics#endShape(int)
* @see PGraphics#vertex(float, float, float, float, float)
*/
public void texture(PImage image) {
  // Store the texture; it is applied to subsequent vertex() calls.
  textureImage = image;
}
/**
* Removes texture image for current shape.
* Needs to be called between beginShape and endShape
*
*/
public void noTexture() {
  // Clear the current texture so following vertices are untextured.
  textureImage = null;
}
/** Grow the vertex table (by doubling) once it is completely full. */
protected void vertexCheck() {
  if (vertexCount == vertices.length) {
    float[][] bigger = new float[vertexCount * 2][VERTEX_FIELD_COUNT];
    System.arraycopy(vertices, 0, bigger, 0, vertexCount);
    vertices = bigger;
  }
}
/** Record a 2D vertex (z = 0) with the current style state. */
public void vertex(float x, float y) {
  vertexCheck();  // grow the vertices array if it is full
  float[] vertex = vertices[vertexCount];

  // An explicit vertex() call invalidates anything queued for curve vertices.
  curveVertexCount = 0;

  vertex[X] = x;
  vertex[Y] = y;
  vertex[Z] = 0;

  vertex[EDGE] = edge ? 1 : 0;

  // if (fill) {
  // vertex[R] = fillR;
  // vertex[G] = fillG;
  // vertex[B] = fillB;
  // vertex[A] = fillA;
  // }

  boolean textured = textureImage != null;
  if (fill || textured) {
    if (!textured) {
      vertex[R] = fillR;
      vertex[G] = fillG;
      vertex[B] = fillB;
      vertex[A] = fillA;
    } else {
      // Textured vertices take the tint color, or white when untinted.
      if (tint) {
        vertex[R] = tintR;
        vertex[G] = tintG;
        vertex[B] = tintB;
        vertex[A] = tintA;
      } else {
        vertex[R] = 1;
        vertex[G] = 1;
        vertex[B] = 1;
        vertex[A] = 1;
      }
    }
  }

  if (stroke) {
    vertex[SR] = strokeR;
    vertex[SG] = strokeG;
    vertex[SB] = strokeB;
    vertex[SA] = strokeA;
    vertex[SW] = strokeWeight;
  }

  if (textured) {
    vertex[U] = textureU;
    vertex[V] = textureV;
  }

  if (autoNormal) {
    // HAS_NORMAL = 0 marks a near-zero (unusable) normal; otherwise the
    // current normal is normalized in place when needed.
    float norm2 = normalX * normalX + normalY * normalY + normalZ * normalZ;
    if (norm2 < EPSILON) {
      vertex[HAS_NORMAL] = 0;
    } else {
      if (Math.abs(norm2 - 1) > EPSILON) {
        // The normal vector is not normalized.
        float norm = PApplet.sqrt(norm2);
        normalX /= norm;
        normalY /= norm;
        normalZ /= norm;
      }
      vertex[HAS_NORMAL] = 1;
    }
  } else {
    vertex[HAS_NORMAL] = 1;
  }

  vertexCount++;
}
/** Record a 3D vertex with the current style and material state. */
public void vertex(float x, float y, float z) {
  vertexCheck();  // grow the vertices array if it is full
  float[] vertex = vertices[vertexCount];

  // only do this if we're using an irregular (POLYGON) shape that
  // will go through the triangulator. otherwise it'll do things like
  // disappear in mathematically odd ways
  // https://download.processing.org/bugzilla/444.html
  if (shape == POLYGON) {
    if (vertexCount > 0) {
      float[] pvertex = vertices[vertexCount-1];
      if ((Math.abs(pvertex[X] - x) < EPSILON) &&
          (Math.abs(pvertex[Y] - y) < EPSILON) &&
          (Math.abs(pvertex[Z] - z) < EPSILON)) {
        // this vertex is identical, don't add it,
        // because it will anger the triangulator
        return;
      }
    }
  }

  // User called vertex(), so that invalidates anything queued up for curve
  // vertices. If this is internally called by curveVertexSegment,
  // then curveVertexCount will be saved and restored.
  curveVertexCount = 0;

  vertex[X] = x;
  vertex[Y] = y;
  vertex[Z] = z;

  vertex[EDGE] = edge ? 1 : 0;

  boolean textured = textureImage != null;
  if (fill || textured) {
    if (!textured) {
      vertex[R] = fillR;
      vertex[G] = fillG;
      vertex[B] = fillB;
      vertex[A] = fillA;
    } else {
      // Textured vertices take the tint color, or white when untinted.
      if (tint) {
        vertex[R] = tintR;
        vertex[G] = tintG;
        vertex[B] = tintB;
        vertex[A] = tintA;
      } else {
        vertex[R] = 1;
        vertex[G] = 1;
        vertex[B] = 1;
        vertex[A] = 1;
      }
    }

    // Material properties used by the lighting pipeline.
    vertex[AR] = ambientR;
    vertex[AG] = ambientG;
    vertex[AB] = ambientB;

    vertex[SPR] = specularR;
    vertex[SPG] = specularG;
    vertex[SPB] = specularB;
    //vertex[SPA] = specularA;

    vertex[SHINE] = shininess;

    vertex[ER] = emissiveR;
    vertex[EG] = emissiveG;
    vertex[EB] = emissiveB;
  }

  if (stroke) {
    vertex[SR] = strokeR;
    vertex[SG] = strokeG;
    vertex[SB] = strokeB;
    vertex[SA] = strokeA;
    vertex[SW] = strokeWeight;
  }

  if (textured) {
    vertex[U] = textureU;
    vertex[V] = textureV;
  }

  if (autoNormal) {
    // HAS_NORMAL = 0 marks a near-zero (unusable) normal; otherwise the
    // current normal is normalized in place when needed.
    float norm2 = normalX * normalX + normalY * normalY + normalZ * normalZ;
    if (norm2 < EPSILON) {
      vertex[HAS_NORMAL] = 0;
    } else {
      if (Math.abs(norm2 - 1) > EPSILON) {
        // The normal vector is not normalized.
        float norm = PApplet.sqrt(norm2);
        normalX /= norm;
        normalY /= norm;
        normalZ /= norm;
      }
      vertex[HAS_NORMAL] = 1;
    }
  } else {
    vertex[HAS_NORMAL] = 1;
  }

  vertex[NX] = normalX;
  vertex[NY] = normalY;
  vertex[NZ] = normalZ;

  vertex[BEEN_LIT] = 0;

  vertexCount++;
}
/**
 * Used by renderer subclasses or PShape to efficiently pass in already
 * formatted vertex information.
 * @param v vertex parameters, as a float array of length VERTEX_FIELD_COUNT
 */
public void vertex(float[] v) {
  vertexCheck();
  // An explicit vertex invalidates any queued curve vertices.
  curveVertexCount = 0;
  // Copy the pre-formatted attribute row straight into the table.
  System.arraycopy(v, 0, vertices[vertexCount], 0, VERTEX_FIELD_COUNT);
  vertexCount++;
}
/** Record a textured 2D vertex; (u, v) are set before the position. */
public void vertex(float x, float y, float u, float v) {
  vertexTexture(u, v);
  vertex(x, y);
}
/**
*
* All shapes are constructed by connecting a series of vertices.
* <b>vertex()</b> is used to specify the vertex coordinates for points, lines,
* triangles, quads, and polygons. It is used exclusively within the
* <b>beginShape()</b> and <b>endShape()</b> functions. <br />
* <br />
* Drawing a vertex in 3D using the <b>z</b> parameter requires the P3D
* parameter in combination with size, as shown in the above example. <br />
* <br />
* This function is also used to map a texture onto geometry. The
* <b>texture()</b> function declares the texture to apply to the geometry and
* the <b>u</b> and <b>v</b> coordinates set define the mapping of this texture
* to the form. By default, the coordinates used for <b>u</b> and <b>v</b> are
* specified in relation to the image's size in pixels, but this relation can be
* changed with <b>textureMode()</b>.
*
* @webref shape:vertex
* @webBrief All shapes are constructed by connecting a series of vertices
* @param x x-coordinate of the vertex
* @param y y-coordinate of the vertex
* @param z z-coordinate of the vertex
* @param u horizontal coordinate for the texture mapping
* @param v vertical coordinate for the texture mapping
* @see PGraphics#beginShape(int)
* @see PGraphics#endShape(int)
* @see PGraphics#bezierVertex(float, float, float, float, float, float, float,
* float, float)
* @see PGraphics#quadraticVertex(float, float, float, float, float, float)
* @see PGraphics#curveVertex(float, float, float)
* @see PGraphics#texture(PImage)
*/
public void vertex(float x, float y, float z, float u, float v) {
  // Texture coords first so the 3D vertex picks them up.
  vertexTexture(u, v);
  vertex(x, y, z);
}
// /**
// * Internal method to copy all style information for the given vertex.
// * Can be overridden by subclasses to handle only properties pertinent to
// * that renderer. (e.g. no need to copy the emissive color in P2D)
// */
// protected void vertexStyle() {
// }
/**
 * Stash the (u, v) texture coordinates for the next vertex in the current
 * shape. With textureMode(IMAGE) the incoming values are in pixels and are
 * divided by the texture's dimensions; with textureMode(NORMAL) they are
 * already expected in the 0..1 range. Either way the stored values are
 * clamped to [0, 1].
 * <p/>
 * Used by both PGraphics2D (for images) and PGraphics3D.
 */
protected void vertexTexture(float u, float v) {
  if (textureImage == null) {
    throw new RuntimeException("You must first call texture() before " +
                               "using u and v coordinates with vertex()");
  }
  if (textureMode == IMAGE) {
    u /= textureImage.width;
    v /= textureImage.height;
  }
  // Pin out-of-range coordinates to the nearest edge of [0, 1].
  textureU = (u < 0) ? 0 : Math.min(u, 1);
  textureV = (v < 0) ? 0 : Math.min(v, 1);
}
// /** This feature is in testing, do not use or rely upon its implementation */
// public void breakShape() {
// showWarning("This renderer cannot currently handle concave shapes, " +
// "or shapes with holes.");
// }
/**
* Use the <b>beginContour()</b> and <b>endContour()</b> function to
* create negative shapes within shapes such as the center of the
* letter "O". <b>beginContour()</b> begins recording vertices for the
* shape and <b>endContour()</b> stops recording. The vertices that
* define a negative shape must "wind" in the opposite direction from
* the exterior shape. First draw vertices for the exterior shape in
* clockwise order, then for internal shapes, draw vertices counterclockwise.<br />
* <br />
* These functions can only be used within a <b>beginShape()</b>/<b>endShape()</b>
* pair and transformations such as <b>translate()</b>, <b>rotate()</b>, and
* <b>scale()</b> do not work within a <b>beginContour()</b>/<b>endContour()</b>
* pair. It is also not possible to use other shapes, such as <b>ellipse()</b>
* or <b>rect()</b> within.
*
* @webref shape:vertex
* @webBrief Begins recording vertices for the shape
*/
  public void beginContour() {
    // Base implementation: contours are renderer-specific, so subclasses
    // that support them override this; here we only warn that the
    // feature is unavailable in this renderer.
    showMissingWarning("beginContour");
  }
/**
* Use the <b>beginContour()</b> and <b>endContour()</b> function to
* create negative shapes within shapes such as the center of the
* letter "O". <b>beginContour()</b> begins recording vertices for
* the shape and <b>endContour()</b> stops recording. The vertices
* that define a negative shape must "wind" in the opposite direction
* from the exterior shape. First draw vertices for the exterior shape
* in clockwise order, then for internal shapes, draw vertices counterclockwise.<br />
* <br />
* These functions can only be used within a <b>beginShape()</b>/<b>endShape()</b>
* pair and transformations such as <b>translate()</b>, <b>rotate()</b>, and
* <b>scale()</b> do not work within a <b>beginContour()</b>/<b>endContour()</b>
* pair. It is also not possible to use other shapes, such as <b>ellipse()</b>
* or <b>rect()</b> within.
*
* @webref shape:vertex
* @webBrief Stops recording vertices for the shape
*/
  public void endContour() {
    // Base implementation: mirrors beginContour() — renderers that
    // support contours override this; otherwise just warn.
    showMissingWarning("endContour");
  }
  public void endShape() {
    // Default to OPEN so the shape's outline is not automatically closed;
    // callers pass CLOSE explicitly via endShape(int) to connect the
    // first and last vertices.
    endShape(OPEN);
  }
/**
*
* The <b>endShape()</b> function is the companion to <b>beginShape()</b>
   * and may only be called after <b>beginShape()</b>. When <b>endShape()</b>
   * is called, all the image data defined since the previous call to
   * <b>beginShape()</b> is written into the image buffer. Use the constant
   * CLOSE as the value for the MODE parameter to close the shape (to connect
   * the beginning and the end).
*
* @webref shape:vertex
* @webBrief the companion to <b>beginShape()</b> and may only be called after <b>beginShape()</b>
* @param mode use CLOSE to close the shape
* @see PShape
* @see PGraphics#beginShape(int)
*/
  public void endShape(int mode) {
    // No-op in the base class; concrete renderers override this to write
    // out the vertex data recorded since the matching beginShape() call.
  }
//////////////////////////////////////////////////////////////
// SHAPE I/O
/**
* Loads geometry into a variable of type <b>PShape</b>. SVG and OBJ
* files may be loaded. To load correctly, the file must be located
* in the data directory of the current sketch. In most cases,
* <b>loadShape()</b> should be used inside <b>setup()</b> because
* loading shapes inside <b>draw()</b> will reduce the speed of a sketch.<br />
* <br />
   * Alternatively, the file may be loaded from anywhere on the local
* computer using an absolute path (something that starts with / on
* Unix and Linux, or a drive letter on Windows), or the filename
* parameter can be a URL for a file found on a network.<br />
* <br />
* If the file is not available or an error occurs, <b>null</b> will
* be returned and an error message will be printed to the console.
* The error message does not halt the program, however the null value
* may cause a NullPointerException if your code does not check whether
* the value returned is null.<br />
*
* @webref shape
* @webBrief Loads geometry into a variable of type <b>PShape</b>
* @param filename name of file to load, can be .svg or .obj
* @see PShape
* @see PApplet#createShape()
*/
  public PShape loadShape(String filename) {
    // Delegate to the two-argument variant with no extra options.
    return loadShape(filename, null);
  }
/**
* @nowebref
*/
  public PShape loadShape(String filename, String options) {
    // Base implementation: shape loading is renderer-specific and not
    // supported here; warn and return null (per the loadShape() docs,
    // callers must check the return value for null).
    showMissingWarning("loadShape");
    return null;
  }
//////////////////////////////////////////////////////////////
// SHAPE CREATION
/**
* The <b>createShape()</b> function is used to define a new shape.
* Once created, this shape can be drawn with the <b>shape()</b>
* function. The basic way to use the function defines new primitive
* shapes. One of the following parameters are used as the first
* parameter: <b>ELLIPSE</b>, <b>RECT</b>, <b>ARC</b>, <b>TRIANGLE</b>,
* <b>SPHERE</b>, <b>BOX</b>, <b>QUAD</b>, or <b>LINE</b>. The
* parameters for each of these different shapes are the same as their
* corresponding functions: <b>ellipse()</b>, <b>rect()</b>, <b>arc()</b>,
* <b>triangle()</b>, <b>sphere()</b>, <b>box()</b>, <b>quad()</b>, and
* <b>line()</b>. The first example above clarifies how this works.<br />
* <br />
* Custom, unique shapes can be made by using <b>createShape()</b> without
* a parameter. After the shape is started, the drawing attributes and
* geometry can be set directly to the shape within the <b>beginShape()</b>
* and <b>endShape()</b> methods. See the second example above for specifics,
* and the reference for <b>beginShape()</b> for all of its options.<br />
* <br />
* The <b>createShape()</b> function can also be used to make a complex
* shape made of other shapes. This is called a "group" and it's created by
* using the parameter <b>GROUP</b> as the first parameter. See the fourth
* example above to see how it works.<br />
* <br />
* After using <b>createShape()</b>, stroke and fill color can be set by
* calling methods like <b>setFill()</b> and <b>setStroke()</b>, as seen
* in the examples above. The complete list of methods and fields for the
* PShape
|
PGraphics
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
|
{
"start": 24078,
"end": 24444
}
|
interface ____ {
String PREFIX = HdfsClientConfigKeys.PREFIX + "hedged.read.";
String THRESHOLD_MILLIS_KEY = PREFIX + "threshold.millis";
long THRESHOLD_MILLIS_DEFAULT = 500;
String THREADPOOL_SIZE_KEY = PREFIX + "threadpool.size";
int THREADPOOL_SIZE_DEFAULT = 0;
}
/** dfs.client.read.striped configuration properties */
|
HedgedRead
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jersey/src/test/java/org/springframework/boot/jersey/autoconfigure/JerseyAutoConfigurationCustomApplicationTests.java
|
{
"start": 2476,
"end": 2757
}
|
class ____ {
@GET
public String message() {
return "Hello World";
}
}
@Configuration(proxyBeanMethods = false)
@Import({ TomcatServletWebServerAutoConfiguration.class, JerseyAutoConfiguration.class,
PropertyPlaceholderAutoConfiguration.class })
static
|
TestController
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/CollectionDataType.java
|
{
"start": 4977,
"end": 5115
}
|
class
____ elementDataType.bridgedTo(conversionClass.getComponentType());
}
return elementDataType;
}
}
|
return
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java
|
{
"start": 39825,
"end": 40413
}
|
class ____ extends DstatementContext {
public TerminalNode BREAK() {
return getToken(PainlessParser.BREAK, 0);
}
public BreakContext(DstatementContext ctx) {
copyFrom(ctx);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor<? extends T>) visitor).visitBreak(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static
|
BreakContext
|
java
|
apache__camel
|
components/camel-google/camel-google-calendar/src/generated/java/org/apache/camel/component/google/calendar/CalendarChannelsEndpointConfiguration.java
|
{
"start": 891,
"end": 1480
}
|
class ____ extends GoogleCalendarConfiguration {
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "stop", description="The com.google.api.services.calendar.model.Channel")})
private com.google.api.services.calendar.model.Channel contentChannel;
public com.google.api.services.calendar.model.Channel getContentChannel() {
return contentChannel;
}
public void setContentChannel(com.google.api.services.calendar.model.Channel contentChannel) {
this.contentChannel = contentChannel;
}
}
|
CalendarChannelsEndpointConfiguration
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/PropertiesBeanDefinitionReader.java
|
{
"start": 5879,
"end": 5995
}
|
class ____ common attributes for all views.
* View definitions that define their own parent or carry their own
*
|
and
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/InvokeCallMemberNode.java
|
{
"start": 616,
"end": 1222
}
|
class ____ extends ArgumentsNode {
/* ---- begin visitor ---- */
@Override
public <Scope> void visit(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
irTreeVisitor.visitInvokeCallMember(this, scope);
}
@Override
public <Scope> void visitChildren(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
for (ExpressionNode argumentNode : getArgumentNodes()) {
argumentNode.visit(irTreeVisitor, scope);
}
}
/* ---- end visitor ---- */
public InvokeCallMemberNode(Location location) {
super(location);
}
}
|
InvokeCallMemberNode
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/worldclock/WorldClockServer.java
|
{
"start": 1190,
"end": 1892
}
|
class ____ {
static final int PORT = Integer.parseInt(System.getProperty("port", "8463"));
public static void main(String[] args) throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
try {
ServerBootstrap b = new ServerBootstrap();
b.group(group)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new WorldClockServerInitializer(ServerUtil.buildSslContext()));
b.bind(PORT).sync().channel().closeFuture().sync();
} finally {
group.shutdownGracefully();
}
}
}
|
WorldClockServer
|
java
|
apache__camel
|
components/camel-mail/src/test/java/org/apache/camel/component/mail/security/SslContextParametersMailRouteTest.java
|
{
"start": 1795,
"end": 4509
}
|
class ____ extends CamelTestSupport {
private String email = "USERNAME@gmail.com";
private String username = "USERNAME@gmail.com";
private String imapHost = "imap.gmail.com";
private String smtpHost = "smtp.gmail.com";
private String password = "PASSWORD";
@BindToRegistry("sslContextParameters")
private SSLContextParameters params = MailTestHelper.createSslContextParameters();
@Test
public void testSendAndReceiveMails() throws Exception {
context.addRoutes(new RouteBuilder() {
public void configure() {
from("imaps://" + imapHost + "?username=" + username + "&password=" + password
+ "&delete=false&unseen=true&fetchSize=1&useFixedDelay=true&initialDelay=100&delay=100").to("mock:in");
from("direct:in").to("smtps://" + smtpHost + "?username=" + username + "&password=" + password);
}
});
context.start();
MockEndpoint resultEndpoint = getMockEndpoint("mock:in");
resultEndpoint.expectedBodiesReceived("Test Email Body\r\n");
Map<String, Object> headers = new HashMap<>();
headers.put("To", email);
headers.put("From", email);
headers.put(MailConstants.MAIL_REPLY_TO, email);
headers.put("Subject", "SSL/TLS Test");
template.sendBodyAndHeaders("direct:in", "Test Email Body", headers);
resultEndpoint.assertIsSatisfied();
}
@Test
public void testSendAndReceiveMailsWithCustomTrustStore() throws Exception {
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:in").to("smtps://" + smtpHost + "?username=" + username + "&password=" + password
+ "&sslContextParameters=#sslContextParameters");
}
});
context.start();
Map<String, Object> headers = new HashMap<>();
headers.put("To", email);
headers.put("From", email);
headers.put(MailConstants.MAIL_REPLY_TO, email);
headers.put("Subject", "SSL/TLS Test");
try {
template.sendBodyAndHeaders("direct:in", "Test Email Body", headers);
fail("Should have thrown exception");
} catch (CamelExecutionException e) {
assertTrue(e.getCause().getCause() instanceof SSLHandshakeException);
assertTrue(e.getCause().getCause().getMessage()
.contains("unable to find valid certification path to requested target"));
}
}
/**
* Stop Camel startup.
*/
@Override
public boolean isUseAdviceWith() {
return true;
}
}
|
SslContextParametersMailRouteTest
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileOperations.java
|
{
"start": 887,
"end": 6131
}
|
interface ____<T> {
/**
* Creates a new instance of {@link GenericFile}
*/
GenericFile<T> newGenericFile();
/**
* Sets the endpoint as some implementations need access to the endpoint and how its configured.
*
* @param endpoint the endpoint
*/
void setEndpoint(GenericFileEndpoint<T> endpoint);
/**
* Deletes the file name by name, relative to the current directory
*
* @param name name of the file
* @return true if deleted, false if not
* @throws GenericFileOperationFailedException can be thrown
*/
boolean deleteFile(String name) throws GenericFileOperationFailedException;
/**
* Determines whether the files exists or not
*
* @param name name of the file
* @return true if exists, false if not
* @throws GenericFileOperationFailedException can be thrown
*/
boolean existsFile(String name) throws GenericFileOperationFailedException;
/**
* Renames the file
*
* @param from original name
* @param to the new name
* @return true if renamed, false if not
* @throws GenericFileOperationFailedException can be thrown
*/
boolean renameFile(String from, String to) throws GenericFileOperationFailedException;
/**
* Builds the directory structure. Will test if the folder already exists.
*
* @param directory the directory path to build as a relative string name
* @param absolute whether the directory is an absolute or relative path
* @return true if build or already exists, false if not possible (could be lack
* of permissions)
* @throws GenericFileOperationFailedException can be thrown
*/
boolean buildDirectory(String directory, boolean absolute) throws GenericFileOperationFailedException;
/**
* Retrieves the file
*
* @param name name of the file
* @param exchange stream to write the content of the file into
* @param size the total file size to retrieve, if possible to determine
* @return true if file has been retrieved, false if not
* @throws GenericFileOperationFailedException can be thrown
*/
boolean retrieveFile(String name, Exchange exchange, long size) throws GenericFileOperationFailedException;
/**
* Releases the resources consumed by a retrieved file
*
* @param exchange exchange with the content of the file
* @throws GenericFileOperationFailedException can be thrown
*/
void releaseRetrievedFileResources(Exchange exchange) throws GenericFileOperationFailedException;
/**
* Stores the content as a new remote file (upload)
*
* @param name name of new file
* @param exchange with the content content of the file
* @param size the total file size to store, if possible to determine
* @return true if the file was stored, false if not
* @throws GenericFileOperationFailedException can be thrown
*/
boolean storeFile(String name, Exchange exchange, long size) throws GenericFileOperationFailedException;
/**
* Gets the current remote directory
*
* @return the current directory path
* @throws GenericFileOperationFailedException can be thrown
*/
String getCurrentDirectory() throws GenericFileOperationFailedException;
/**
* Change the current remote directory
*
* @param path the path to change to
* @throws GenericFileOperationFailedException can be thrown
*/
void changeCurrentDirectory(String path) throws GenericFileOperationFailedException;
/**
* Change the current remote directory to the parent
*
* @throws GenericFileOperationFailedException can be thrown
*/
void changeToParentDirectory() throws GenericFileOperationFailedException;
/**
* List the files in the current directory
*
* @return a list of backing objects representing the files
* @throws GenericFileOperationFailedException can be thrown
*/
T[] listFiles() throws GenericFileOperationFailedException;
/**
* List the files in the given remote directory
*
* @param path the remote directory
* @return a list of backing objects representing the files
* @throws GenericFileOperationFailedException can be thrown
*/
T[] listFiles(String path) throws GenericFileOperationFailedException;
}
|
GenericFileOperations
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/InlineDelegateByteBuddyMockMaker.java
|
{
"start": 40869,
"end": 43818
}
|
class ____ implements MockedConstruction.Context {
private static final Map<String, Class<?>> PRIMITIVES = new HashMap<>();
static {
PRIMITIVES.put(boolean.class.getName(), boolean.class);
PRIMITIVES.put(byte.class.getName(), byte.class);
PRIMITIVES.put(short.class.getName(), short.class);
PRIMITIVES.put(char.class.getName(), char.class);
PRIMITIVES.put(int.class.getName(), int.class);
PRIMITIVES.put(long.class.getName(), long.class);
PRIMITIVES.put(float.class.getName(), float.class);
PRIMITIVES.put(double.class.getName(), double.class);
}
private int count;
private final Object[] arguments;
private final Class<?> type;
private final String[] parameterTypeNames;
private InlineConstructionMockContext(
Object[] arguments, Class<?> type, String[] parameterTypeNames) {
this.arguments = arguments;
this.type = type;
this.parameterTypeNames = parameterTypeNames;
}
@Override
public int getCount() {
if (count == 0) {
throw new MockitoConfigurationException(
"mocked construction context is not initialized");
}
return count;
}
@Override
public Constructor<?> constructor() {
Class<?>[] parameterTypes = new Class<?>[parameterTypeNames.length];
int index = 0;
for (String parameterTypeName : parameterTypeNames) {
if (PRIMITIVES.containsKey(parameterTypeName)) {
parameterTypes[index++] = PRIMITIVES.get(parameterTypeName);
} else {
try {
parameterTypes[index++] =
Class.forName(parameterTypeName, false, type.getClassLoader());
} catch (ClassNotFoundException e) {
throw new MockitoException(
"Could not find parameter of type " + parameterTypeName, e);
}
}
}
try {
return type.getDeclaredConstructor(parameterTypes);
} catch (NoSuchMethodException e) {
throw new MockitoException(
join(
"Could not resolve constructor of type",
"",
type.getName(),
"",
"with arguments of types",
Arrays.toString(parameterTypes)),
e);
}
}
@Override
public List<?> arguments() {
return Collections.unmodifiableList(Arrays.asList(arguments));
}
}
}
|
InlineConstructionMockContext
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/targetclass/mixed/PostConstructOnTargetClassAndOutsideAndManySuperclassesWithOverridesTest.java
|
{
"start": 2096,
"end": 2495
}
|
class ____ extends Charlie {
static final List<String> invocations = new ArrayList<>();
@Override
@PostConstruct
void intercept() throws Exception {
invocations.add(MyBean.class.getSimpleName());
}
}
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
@InterceptorBinding
@
|
MyBean
|
java
|
quarkusio__quarkus
|
extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/mutiny/MultiIntBean.java
|
{
"start": 374,
"end": 1083
}
|
class ____ {
public static final String INT_STREAM = "number-producer";
public static final String EVEN_STREAM = "even-numbers-producer";
final List<Integer> evenNumbers = Collections.synchronizedList(new ArrayList<>());
@Outgoing(INT_STREAM)
public Multi<Integer> produceInts() {
return Multi.createFrom().range(1, 7);
}
@Incoming(INT_STREAM)
@Outgoing(EVEN_STREAM)
public Multi<Integer> timesTwo(Multi<Integer> input) {
return input.map(i -> i * 2);
}
@Incoming(EVEN_STREAM)
public void collect(Integer input) {
evenNumbers.add(input);
}
public List<Integer> getEvenNumbers() {
return evenNumbers;
}
}
|
MultiIntBean
|
java
|
spring-projects__spring-boot
|
buildSrc/src/test/java/org/springframework/boot/build/architecture/annotations/TestDeprecatedConfigurationProperty.java
|
{
"start": 1012,
"end": 1425
}
|
interface ____ {
/**
* The reason for the deprecation.
* @return the deprecation reason
*/
String reason() default "";
/**
* The field that should be used instead (if any).
* @return the replacement field
*/
String replacement() default "";
/**
* The version in which the property became deprecated.
* @return the version
*/
String since() default "";
}
|
TestDeprecatedConfigurationProperty
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/IsNormal.java
|
{
"start": 615,
"end": 898
}
|
class ____ implements BooleanSupplier {
private final LaunchMode launchMode;
public IsNormal(LaunchMode launchMode) {
this.launchMode = launchMode;
}
@Override
public boolean getAsBoolean() {
return launchMode == LaunchMode.NORMAL;
}
}
|
IsNormal
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/convert/PolymorphicUpdateValueTest.java
|
{
"start": 734,
"end": 1353
}
|
class ____ extends Parent {
public int w;
public int h;
}
/*
/********************************************************
/* Unit tests
/********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testPolymorphicTest() throws Exception
{
Child c = new Child();
c.w = 10;
c.h = 11;
MAPPER.readerForUpdating(c).readValue("{\"x\":3,\"y\":4,\"w\":111}");
assertEquals(3, c.x);
assertEquals(4, c.y);
assertEquals(111, c.w);
}
}
|
Child
|
java
|
elastic__elasticsearch
|
x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/L2ScoreNormalizerTests.java
|
{
"start": 392,
"end": 2168
}
|
class ____ extends ESTestCase {
public void testNormalizeTypicalVector() {
ScoreDoc[] docs = { new ScoreDoc(1, 3.0f, 0), new ScoreDoc(2, 4.0f, 0) };
ScoreDoc[] normalized = L2ScoreNormalizer.INSTANCE.normalizeScores(docs);
assertEquals(0.6f, normalized[0].score, 1e-5);
assertEquals(0.8f, normalized[1].score, 1e-5);
}
public void testAllZeros() {
ScoreDoc[] docs = { new ScoreDoc(1, 0.0f, 0), new ScoreDoc(2, 0.0f, 0) };
ScoreDoc[] normalized = L2ScoreNormalizer.INSTANCE.normalizeScores(docs);
assertEquals(0.0f, normalized[0].score, 0.0f);
assertEquals(0.0f, normalized[1].score, 0.0f);
}
public void testAllNaN() {
ScoreDoc[] docs = { new ScoreDoc(1, Float.NaN, 0), new ScoreDoc(2, Float.NaN, 0) };
ScoreDoc[] normalized = L2ScoreNormalizer.INSTANCE.normalizeScores(docs);
assertTrue(Float.isNaN(normalized[0].score));
assertTrue(Float.isNaN(normalized[1].score));
}
public void testMixedZeroAndNaN() {
ScoreDoc[] docs = { new ScoreDoc(1, 0.0f, 0), new ScoreDoc(2, Float.NaN, 0) };
ScoreDoc[] normalized = L2ScoreNormalizer.INSTANCE.normalizeScores(docs);
assertEquals(0.0f, normalized[0].score, 0.0f);
assertTrue(Float.isNaN(normalized[1].score));
}
public void testSingleElement() {
ScoreDoc[] docs = { new ScoreDoc(1, 42.0f, 0) };
ScoreDoc[] normalized = L2ScoreNormalizer.INSTANCE.normalizeScores(docs);
assertEquals(1.0f, normalized[0].score, 1e-5);
}
public void testEmptyArray() {
ScoreDoc[] docs = {};
ScoreDoc[] normalized = L2ScoreNormalizer.INSTANCE.normalizeScores(docs);
assertEquals(0, normalized.length);
}
}
|
L2ScoreNormalizerTests
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/customizers/ContextCustomizerTests.java
|
{
"start": 2791,
"end": 3025
}
|
class ____ extends DefaultTestContextBootstrapper {
@Override
protected List<ContextCustomizerFactory> getContextCustomizerFactories() {
return List.of(new EnigmaContextCustomizerFactory());
}
}
}
|
EnigmaTestContextBootstrapper
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/fielddata/SourceValueFetcherIndexFieldData.java
|
{
"start": 1326,
"end": 4041
}
|
class ____<T> implements IndexFieldData.Builder {
protected final String fieldName;
protected final ValuesSourceType valuesSourceType;
protected final ValueFetcher valueFetcher;
protected final SourceProvider sourceProvider;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
public Builder(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceProvider sourceProvider,
ToScriptFieldFactory<T> toScriptFieldFactory
) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.valueFetcher = valueFetcher;
this.sourceProvider = sourceProvider;
this.toScriptFieldFactory = toScriptFieldFactory;
}
}
protected final String fieldName;
protected final ValuesSourceType valuesSourceType;
protected final ValueFetcher valueFetcher;
protected final SourceProvider sourceProvider;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
protected SourceValueFetcherIndexFieldData(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceProvider sourceProvider,
ToScriptFieldFactory<T> toScriptFieldFactory
) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.valueFetcher = valueFetcher;
this.sourceProvider = sourceProvider;
this.toScriptFieldFactory = toScriptFieldFactory;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return valuesSourceType;
}
@Override
public SourceValueFetcherLeafFieldData<T> load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
throw new IllegalArgumentException("not supported for source fallback");
}
@Override
public BucketedSort newBucketedSort(
BigArrays bigArrays,
Object missingValue,
MultiValueMode sortMode,
XFieldComparatorSource.Nested nested,
SortOrder sortOrder,
DocValueFormat format,
int bucketSize,
BucketedSort.ExtraData extra
) {
throw new IllegalArgumentException("not supported for source fallback");
}
public abstract static
|
Builder
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OAuth2LoginConfigurerTests.java
|
{
"start": 38675,
"end": 39357
}
|
class ____ extends CommonSecurityFilterChainConfig
implements ApplicationListener<AuthenticationSuccessEvent> {
static List<AuthenticationSuccessEvent> EVENTS = new ArrayList<>();
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.oauth2Login((login) -> login
.clientRegistrationRepository(
new InMemoryClientRegistrationRepository(GOOGLE_CLIENT_REGISTRATION)));
// @formatter:on
return super.configureFilterChain(http);
}
@Override
public void onApplicationEvent(AuthenticationSuccessEvent event) {
EVENTS.add(event);
}
}
@Configuration
@EnableWebSecurity
static
|
OAuth2LoginConfig
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterInfo.java
|
{
"start": 1090,
"end": 1403
}
|
class ____ {
protected String name;
protected long value;
public TaskCounterInfo() {
}
public TaskCounterInfo(String name, long value) {
this.name = name;
this.value = value;
}
public String getName() {
return name;
}
public long getValue() {
return value;
}
}
|
TaskCounterInfo
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/android/FragmentNotInstantiableTest.java
|
{
"start": 3481,
"end": 3711
}
|
class ____ extends ParentFragment {
public DerivedFragmentNoConstructor(int x) {}
}
// BUG: Diagnostic contains: nullary constructor
public static
|
DerivedFragmentNoConstructor
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/asm/ClassReader.java
|
{
"start": 158296,
"end": 159185
}
|
class ____ or adapters.</i>
*
* @param constantPoolEntryIndex the index a constant pool entry in the class's constant pool
* table.
* @return the start offset in this {@link ClassReader} of the corresponding JVMS 'cp_info'
* structure, plus one.
*/
public int getItem(final int constantPoolEntryIndex) {
return cpInfoOffsets[constantPoolEntryIndex];
}
/**
* Returns a conservative estimate of the maximum length of the strings contained in the class's
* constant pool table.
*
* @return a conservative estimate of the maximum length of the strings contained in the class's
* constant pool table.
*/
public int getMaxStringLength() {
return maxStringLength;
}
/**
* Reads a byte value in this {@link ClassReader}. <i>This method is intended for {@link
* Attribute} sub classes, and is normally not needed by
|
generators
|
java
|
processing__processing4
|
java/src/processing/mode/java/JavaEditor.java
|
{
"start": 2279,
"end": 70071
}
|
class ____ extends Editor {
JavaMode jmode;
// Runner associated with this editor window
private Runner runtime;
private boolean runtimeLaunchRequested;
private final Object runtimeLock = new Object[0];
// Need to sort through the rest of these additions [fry]
protected final List<LineHighlight> breakpointedLines = new ArrayList<>();
protected LineHighlight currentLine; // where the debugger is suspended
protected final String breakpointMarkerComment = " //<>//";
JMenu modeMenu;
// protected JMenuItem inspectorItem;
// static final int ERROR_TAB_INDEX = 0;
protected PreprocService preprocService;
protected Debugger debugger;
final private InspectMode inspect;
final private ShowUsage usage;
final private Rename rename;
final private ErrorChecker errorChecker;
// set true to show AST debugging window
static private final boolean SHOW_AST_VIEWER = false;
private ASTViewer astViewer;
/** P5 in decimal; if there are complaints, move to preferences.txt */
static final int REFERENCE_PORT = 8053;
// weird to link to a specific location like this, but it's versioned, so:
static final String REFERENCE_URL =
"https://github.com/processing/processing4/releases/tag/processing-1300-4.4.0";
static final String REFERENCE_URL_2 = "https://github.com/processing/processing4/releases/download/processing-1300-4.4.0/processing-4.4.0-reference.zip";
Boolean useReferenceServer;
ReferenceServer referenceServer;
protected JavaEditor(Base base, String path, EditorState state,
Mode mode) throws EditorException {
super(base, path, state, mode);
jmode = (JavaMode) mode;
debugger = new Debugger(this);
debugger.populateMenu(modeMenu);
// set breakpoints from marker comments
for (LineID lineID : stripBreakpointComments()) {
//System.out.println("setting: " + lineID);
debugger.setBreakpoint(lineID);
}
// setting breakpoints will flag sketch as modified, so override this here
getSketch().setModified(false);
preprocService = new PreprocService(this.jmode, this.sketch);
usage = new ShowUsage(this, preprocService);
inspect = new InspectMode(this, preprocService, usage);
rename = new Rename(this, preprocService, usage);
if (SHOW_AST_VIEWER) {
astViewer = new ASTViewer(this, preprocService);
}
errorChecker = new ErrorChecker(this::setProblemList, preprocService);
for (SketchCode code : getSketch().getCode()) {
Document document = code.getDocument();
addDocumentListener(document);
}
sketchChanged();
Toolkit.setMenuMnemonics(textarea.getRightClickPopup());
// ensure completion is hidden when editor loses focus
addWindowFocusListener(new WindowFocusListener() {
public void windowLostFocus(WindowEvent e) {
getJavaTextArea().hideSuggestion();
}
public void windowGainedFocus(WindowEvent e) { }
});
}
public PdePreprocessor createPreprocessor(final String sketchName) {
return PdePreprocessor.builderFor(sketchName).build();
}
protected JEditTextArea createTextArea() {
return new JavaTextArea(new PdeTextAreaDefaults(), this);
}
public EditorToolbar createToolbar() {
return new JavaToolbar(this);
}
private int previousTabCount = 1;
// TODO: this is a clumsy way to get notified when tabs get added/deleted
// Override the parent call to add hook to the rebuild() method
public EditorHeader createHeader() {
return new EditorHeader(this) {
public void rebuild() {
super.rebuild();
// after Rename and New Tab, we may have new .java tabs
if (preprocService != null) {
int currentTabCount = sketch.getCodeCount();
if (currentTabCount != previousTabCount) {
previousTabCount = currentTabCount;
sketchChanged();
}
}
}
};
}
@Override
public EditorFooter createFooter() {
EditorFooter footer = super.createFooter();
addErrorTable(footer);
return footer;
}
public Formatter createFormatter() {
return new AutoFormat();
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
 * Build the File menu, adding Java Mode's export items
 * (Export Application and Export .pdez) to the shared entries.
 */
public JMenu buildFileMenu() {
  //String appTitle = JavaToolbar.getTitle(JavaToolbar.EXPORT, false);
  String appTitle = Language.text("menu.file.export_application");
  JMenuItem exportApplication = Toolkit.newJMenuItemShift(appTitle, 'E');
  exportApplication.addActionListener(e -> {
    if (checkSavedForExport()) {
      handleExportApplication();
    }
  });

  var exportPDEZ = new JMenuItem(Language.text("menu.file.export_pdez"));
  exportPDEZ.addActionListener(e -> {
    if (checkSavedForExport()) {
      handleExportPDEZ();
    }
  });
  return buildFileMenu(new JMenuItem[] { exportApplication, exportPDEZ });
}


/**
 * Shared precondition for both export actions. Exporting opens the sketch
 * folder, which is weird for untitled sketches (that live in a temp folder)
 * and read-only sketches (that live in the examples folder), so those are
 * rejected with a "save first" prompt.
 * TODO Better explanation? And some localization too.
 *
 * @return true if the export may proceed
 */
private boolean checkSavedForExport() {
  if (sketch.isUntitled() || sketch.isReadOnly()) {
    Messages.showMessage("Save First", "Please first save the sketch.");
    return false;
  }
  return true;
}
/**
 * Build the Sketch menu with Run, Present, Tweak, and Stop entries.
 * Stop routes to the debugger when a debug session owns the sketch.
 */
public JMenu buildSketchMenu() {
  JMenuItem run = Toolkit.newJMenuItem(Language.text("menu.sketch.run"), 'R');
  run.addActionListener(event -> handleRun());

  JMenuItem present = Toolkit.newJMenuItemShift(Language.text("menu.sketch.present"), 'R');
  present.addActionListener(event -> handlePresent());

  JMenuItem stop = new JMenuItem(Language.text("menu.sketch.stop"));
  stop.addActionListener(event -> {
    if (!isDebuggerEnabled()) {
      handleStop();
    } else {
      Messages.log("Invoked 'Stop' menu item");
      debugger.stopDebug();
    }
  });

  JMenuItem tweak = Toolkit.newJMenuItemShift(Language.text("menu.sketch.tweak"), 'T');
  tweak.addActionListener(event -> handleTweak());

  return buildSketchMenu(new JMenuItem[] {
    run, present, tweak, stop
  });
}
/**
 * Build the Help menu: About/Welcome, reference entries (served locally or
 * online), per-library and per-tool reference submenus, and web links.
 */
public JMenu buildHelpMenu() {
  JMenu menu = new JMenu(Language.text("menu.help"));
  JMenuItem item;

  // macOS already has its own about menu
  if (!Platform.isMacOS()) {
    item = new JMenuItem(Language.text("menu.help.about"));
    item.addActionListener(e -> new About(JavaEditor.this));
    menu.add(item);
  }

  item = new JMenuItem(Language.text("menu.help.welcome"));
  item.addActionListener(e -> {
    try {
      new Welcome(base);
    } catch (IOException ioe) {
      Messages.showWarning("Unwelcome Error",
                           "Please report this error to\n" +
                           "https://github.com/processing/processing4/issues", ioe);
    }
  });
  menu.add(item);

  item = new JMenuItem(Language.text("menu.help.environment"));
  item.addActionListener(e -> showReference("../environment/index.html"));
  menu.add(item);

  item = new JMenuItem(Language.text("menu.help.reference"));
  item.addActionListener(e -> showReference("index.html"));
  menu.add(item);

  item = Toolkit.newJMenuItemShift(Language.text("menu.help.find_in_reference"), 'F');
  item.addActionListener(e -> {
    // Find in Reference needs a selected word to look up.
    if (textarea.isSelectionActive()) {
      handleFindReference();
    } else {
      statusNotice(Language.text("editor.status.find_reference.select_word_first"));
    }
  });
  menu.add(item);

  // Not gonna use "update" since it's more about re-downloading:
  // it doesn't make sense to "update" the reference because it's
  // specific to a version of the software anyway. [fry 221125]
  // item = new JMenuItem(isReferenceDownloaded() ?
  //   "menu.help.reference.update" : "menu.help.reference.download");
  item = new JMenuItem(Language.text("menu.help.reference.download"));
  // Download runs on a background thread so the UI stays responsive.
  item.addActionListener(e -> new Thread(this::downloadReference).start());
  menu.add(item);

  menu.addSeparator();

  // Report a bug link opener
  item = new JMenuItem(Language.text("menu.help.report"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.report.url")));
  menu.add(item);

  // Ask on the Forum link opener
  item = new JMenuItem(Language.text("menu.help.ask"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.ask.url")));
  menu.add(item);

  menu.addSeparator();

  final JMenu libRefSubmenu = new JMenu(Language.text("menu.help.libraries_reference"));
  // Adding this in case references are included in a core library,
  // or other core libraries are included in the future
  boolean isCoreLibMenuItemAdded =
    addLibReferencesToSubMenu(mode.coreLibraries, libRefSubmenu);

  if (isCoreLibMenuItemAdded && !mode.contribLibraries.isEmpty()) {
    libRefSubmenu.addSeparator();
  }

  boolean isContribLibMenuItemAdded =
    addLibReferencesToSubMenu(mode.contribLibraries, libRefSubmenu);

  if (!isContribLibMenuItemAdded && !isCoreLibMenuItemAdded) {
    // No library references at all: show a disabled placeholder entry.
    JMenuItem emptyMenuItem = new JMenuItem(Language.text("menu.help.empty"));
    emptyMenuItem.setEnabled(false);
    emptyMenuItem.setFocusable(false);
    emptyMenuItem.setFocusPainted(false);
    libRefSubmenu.add(emptyMenuItem);
  } else if (!isContribLibMenuItemAdded && !mode.coreLibraries.isEmpty()) {
    //re-populate the menu to get rid of terminal separator
    libRefSubmenu.removeAll();
    addLibReferencesToSubMenu(mode.coreLibraries, libRefSubmenu);
  }
  menu.add(libRefSubmenu);

  final JMenu toolRefSubmenu = new JMenu(Language.text("menu.help.tools_reference"));
  boolean coreToolMenuItemAdded;
  boolean contribToolMenuItemAdded;

  List<ToolContribution> contribTools = base.getContribTools();

  // Adding this in case a reference folder is added for MovieMaker,
  // or in case other core tools are introduced later.
  coreToolMenuItemAdded = addToolReferencesToSubMenu(base.getCoreTools(), toolRefSubmenu);

  if (coreToolMenuItemAdded && !contribTools.isEmpty())
    toolRefSubmenu.addSeparator();

  contribToolMenuItemAdded = addToolReferencesToSubMenu(contribTools, toolRefSubmenu);

  if (!contribToolMenuItemAdded && !coreToolMenuItemAdded) {
    toolRefSubmenu.removeAll(); // in case a separator was added
    // No tool references: show a disabled placeholder entry.
    final JMenuItem emptyMenuItem = new JMenuItem(Language.text("menu.help.empty"));
    emptyMenuItem.setEnabled(false);
    emptyMenuItem.setBorderPainted(false);
    emptyMenuItem.setFocusable(false);
    emptyMenuItem.setFocusPainted(false);
    toolRefSubmenu.add(emptyMenuItem);
  }
  else if (!contribToolMenuItemAdded && !contribTools.isEmpty()) {
    // re-populate the menu to get rid of terminal separator
    toolRefSubmenu.removeAll();
    addToolReferencesToSubMenu(base.getCoreTools(), toolRefSubmenu);
  }
  menu.add(toolRefSubmenu);

  menu.addSeparator();

  /*
  item = new JMenuItem(Language.text("menu.help.online"));
  item.setEnabled(false);
  menu.add(item);
  */

  item = new JMenuItem(Language.text("menu.help.getting_started"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.getting_started.url")));
  menu.add(item);

  item = new JMenuItem(Language.text("menu.help.troubleshooting"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.troubleshooting.url")));
  menu.add(item);

  item = new JMenuItem(Language.text("menu.help.faq"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.faq.url")));
  menu.add(item);

  item = new JMenuItem(Language.text("menu.help.foundation"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.foundation.url")));
  menu.add(item);

  item = new JMenuItem(Language.text("menu.help.visit"));
  item.addActionListener(e -> Platform.openURL(Language.text("menu.help.visit.url")));
  menu.add(item);

  return menu;
}
//. . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
 * Add one menu item per Library in libsList that ships reference docs.
 * Selecting an item opens that library's reference index file
 * (index.htm/index.html in the browser, or readme.txt in a text editor).
 *
 * @param libsList libraries to scan
 * @param subMenu menu receiving the generated items
 * @return true if and only if at least one item was added
 */
private boolean addLibReferencesToSubMenu(List<Library> libsList, JMenu subMenu) {
  boolean added = false;
  for (final Library library : libsList) {
    if (!library.hasReference()) {
      continue;  // nothing to link for this library
    }
    final JMenuItem refItem = new JMenuItem(library.getName());
    refItem.addActionListener(event -> showReferenceFile(library.getReferenceIndexFile()));
    subMenu.add(refItem);
    added = true;
  }
  return added;
}
/**
 * Add one menu item per Tool in toolsList that ships reference docs
 * (a reference/index.html inside the tool folder). Selecting an item
 * opens that file.
 *
 * @param toolsList tools to scan
 * @param subMenu menu receiving the generated items
 * @return true if and only if at least one item was added
 */
private boolean addToolReferencesToSubMenu(List<ToolContribution> toolsList, JMenu subMenu) {
  boolean added = false;
  for (final ToolContribution tool : toolsList) {
    final File indexFile = new File(tool.getFolder(), "reference/index.html");
    if (indexFile.exists()) {
      JMenuItem refItem = new JMenuItem(tool.getName());
      refItem.addActionListener(event -> showReferenceFile(indexFile));
      subMenu.add(refItem);
      added = true;
    }
  }
  return added;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/** Prefix used by comment/uncomment: Java-style line comments. */
public String getCommentPrefix() {
  return "//";
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
 * Handler for Sketch &rarr; Export Application. Verifies the sketch is
 * saved/savable via handleExportCheckModified(), then shows the export
 * prompt and runs the export, opening the sketch folder on success.
 */
public void handleExportApplication() {
  if (handleExportCheckModified()) {
    statusNotice(Language.text("export.notice.exporting"));
    ExportPrompt ep = new ExportPrompt(this, () -> {
      try {
        if (jmode.handleExportApplication(getSketch())) {
          // Success: reveal the exported files next to the sketch.
          Platform.openFolder(sketch.getFolder());
          statusNotice(Language.text("export.notice.exporting.done"));
        }
      } catch (Exception e) {
        statusNotice(Language.text("export.notice.exporting.error"));
        e.printStackTrace();
      }
    });
    ep.trigger();
  }
}
/**
 * Handler for File &rarr; Export PDEZ. Zips the entire sketch folder into
 * a sibling "&lt;sketch&gt;.pdez" archive (replacing any existing one), then
 * reveals the archive in the file manager where the platform supports it.
 */
public void handleExportPDEZ() {
  if (handleExportCheckModified()) {
    var sketch = getSketch();
    var folder = sketch.getFolder().toPath();
    // Archive lands next to the sketch folder: <folder>.pdez
    var target = new File(folder + ".pdez").toPath();
    if (Files.exists(target)) {
      try {
        Platform.deleteFile(target.toFile());
      } catch (IOException e) {
        // NOTE(review): execution continues after this error dialog, so the
        // zip write below may still run against the undeleted file — confirm
        // whether an early return is intended here.
        Messages.showError("Export Error", "Could not delete existing file: " + target, e);
      }
    }
    // Walk the sketch folder and zip every regular file; entry names are
    // relative to the folder's parent so the archive unpacks as "<sketch>/...".
    try (var zs = new ZipOutputStream(Files.newOutputStream(target))) {
      Files.walk(folder)
        .filter(path -> !Files.isDirectory(path))
        .forEach(path -> {
          var zipEntry = new ZipEntry(folder.getParent().relativize(path).toString());
          try {
            zs.putNextEntry(zipEntry);
            Files.copy(path, zs);
            zs.closeEntry();
          } catch (IOException e) {
            // Wrap so the failure can escape the forEach lambda.
            throw new RuntimeException(e);
          }
        });
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    // Reveal the archive: highlight it directly when the platform can,
    // otherwise just open the containing folder.
    if (Desktop.isDesktopSupported()) {
      var desktop = Desktop.getDesktop();
      if (desktop.isSupported(Desktop.Action.BROWSE_FILE_DIR)) {
        desktop.browseFileDirectory(target.toFile());
      } else {
        try {
          desktop.open(target.getParent().toFile());
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }
}
/**
 * Checks to see if the sketch has been modified, and if so, asks the user
 * to save the sketch or cancel the export. Prevents exporting an
 * incomplete version of the sketch; see
 * <A HREF="https://download.processing.org/bugzilla/157.html">Bug 157</A>.
 *
 * @return true if the export may proceed
 */
protected boolean handleExportCheckModified() {
  // Read-only sketches need a "save as" before they can be exported.
  if (sketch.isReadOnly()) {
    Messages.showMessage(Language.text("export.messages.is_read_only"),
                         Language.text("export.messages.is_read_only.description"));
    return false;
  }
  // Untitled sketches cannot be exported either.
  if (sketch.isUntitled()) {
    Messages.showMessage(Language.text("export.messages.cannot_export"),
                         Language.text("export.messages.cannot_export.description"));
    return false;
  }
  // Nothing unsaved? Then we're done.
  if (!sketch.isModified()) {
    return true;
  }
  Object[] choices = { Language.text("prompt.ok"), Language.text("prompt.cancel") };
  int choice = JOptionPane.showOptionDialog(this,
                                            Language.text("export.unsaved_changes"),
                                            Language.text("menu.file.save"),
                                            JOptionPane.OK_CANCEL_OPTION,
                                            JOptionPane.QUESTION_MESSAGE,
                                            null,
                                            choices,
                                            choices[0]);
  if (choice != JOptionPane.OK_OPTION) {
    // Anything but OK (including closing the dialog) cancels the export.
    statusNotice(Language.text("export.notice.cancel.unsaved_changes"));
    return false;
  }
  handleSave(true);
  return true;
}
/** Run button / menu handler; routes through the debugger when enabled. */
public void handleRun() {
  if (!isDebuggerEnabled()) {
    handleLaunch(false, false);
    return;
  }
  // Hitting Run while a sketch is running should restart the sketch
  // https://github.com/processing/processing/issues/3623
  if (debugger.isStarted()) {
    debugger.stopDebug();
  }
  // Don't start the sketch paused, continue until a breakpoint or error
  // https://github.com/processing/processing/issues/3096
  debugger.continueDebug();
}
/** Present (full-screen) run. */
public void handlePresent() {
  handleLaunch(true, false);
}


/** Launch under Tweak mode; requires the sketch to be saved first. */
public void handleTweak() {
  autoSave();
  if (!sketch.isModified()) {
    handleLaunch(false, true);
    return;
  }
  // Tweak mode edits the saved files, so unsaved changes block it.
  Messages.showMessage(Language.text("menu.file.save"),
                       Language.text("tweak_mode.save_before_tweak"));
}
/**
 * Launch the sketch on a background thread.
 *
 * @param present true to run full-screen (Present mode)
 * @param tweak true to run under Tweak mode instead of a plain launch
 */
protected void handleLaunch(boolean present, boolean tweak) {
  prepareRun();
  toolbar.activateRun();
  // Mark the launch as pending so handleStop() can cancel it before the
  // runtime exists; all launch/runtime state is guarded by runtimeLock.
  synchronized (runtimeLock) {
    runtimeLaunchRequested = true;
  }
  new Thread(() -> {
    try {
      synchronized (runtimeLock) {
        // Skip the launch entirely if it was cancelled in the meantime.
        if (runtimeLaunchRequested) {
          runtimeLaunchRequested = false;
          RunnerListener listener = new RunnerListenerEdtAdapter(JavaEditor.this);
          if (!tweak) {
            runtime = jmode.handleLaunch(sketch, listener, present);
          } else {
            runtime = jmode.handleTweak(sketch, listener, JavaEditor.this);
          }
        }
      }
    } catch (Exception e) {
      // Report launch failures back on the EDT.
      EventQueue.invokeLater(() -> statusError(e));
    }
  }).start();
}
/**
 * Event handler called when hitting the stop button. Stops a running debug
 * session or performs standard stop action if not currently debugging.
 */
public void handleStop() {
  if (debugger.isStarted()) {
    debugger.stopDebug();

  } else {
    toolbar.activateStop();
    try {
      // Guard both the pending-launch flag and the runtime reference,
      // which handleLaunch() mutates from a background thread.
      synchronized (runtimeLock) {
        if (runtimeLaunchRequested) {
          // Cancel the launch before the runtime was created
          runtimeLaunchRequested = false;
        }
        if (runtime != null) {
          // Cancel the launch after the runtime was created
          runtime.close();  // kills the window
          runtime = null;
        }
      }
    } catch (Exception e) {
      statusError(e);
    }
    toolbar.deactivateStop();
    toolbar.deactivateRun();

    // focus the PDE again after quitting presentation mode [toxi 030903]
    toFront();
  }
}
/**
 * Callback from the Runner when the sketch process is going away;
 * resets the Run button only if this runner is still the current one.
 */
public void onRunnerExiting(Runner runner) {
  synchronized (runtimeLock) {
    final boolean isCurrentRuntime = (this.runtime == runner);
    if (isCurrentRuntime) {
      deactivateRun();
    }
  }
}
// /** Toggle a breakpoint on the current line. */
// public void toggleBreakpoint() {
// toggleBreakpoint(getCurrentLineID().lineIdx());
// }
/** Toggle a breakpoint on the given 0-based line; state lives in the debugger. */
@Override
public void toggleBreakpoint(int lineIdx) {
  debugger.toggleBreakpoint(lineIdx);
}
/**
 * Save As: after the parent saves the sketch under its new name, migrate
 * breakpoints from the old first-tab file name to the new one and re-write
 * breakpoint marker comments into all tabs on disk.
 *
 * @return true if the save happened
 */
public boolean handleSaveAs() {
  // Remember the first tab's file name: it changes during Save As.
  String oldName = getSketch().getCode(0).getFileName();
  boolean saved = super.handleSaveAs();
  if (saved) {
    // re-set breakpoints in first tab (name has changed)
    List<LineBreakpoint> bps = debugger.getBreakpoints(oldName);
    debugger.clearBreakpoints(oldName);
    String newName = getSketch().getCode(0).getFileName();
    for (LineBreakpoint bp : bps) {
      LineID line = new LineID(newName, bp.lineID().lineIdx());
      debugger.setBreakpoint(line);
    }
    // add breakpoint marker comments to source file
    for (SketchCode code : getSketch().getCode()) {
      addBreakpointComments(code.getFileName());
    }
  }
  return saved;
}
/**
 * Add import statements to the main tab for all packages provided by the
 * named library. Inserts into the current tab instead when it is a .java
 * file. Marks the sketch modified and scrolls to the top afterwards.
 *
 * @param libraryName display name of the library to import
 */
public void handleImportLibrary(String libraryName) {
  // make sure the user didn't hide the sketch folder
  sketch.ensureExistence();

  // Imports go into the main sketch file (code[0]) unless the
  // currently-shown tab is a .java file.
  if (mode.isDefaultExtension(sketch.getCurrentCode())) {
    sketch.setCurrentCode(0);
  }

  Library library = mode.findLibraryByName(libraryName);
  if (library == null) {
    statusError("Unable to locate library: " + libraryName);
    return;
  }

  // Ask the library which packages to import; when it doesn't say,
  // fall back to scanning the primary jar for packages. (Scanning the
  // sketch text for existing imports would misfire on commented-out ones.)
  StringList packages = library.getImports();
  if (packages == null) {
    packages = Util.packageListFromClassPath(library.getJarPath());
  }

  StringBuilder buffer = new StringBuilder();
  for (String pkg : packages) {
    buffer.append("import ").append(pkg).append(".*;\n");
  }
  buffer.append('\n');
  buffer.append(getText());

  setText(buffer.toString());
  setSelection(0, 0);  // scroll to start
  sketch.setModified(true);
}
/** Installed libraries changed; let the preprocessor rebuild its classpath. */
@Override
public void librariesChanged() {
  preprocService.notifyLibrariesChanged();
}


/** The sketch's code folder changed; the preprocessor must re-scan it. */
@Override
public void codeFolderChanged() {
  preprocService.notifyCodeFolderChanged();
}


/** Sketch text changed: re-run error checking and preprocessing. */
@Override
public void sketchChanged() {
  errorChecker.notifySketchChanged();
  preprocService.notifySketchChanged();
}
/**
 * Trigger sketchChanged() whenever the given document is edited.
 * A null document is silently ignored.
 */
public void addDocumentListener(Document doc) {
  if (doc == null) {
    return;
  }
  doc.addDocumentListener(new DocumentListener() {
    @Override
    public void insertUpdate(DocumentEvent event) {
      sketchChanged();
    }

    @Override
    public void removeUpdate(DocumentEvent event) {
      sketchChanged();
    }

    @Override
    public void changedUpdate(DocumentEvent event) {
      sketchChanged();
    }
  });
}
/**
 * Open a reference page. On first use, decides whether a local reference
 * server can be used: if a reference.zip exists in the Mode folder or the
 * sketchbook, pages are served from it; otherwise local files or the
 * online reference are used. useReferenceServer is a tri-state Boolean
 * where null means "not decided yet".
 *
 * @param name page path relative to the reference root, e.g. "index.html"
 */
public void showReference(String name) {
  if (useReferenceServer == null) {
    // Because of this, it should be possible to create your own dist
    // that includes the reference by simply adding it to modes/java.
    File referenceZip = new File(mode.getFolder(), "reference.zip");
    if (!referenceZip.exists()) {
      // For Java Mode (the default), check for a reference.zip in the root
      // of the sketchbook folder. If other Modes subclass JavaEditor and
      // don't override this function, it may cause a little trouble.
      referenceZip = getOfflineReferenceFile();
    }
    if (referenceZip.exists()) {
      try {
        referenceServer = new ReferenceServer(referenceZip, REFERENCE_PORT);
        useReferenceServer = true;
      } catch (IOException e) {
        Messages.showWarning("Reference Server Problem", "Error while starting the documentation server.");
        // Fix: leaving this null would unbox to an NPE below; fall back
        // to the local/online reference instead.
        useReferenceServer = false;
      }
    } else {
      useReferenceServer = false;
    }
  }
  if (useReferenceServer) {
    String url = referenceServer.getPrefix() + "reference/" + name;
    Platform.openURL(url);
  } else {
    File file = new File(mode.getReferenceFolder(), name);
    if (file.exists()) {
      showReferenceFile(file);
    } else {
      // Offline reference (temporarily) removed in 4.0 beta 9
      // https://github.com/processing/processing4/issues/524
      Platform.openURL("https://processing.org/reference/" + name);
    }
  }
}
/** Location of the downloadable offline reference inside the sketchbook. */
private File getOfflineReferenceFile() {
  File sketchbook = Base.getSketchbookFolder();
  return new File(sketchbook, "reference.zip");
}
/*
private boolean isReferenceDownloaded() {
return getOfflineReferenceFile().exists();
}
*/
/**
 * Build the GitHub download URL for the offline reference zip matching
 * this build. Development builds (unspecified version or placeholder
 * revision) fall back to a fixed, known-good release.
 */
private String getReferenceDownloadUrl() {
  String versionName = Base.getVersionName();
  int revisionInt = Base.getRevision();
  // Running from source: version info is placeholder values, so point
  // at a fixed release instead of a URL that cannot exist.
  if ("unspecified".equals(versionName) || revisionInt == Integer.MAX_VALUE) {
    return "https://github.com/processing/processing4/releases/download/processing-1300-4.4.0/processing-4.4.0-reference.zip";
  }
  String revision = String.valueOf(revisionInt);
  // (debug System.out.println removed — it leaked into release builds)
  return String.format(
    "https://github.com/processing/processing4/releases/download/processing-%s-%s/processing-%s-reference.zip",
    revision, versionName, versionName);
}
/**
 * Download the offline reference zip into the sketchbook, showing a
 * progress monitor. Intended to run off the EDT (see buildHelpMenu).
 * Cancelling the progress dialog aborts the download quietly.
 */
private void downloadReference() {
  try {
    URL source = new URL(getReferenceDownloadUrl());
    HttpURLConnection conn = (HttpURLConnection) source.openConnection();
    HttpURLConnection.setFollowRedirects(true);
    conn.setConnectTimeout(15 * 1000);
    conn.setReadTimeout(60 * 1000);
    conn.setRequestMethod("GET");
    conn.connect();

    int length = conn.getContentLength();
    // Format the size in MB with one decimal place for the dialog title.
    String mb = PApplet.nf((length >> 10) / 1024f, 0, 1);
    if (mb.endsWith(".0")) {
      mb = mb.substring(0, mb.length() - 2);  // don't show .0
    }
    ProgressMonitorInputStream input =
      new ProgressMonitorInputStream(this,
        "Downloading reference (" + mb + " MB)… ", conn.getInputStream());
    input.getProgressMonitor().setMaximum(length);
    PApplet.saveStream(getOfflineReferenceFile(), input);

    // reset the internal handling for the reference server so that
    // showReference() re-detects the freshly downloaded zip
    useReferenceServer = null;

  } catch (InterruptedIOException iioe) {
    // download canceled via the progress dialog; nothing to clean up

  } catch (IOException e) {
    Messages.showWarning("Error downloading reference",
                         "Could not download the reference. Try again later.", e);
  }
}
/** Show an error in the status bar and reset the Run button. */
public void statusError(String what) {
  super.statusError(what);
  toolbar.deactivateRun();
}


/** Called when the editor window is closing; stops any running sketch. */
public void internalCloseRunner() {
  handleStop();
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
// Additions from PDE X, Debug Mode, Twerk Mode...
/**
 * Used instead of the windowClosing event handler, since it's not called on
 * mode switch. Called when closing the editor window. Stops running debug
 * sessions and disposes all per-editor services and dialogs.
 */
@Override
public void dispose() {
  // quit running debug session
  if (debugger.isEnabled()) {
    debugger.stopDebug();
  }
  debugger.dispose();
  preprocService.dispose();
  inspect.dispose();
  usage.dispose();
  rename.dispose();
  errorChecker.dispose();
  // AST viewer only exists when SHOW_AST_VIEWER was set in the constructor
  if (astViewer != null) {
    astViewer.dispose();
  }
  super.dispose();
}
/** Whether the debugger toggle is currently on for this editor. */
@Override
public boolean isDebuggerEnabled() {
  return debugger.isEnabled();
}


/** The mode menu doubles as the Debug menu in Java Mode. */
@Override
public JMenu buildModeMenu() {
  modeMenu = new JMenu(Language.text("menu.debug"));
  return modeMenu;
}
// handleOpenInternal() only called by the Editor constructor, meaning that
// this code is all useless. All these things will be in their default state.
// /**
// * Event handler called when loading another sketch in this editor.
// * Clears breakpoints of previous sketch.
// * @return true if a sketch was opened, false if aborted
// */
// @Override
// protected void handleOpenInternal(String path) throws EditorException {
// super.handleOpenInternal(path);
//
// // should already been stopped (open calls handleStop)
// if (debugger != null) {
// debugger.clearBreakpoints();
// }
// clearBreakpointedLines();
// variableInspector().reset();
// }
/**
 * Extract breakpointed lines from source code marker comments, removing
 * the markers from the editor text. Intended to be called on loading a
 * sketch, since re-setting the sketch contents after removing the markers
 * will clear all breakpoints.
 *
 * @return the LineIDs where breakpoint marker comments were removed
 */
protected List<LineID> stripBreakpointComments() {
  final List<LineID> found = new ArrayList<>();
  final Sketch sketch = getSketch();
  // iterate over all tabs
  for (int tabIdx = 0; tabIdx < sketch.getCodeCount(); tabIdx++) {
    SketchCode tab = sketch.getCode(tabIdx);
    String[] lines = tab.getProgram().split("\\r?\\n");  // newlines excluded
    // scan each line for a trailing breakpoint marker comment
    for (int lineIdx = 0; lineIdx < lines.length; lineIdx++) {
      String line = lines[lineIdx];
      if (line.endsWith(breakpointMarkerComment)) {
        found.add(new LineID(tab.getFileName(), lineIdx));
        // strip the marker from the end of the line
        lines[lineIdx] = line.substring(0, line.lastIndexOf(breakpointMarkerComment));
      }
    }
    setTabContents(tab.getFileName(), PApplet.join(lines, "\n"));
  }
  return found;
}
/**
 * Add breakpoint marker comments to the source file of a specific tab. This
 * acts on the source file on disk, not the editor text. Intended to be
 * called just after saving the sketch.
 *
 * @param tabFilename the tab file name
 */
protected void addBreakpointComments(String tabFilename) {
  SketchCode tab = getTab(tabFilename);
  if (tab == null) {
    // this method gets called twice when saving sketch for the first time
    // once with new name and another with old (causing NPE). Keep an eye out
    // for potential issues. See #2675. TODO:
    Messages.err("Illegal tab name to addBreakpointComments() " + tabFilename);
    return;
  }
  List<LineBreakpoint> bps = debugger.getBreakpoints(tab.getFileName());

  // load the source file via the SketchCode class, append the marker
  // comment to each breakpointed line, and save it back
  try {
    tab.load();
    String code = tab.getProgram();
    String[] lines = code.split("\\r?\\n");  // newlines not included
    for (LineBreakpoint bp : bps) {
      int lineIdx = bp.lineID().lineIdx();
      // Guard against stale breakpoints past the end of the file (e.g.
      // after the tab shrank); skipping is better than an
      // ArrayIndexOutOfBoundsException losing the whole save step.
      if (lineIdx >= 0 && lineIdx < lines.length) {
        lines[lineIdx] += breakpointMarkerComment;
      }
    }
    code = PApplet.join(lines, "\n");
    tab.setProgram(code);
    tab.save();
  } catch (IOException ex) {
    Messages.err(null, ex);
  }
}
/**
 * Save the sketch, then re-add breakpoint marker comments to every tab
 * that had unsaved changes (the markers live only in the on-disk files).
 *
 * @param immediately true to save synchronously; otherwise the marker
 *        rewrite is deferred to the EDT until after the save completes
 * @return whatever the parent save reported
 */
@Override
public boolean handleSave(boolean immediately) {
  // note modified tabs
  final List<String> modified = new ArrayList<>();
  for (int i = 0; i < getSketch().getCodeCount(); i++) {
    SketchCode tab = getSketch().getCode(i);
    if (tab.isModified()) {
      modified.add(tab.getFileName());
    }
  }

  boolean saved = super.handleSave(immediately);
  if (saved) {
    if (immediately) {
      for (String tabFilename : modified) {
        addBreakpointComments(tabFilename);
      }
    } else {
      // Deferred save: wait until it lands before touching files on disk.
      EventQueue.invokeLater(() -> {
        for (String tabFilename : modified) {
          addBreakpointComments(tabFilename);
        }
      });
    }
  }
  return saved;
}
/**
 * Set text contents of a specific tab. Updates underlying document and text
 * area. Clears Breakpoints.
 *
 * @param tabFilename the tab file name
 * @param code the text to set
 */
protected void setTabContents(String tabFilename, String code) {
  // remove all breakpoints of this tab
  debugger.clearBreakpoints(tabFilename);

  SketchCode currentTab = getCurrentTab();

  // set code of tab
  SketchCode tab = getTab(tabFilename);
  if (tab != null) {
    tab.setProgram(code);
    // this updates document and text area
    // TODO: does this have any negative effects? (setting the doc to null)
    tab.setDocument(null);
    setCode(tab);

    // switch back to original tab
    setCode(currentTab);
  }
}
/** Remove all text from the console pane. */
public void clearConsole() {
  console.clear();
}


/** Collapse the current selection to the caret position. */
public void clearSelection() {
  final int caret = getCaretOffset();
  setSelection(caret, caret);
}


/**
 * Select a line in the current tab.
 * @param lineIdx 0-based line number
 */
public void selectLine(int lineIdx) {
  setSelection(getLineStartOffset(lineIdx), getLineStopOffset(lineIdx));
}


/**
 * Set the cursor to the start of a line.
 * @param lineIdx 0-based line number
 */
public void cursorToLineStart(int lineIdx) {
  final int start = getLineStartOffset(lineIdx);
  setSelection(start, start);
}


/**
 * Set the cursor to the end of a line.
 * @param lineIdx 0-based line number
 */
public void cursorToLineEnd(int lineIdx) {
  final int stop = getLineStopOffset(lineIdx);
  setSelection(stop, stop);
}
/**
 * Switch to the tab whose file name equals tabFileName
 * (as reported by {@link SketchCode#getFileName()}). No-op when absent.
 */
public void switchToTab(String tabFileName) {
  final Sketch s = getSketch();
  final int count = s.getCodeCount();
  for (int i = 0; i < count; i++) {
    if (tabFileName.equals(s.getCode(i).getFileName())) {
      s.setCurrentCode(i);
      break;
    }
  }
}


/** Debugger owned by this editor. */
public Debugger getDebugger() {
  return debugger;
}


/**
 * Access the custom text area object.
 * @return the text area, downcast to the Java Mode subclass
 */
public JavaTextArea getJavaTextArea() {
  return (JavaTextArea) textarea;
}


/** Background preprocessing service for this sketch. */
public PreprocService getPreprocessingService() {
  return preprocService;
}
/**
 * Grab current contents of the sketch window, advance the console, stop any
 * other running sketches, auto-save the user's code... not in that order.
 */
@Override
public void prepareRun() {
  autoSave();
  super.prepareRun();
  // Offer to install imported-but-missing libraries before launching.
  downloadImports();
  // Pending preprocessing is pointless now; the run recompiles anyway.
  preprocService.cancel();
}
/**
 * Downloads libraries that have been imported, that aren't available as a
 * LocalContribution, but that have an AvailableContribution associated with
 * them. Scans every .pde tab for import statements and, when some imports
 * resolve to uninstalled-but-downloadable libraries, asks the user whether
 * to install them.
 */
protected void downloadImports() {
  for (SketchCode sc : sketch.getCode()) {
    if (sc.isExtension("pde")) {
      String tabCode = sc.getProgram();

      List<ImportStatement> imports = SourceUtil.parseProgramImports(tabCode);

      if (!imports.isEmpty()) {
        ArrayList<String> importHeaders = new ArrayList<>();
        for (ImportStatement importStatement : imports) {
          importHeaders.add(importStatement.getFullMemberName());
        }
        List<AvailableContribution> installLibsHeaders =
          getNotInstalledAvailableLibs(importHeaders);

        if (!installLibsHeaders.isEmpty()) {
          // Build a bulleted list of the missing libraries for the prompt.
          StringBuilder libList = new StringBuilder("Would you like to install them now?");
          for (AvailableContribution ac : installLibsHeaders) {
            libList.append("\n • ").append(ac.getName());
          }
          int option = Messages.showYesNoQuestion(this,
            Language.text("contrib.import.dialog.title"),
            Language.text("contrib.import.dialog.primary_text"),
            libList.toString());

          if (option == JOptionPane.YES_OPTION) {
            ContributionManager.downloadAndInstallOnImport(base, installLibsHeaders);
          }
        }
      }
    }
  }
}
/**
 * Returns a list of AvailableContributions for those libraries that the
 * user wants imported, but that are not installed. Core Java, javax, and
 * Processing packages are skipped since they never need a contribution.
 *
 * @param importHeadersList fully-qualified imported member names
 */
private List<AvailableContribution> getNotInstalledAvailableLibs(List<String> importHeadersList) {
  Map<String, Contribution> importMap =
    ContributionListing.getInstance().getLibraryExports();
  List<AvailableContribution> libList = new ArrayList<>();
  for (String importHeaders : importHeadersList) {
    // Strip the trailing class/wildcard segment to get the package name.
    int dot = importHeaders.lastIndexOf('.');
    String entry = (dot == -1) ? importHeaders : importHeaders.substring(0, dot);
    if (entry.startsWith("java.") ||
        entry.startsWith("javax.") ||
        entry.startsWith("processing.")) {
      continue;
    }
    Library library;
    try {
      library = this.getMode().getLibrary(entry);
    } catch (Exception e) {
      // Treat a lookup failure the same as a missing library (this
      // replaces a previously duplicated fallback block in the catch).
      library = null;
    }
    if (library == null) {
      Contribution c = importMap.get(importHeaders);
      if (c instanceof AvailableContribution) {  // also checks null
        libList.add((AvailableContribution) c);
      }
    }
  }
  return libList;
}
/**
 * Displays a JDialog prompting the user to save when the user hits
 * run/present/etc., honoring the auto-save preferences in JavaMode.
 * Does nothing when auto-save is disabled, the sketch is unmodified,
 * or the sketch is untitled.
 */
protected void autoSave() {
  if (!JavaMode.autoSaveEnabled) {
    return;
  }
  try {
    if (sketch.isModified() && !sketch.isUntitled()) {
      if (JavaMode.autoSavePromptEnabled) {
        // Build a modal "save before running?" dialog by hand.
        final JDialog autoSaveDialog =
          new JDialog(base.getActiveEditor(), getSketch().getName(), true);
        Container container = autoSaveDialog.getContentPane();

        JPanel panelMain = new JPanel();
        panelMain.setBorder(BorderFactory.createEmptyBorder(4, 0, 2, 2));
        panelMain.setLayout(new BoxLayout(panelMain, BoxLayout.PAGE_AXIS));

        JPanel panelLabel = new JPanel(new FlowLayout(FlowLayout.LEFT));
        JLabel label = new JLabel("<html><body> There are unsaved"
                                  + " changes in your sketch.<br />"
                                  + " Do you want to save it before"
                                  + " running? </body></html>");
        label.setFont(new Font(
          label.getFont().getName(),
          Font.PLAIN,
          Toolkit.zoom(label.getFont().getSize() + 1)
        ));
        panelLabel.add(label);
        panelMain.add(panelLabel);
        // When checked, the chosen button becomes the persistent default.
        final JCheckBox dontRedisplay = new JCheckBox("Remember this decision");

        JPanel panelButtons = new JPanel(new FlowLayout(FlowLayout.CENTER, 8, 2));
        JButton btnRunSave = new JButton("Save and Run");
        btnRunSave.addActionListener(e -> {
          handleSave(true);
          if (dontRedisplay.isSelected()) {
            // Stop prompting; remember "always save" as the default.
            JavaMode.autoSavePromptEnabled = !dontRedisplay.isSelected();
            JavaMode.defaultAutoSaveEnabled = true;
            jmode.savePreferences();
          }
          autoSaveDialog.dispose();
        });
        panelButtons.add(btnRunSave);

        JButton btnRunNoSave = new JButton("Run, Don't Save");
        btnRunNoSave.addActionListener(e -> {
          if (dontRedisplay.isSelected()) {
            // Stop prompting; remember "never save" as the default.
            JavaMode.autoSavePromptEnabled = !dontRedisplay.isSelected();
            JavaMode.defaultAutoSaveEnabled = false;
            jmode.savePreferences();
          }
          autoSaveDialog.dispose();
        });
        panelButtons.add(btnRunNoSave);
        panelMain.add(panelButtons);

        JPanel panelCheck = new JPanel();
        panelCheck.setLayout(new FlowLayout(FlowLayout.CENTER, 0, 0));
        panelCheck.add(dontRedisplay);
        panelMain.add(panelCheck);

        container.add(panelMain);
        autoSaveDialog.setResizable(false);
        autoSaveDialog.pack();
        autoSaveDialog.setLocationRelativeTo(base.getActiveEditor());
        // Modal dialog: blocks here until the user picks a button.
        autoSaveDialog.setVisible(true);

      } else if (JavaMode.defaultAutoSaveEnabled) {
        handleSave(true);
      }
    }
  } catch (Exception e) {
    statusError(e);
  }
}
/**
 * Activate the Run button and disable the debug menu item while the
 * sketch is running. Called when a run is started.
 */
public void activateRun() {
  // debugging and running are mutually exclusive
  debugger.enableMenuItem(false);
  toolbar.activateRun();
}
/**
 * Deactivate the Run button. This is called by Runner to notify that the
 * sketch has stopped running, usually in response to an error (or maybe
 * the sketch completing and exiting?) Tools should not call this function.
 * To initiate a "stop" action, call handleStop() instead.
 */
public void deactivateRun() {
  toolbar.deactivateRun();
  // re-enable the debug menu item now that the sketch is no longer running
  debugger.enableMenuItem(true);
}
/*
protected void activateDebug() {
activateRun();
}
public void deactivateDebug() {
deactivateRun();
}
*/
/** Highlight the Continue button in the toolbar (debugger is suspended). */
public void activateContinue() {
  ((JavaToolbar) toolbar).activateContinue();
}
/** Remove the highlight from the Continue button in the toolbar. */
public void deactivateContinue() {
  ((JavaToolbar) toolbar).deactivateContinue();
}
/** Highlight the Step button in the toolbar. */
public void activateStep() {
  ((JavaToolbar) toolbar).activateStep();
}
/** Remove the highlight from the Step button in the toolbar. */
public void deactivateStep() {
  ((JavaToolbar) toolbar).deactivateStep();
}
/**
 * Toggle the debugger on/off, then rebuild the toolbar and repaint the
 * editor so breakpoint markers in the gutter are shown or hidden.
 */
public void toggleDebug() {
  // debugEnabled = !debugEnabled;
  debugger.toggleEnabled();
  rebuildToolbar();
  repaint(); // show/hide breakpoints in the gutter
  /*
  if (debugEnabled) {
    debugItem.setText(Language.text("menu.debug.disable"));
  } else {
    debugItem.setText(Language.text("menu.debug.enable"));
  }
  inspector.setVisible(debugEnabled);
  for (Component item : debugMenu.getMenuComponents()) {
    if (item instanceof JMenuItem && item != debugItem) {
      item.setEnabled(debugEnabled);
    }
  }
  */
}
/*
public void toggleVariableInspector() {
if (inspector.isVisible()) {
inspectorItem.setText(Language.text("menu.debug.show_variables"));
inspector.setVisible(false);
} else {
// inspector.setFocusableWindowState(false); // to not get focus when set visible
inspectorItem.setText(Language.text("menu.debug.show_variables"));
inspector.setVisible(true);
// inspector.setFocusableWindowState(true); // allow to get focus again
}
}
*/
// public void showVariableInspector() {
// tray.setVisible(true);
// }
// /**
// * Set visibility of the variable inspector window.
// * @param visible true to set the variable inspector visible,
// * false for invisible.
// */
// public void showVariableInspector(boolean visible) {
// tray.setVisible(visible);
// }
//
//
// public void hideVariableInspector() {
// tray.setVisible(true);
// }
//
//
// /** Toggle visibility of the variable inspector window. */
// public void toggleVariableInspector() {
// tray.setFocusableWindowState(false); // to not get focus when set visible
// tray.setVisible(!tray.isVisible());
// tray.setFocusableWindowState(true); // allow to get focus again
// }
/**
 * Set the line to highlight as currently suspended at. Will override the
 * breakpoint color, if set. Switches to the appropriate tab and scroll to
 * the line by placing the cursor there.
 * @param line the line to highlight as current suspended line
 */
public void setCurrentLine(LineID line) {
  // remove any previous "current line" highlight first
  clearCurrentLine();
  if (line == null) {
    // safety, e.g. when no line mapping is found and the null line is used.
    return;
  }
  switchToTab(line.fileName());
  // scroll to line, by setting the cursor
  cursorToLineStart(line.lineIdx());
  // highlight line
  currentLine = new LineHighlight(line.lineIdx(), this);
  currentLine.setMarker(PdeTextArea.STEP_MARKER);
  currentLine.setPriority(10); // fixes current line being hidden by the breakpoint when moved down
}
/**
 * Remove the debugger's "current line" highlight, if one is set.
 * If a breakpoint highlight exists on the same line, it is repainted so
 * the line reverts to the breakpoint color.
 */
public void clearCurrentLine() {
  if (currentLine == null) {
    return;  // nothing highlighted
  }
  currentLine.clear();
  currentLine.dispose();
  // revert to breakpoint color if any is set on this line
  for (LineHighlight breakpoint : breakpointedLines) {
    if (breakpoint.getLineID().equals(currentLine.getLineID())) {
      breakpoint.paint();
      break;
    }
  }
  currentLine = null;
}
/**
 * Add highlight for a breakpointed line.
 * @param lineID the line id to highlight as breakpointed
 */
public void addBreakpointedLine(LineID lineID) {
  LineHighlight hl = new LineHighlight(lineID, this);
  hl.setMarker(PdeTextArea.BREAK_MARKER);
  breakpointedLines.add(hl);
  // repaint current line if it's on this line, so the step marker
  // stays on top of the new breakpoint highlight
  if (currentLine != null && currentLine.getLineID().equals(lineID)) {
    currentLine.paint();
  }
}
/**
 * Remove a highlight for a breakpointed line. Needs to be on the current tab.
 * @param lineIdx the line index on the current tab to remove a breakpoint
 * highlight from
 */
public void removeBreakpointedLine(int lineIdx) {
  LineID line = getLineIDInCurrentTab(lineIdx);
  //System.out.println("line id: " + line.fileName() + " " + line.lineIdx());
  // find the first highlight registered for this line (if any)
  LineHighlight foundLine = null;
  for (LineHighlight hl : breakpointedLines) {
    if (hl.getLineID().equals(line)) {
      foundLine = hl;
      break;
    }
  }
  if (foundLine != null) {
    foundLine.clear();
    breakpointedLines.remove(foundLine);
    foundLine.dispose();
    // repaint current line if it's on this line
    if (currentLine != null && currentLine.getLineID().equals(line)) {
      currentLine.paint();
    }
  }
}
/*
// Remove all highlights for breakpoint lines.
public void clearBreakpointedLines() {
for (LineHighlight hl : breakpointedLines) {
hl.clear();
hl.dispose();
}
breakpointedLines.clear(); // remove all breakpoints
// fix highlights not being removed when tab names have
// changed due to opening a new sketch in same editor
getJavaTextArea().clearGutterText();
// repaint current line
if (currentLine != null) {
currentLine.paint();
}
}
*/
/**
 * Build a {@link LineID} for a line index on the currently edited tab.
 * @param lineIdx the line index on the current tab
 * @return the {@link LineID} object representing that line
 */
public LineID getLineIDInCurrentTab(int lineIdx) {
  String currentTabName = getSketch().getCurrentCode().getFileName();
  return new LineID(currentTabName, lineIdx);
}
/**
 * Determine the sketch line the caret currently sits on.
 * @return a {@link LineID} for the caret position on the current tab
 */
public LineID getCurrentLineID() {
  return new LineID(getSketch().getCurrentCode().getFileName(),
                    getTextArea().getCaretLine());
}
/**
 * Test whether a {@link LineID} refers to the currently edited tab.
 * @param line the {@link LineID} to check
 * @return true if the line belongs to the current tab
 */
public boolean isInCurrentTab(LineID line) {
  String currentName = getSketch().getCurrentCode().getFileName();
  return line.fileName().equals(currentName);
}
/**
 * Event handler called when switching between tabs. Loads all line
 * background colors set for the tab.
 * @param code tab to switch to
 */
@Override
public void setCode(SketchCode code) {
  Document oldDoc = code.getDocument();
  //System.out.println("tab switch: " + code.getFileName());
  // set the new document in the textarea, etc. need to do this first
  super.setCode(code);
  Document newDoc = code.getDocument();
  // super.setCode() may have created a new Document; attach our listener
  // only then, so it is never registered twice on the same Document
  if (oldDoc != newDoc) {
    addDocumentListener(newDoc);
  }
  // set line background colors for tab
  final JavaTextArea ta = getJavaTextArea();
  // can be null when setCode is called the first time (in constructor)
  if (ta != null) {
    // clear all gutter text
    ta.clearGutterText();
    // first paint breakpoints
    if (breakpointedLines != null) {
      for (LineHighlight hl : breakpointedLines) {
        if (isInCurrentTab(hl.getLineID())) {
          hl.paint();
        }
      }
    }
    // now paint current line (if any) on top of the breakpoint highlights
    if (currentLine != null) {
      if (isInCurrentTab(currentLine.getLineID())) {
        currentLine.paint();
      }
    }
  }
  if (getDebugger() != null && getDebugger().isStarted()) {
    getDebugger().startTrackingLineChanges();
  }
  if (errorColumn != null) {
    errorColumn.repaint();
  }
}
/**
 * Look up a tab by its file name.
 * @param filename the filename to search for.
 * @return the {@link SketchCode} object for the tab, or null if not found
 */
public SketchCode getTab(String filename) {
  for (SketchCode tab : getSketch().getCode()) {
    if (tab.getFileName().equals(filename)) {
      return tab;
    }
  }
  return null;
}
/**
 * Retrieve the tab currently being edited.
 * @return the {@link SketchCode} representing the current tab
 */
public SketchCode getCurrentTab() {
  Sketch s = getSketch();
  return s.getCurrentCode();
}
/**
 * Access the document backing the currently edited tab.
 * @return the document object
 */
public Document currentDocument() {
  SketchCode tab = getCurrentTab();
  return tab.getDocument();
}
/** Show the localized "debugger busy" message in the status bar. */
public void statusBusy() {
  statusNotice(Language.text("editor.status.debug.busy"));
}
/** Show the localized "debugger halted" message in the status bar. */
public void statusHalted() {
  statusNotice(Language.text("editor.status.debug.halt"));
}
/**
 * Updates the error table in the Error Window.
 * Overridden to handle the fugly import suggestions text.
 */
@Override
public void updateErrorTable(List<Problem> problems) {
  errorTable.clearRows();
  for (Problem problem : problems) {
    String message = problem.getMessage();
    // exact class match (not instanceof) mirrors the double-click handler
    if (problem.getClass().equals(JavaProblem.class)) {
      String[] suggestions = ((JavaProblem) problem).getImportSuggestions();
      if (JavaMode.importSuggestEnabled &&
          suggestions != null && suggestions.length > 0) {
        message += " (double-click for suggestions)";
      }
    }
    // line numbers are 0-indexed internally; display as 1-indexed
    errorTable.addRow(problem, message,
                      sketch.getCode(problem.getTabIndex()).getPrettyName(),
                      Integer.toString(problem.getLineNumber() + 1));
  }
}
/**
 * Handle a double-click on an error-table row. For {@code JavaProblem}
 * rows that carry import suggestions, pop up the suggestion list at the
 * mouse position; otherwise fall back to the single-click behavior.
 * @param item the row object from the error table
 */
@Override
public void errorTableDoubleClick(Object item) {
  if (!item.getClass().equals(JavaProblem.class)) {
    errorTableClick(item);
    // BUGFIX: return was missing, so the unconditional cast below threw
    // a ClassCastException for every non-JavaProblem row
    return;
  }
  JavaProblem p = (JavaProblem) item;
  String[] suggestions = p.getImportSuggestions();
  if (suggestions != null && suggestions.length > 0) {
    // Build display entries: "Import 'ClassName' (fully.qualified.Name)"
    String className =
      suggestions[0].substring(suggestions[0].lastIndexOf('.') + 1);
    String[] entries = new String[suggestions.length];
    for (int i = 0; i < suggestions.length; i++) {
      entries[i] = "<html>Import '" + className + "' <font color=#777777>(" + suggestions[i] + ")</font></html>";
    }
    // anchor the popup at the current mouse position
    Point mouse = MouseInfo.getPointerInfo().getLocation();
    showImportSuggestion(entries, mouse.x, mouse.y);
  } else {
    errorTableClick(item);
  }
}
// Undecorated popup currently showing import suggestions; null when hidden.
// Also acts as a guard so only one popup exists at a time.
JFrame frmImportSuggest;
/**
 * Show an undecorated popup listing candidate import statements at the
 * given screen position. Clicking an entry inserts the corresponding
 * import at the top of the first tab; losing focus dismisses the popup.
 * @param list HTML-formatted entries of the form "Import 'X' (pkg.X)"
 * @param x screen x position for the popup
 * @param y screen y position for the popup
 */
private void showImportSuggestion(String[] list, int x, int y) {
  if (frmImportSuggest != null) {
    // frmImportSuggest.setVisible(false);
    // frmImportSuggest = null;
    return;  // a popup is already showing; don't open a second one
  }
  final JList<String> classList = new JList<>(list);
  classList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
  frmImportSuggest = new JFrame();
  frmImportSuggest.setUndecorated(true);
  frmImportSuggest.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
  JPanel panel = new JPanel();
  panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
  panel.setBackground(Color.WHITE);
  frmImportSuggest.setBackground(Color.WHITE);
  panel.add(classList);
  // NOTE(review): "alight" looks like a typo for "align" in this HTML
  // attribute — confirm before changing the rendered string
  JLabel label = new JLabel("<html><div alight = \"left\"><font size = \"2\"><br>(Click to insert)</font></div></html>");
  label.setBackground(Color.WHITE);
  label.setHorizontalTextPosition(SwingConstants.LEFT);
  panel.add(label);
  panel.validate();
  frmImportSuggest.getContentPane().add(panel);
  frmImportSuggest.pack();
  classList.addListSelectionListener(e -> {
    if (classList.getSelectedValue() != null) {
      try {
        // entry format is "...(fully.qualified.Name)..."; extract the
        // qualified name between the parentheses
        String t = classList.getSelectedValue().trim();
        Messages.log(t);
        int x1 = t.indexOf('(');
        String impString = "import " + t.substring(x1 + 1, t.indexOf(')')) + ";\n";
        // insert at the very top of the first tab, then restore the
        // previously selected tab
        int ct = getSketch().getCurrentCodeIndex();
        getSketch().setCurrentCode(0);
        getTextArea().getDocument().insertString(0, impString, null);
        getSketch().setCurrentCode(ct);
      } catch (BadLocationException ble) {
        Messages.log("Failed to insert import");
        ble.printStackTrace();
      }
    }
    frmImportSuggest.setVisible(false);
    frmImportSuggest.dispose();
    frmImportSuggest = null;
  });
  frmImportSuggest.addWindowFocusListener(new WindowFocusListener() {
    @Override
    public void windowLostFocus(WindowEvent e) {
      // dismiss when the user clicks elsewhere
      if (frmImportSuggest != null) {
        frmImportSuggest.dispose();
        frmImportSuggest = null;
      }
    }
    @Override
    public void windowGainedFocus(WindowEvent e) {
    }
  });
  frmImportSuggest.setLocation(x, y);
  frmImportSuggest.setBounds(x, y, 250, 100);
  frmImportSuggest.pack();
  frmImportSuggest.setVisible(true);
}
/**
 * Re-read preferences and propagate them: reload the mode's settings,
 * notify the error checker, and mark the sketch as changed so the UI
 * refreshes with the new values.
 */
@Override
public void applyPreferences() {
  super.applyPreferences();
  // jmode can be null while the editor is still being constructed
  if (jmode != null) {
    jmode.loadPreferences();
    Messages.log("Applying prefs");
    // trigger it once to refresh UI
    //pdex.preferencesChanged();
    errorChecker.preferencesChanged();
    sketchChanged();
  }
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
// TWEAK MODE
// preference key for the UDP port used to talk to the running sketch
static final String PREF_TWEAK_PORT = "tweak.port";
// preference key: print the instrumented code to the console when set
static final String PREF_TWEAK_SHOW_CODE = "tweak.showcode";
// last-saved program text of each tab, captured when tweak mode starts
public String[] baseCode;
// connection used to push tweaked values into the running sketch
TweakClient tweakClient;
/** Put the text area into tweak mode (interactive number editing). */
protected void startTweakMode() {
  getJavaTextArea().startTweakMode();
}
/**
 * Leave tweak mode: shut down the tweak client, then either keep the
 * tweaked values (asking the user, marking modified tabs and saving the
 * sketch) or revert every tab to its last saved program text.
 * @param handles per-tab lists of the tweakable number handles
 */
protected void stopTweakMode(List<List<Handle>> handles) {
  tweakClient.shutdown();
  getJavaTextArea().stopTweakMode();
  // remove space from the code (before and after)
  //removeSpacesFromCode();
  // check which tabs were modified
  boolean[] tweakedTabs = getTweakedTabs(handles);
  boolean modified = anythingTrue(tweakedTabs);
  if (modified) {
    // ask to keep the values
    if (Messages.showYesNoQuestion(this, Language.text("tweak_mode"),
          Language.text("tweak_mode.keep_changes.line1"),
          Language.text("tweak_mode.keep_changes.line2")) == JOptionPane.YES_OPTION) {
      for (int i = 0; i < sketch.getCodeCount(); i++) {
        if (tweakedTabs[i]) {
          sketch.getCode(i).setModified(true);
        } else {
          // load the saved code of tabs that didn't change
          // (there might be formatting changes that should not be saved)
          sketch.getCode(i).setProgram(sketch.getCode(i).getSavedProgram());
          /* Wild Hack: set document to null so the text editor will refresh
             the program contents when the document tab is being clicked */
          sketch.getCode(i).setDocument(null);
          if (i == sketch.getCurrentCodeIndex()) {
            // this will update the current code
            setCode(sketch.getCurrentCode());
          }
        }
      }
      // save the sketch
      try {
        sketch.save();
      } catch (IOException e) {
        Messages.showWarning("Error", "Could not save the modified sketch.", e);
      }
      // repaint the editor header (show the modified tabs)
      header.repaint();
      textarea.invalidate();
    } else { // no or canceled = don't keep changes
      loadSavedCode();
      // update the painter to draw the saved (old) code
      textarea.invalidate();
    }
  } else {
    // number values were not modified, but we need to load
    // the saved code because of some formatting changes
    loadSavedCode();
    textarea.invalidate();
  }
}
/**
 * Check whether at least one entry in the array is true.
 * @param list flags to scan
 * @return true if any flag is set
 */
static private boolean anythingTrue(boolean[] list) {
  for (int i = 0; i < list.length; i++) {
    if (list[i]) {
      return true;
    }
  }
  return false;
}
/**
 * Forward the current handles and color controls to the text area so it
 * can rebuild its tweak-mode UI overlays.
 * @param handles per-tab lists of number handles
 * @param colorBoxes per-tab lists of color control boxes
 */
protected void updateInterface(List<List<Handle>> handles,
                               List<List<ColorControlBox>> colorBoxes) {
  getJavaTextArea().updateInterface(handles, colorBoxes);
}
/**
 * Determine which tabs contain at least one handle whose value changed.
 * @param handles per-tab lists of number handles
 * @return one flag per tab; true when that tab was tweaked
 */
static private boolean[] getTweakedTabs(List<List<Handle>> handles) {
  boolean[] tweaked = new boolean[handles.size()];
  int tab = 0;
  for (List<Handle> tabHandles : handles) {
    for (Handle handle : tabHandles) {
      if (handle.valueChanged()) {
        tweaked[tab] = true;
      }
    }
    tab++;
  }
  return tweaked;
}
/**
 * Snapshot the last-saved program text of every tab into {@code baseCode},
 * so tweak mode can instrument and later restore it.
 */
protected void initBaseCode() {
  SketchCode[] tabs = sketch.getCode();
  baseCode = new String[tabs.length];
  int i = 0;
  for (SketchCode tab : tabs) {
    baseCode[i++] = tab.getSavedProgram();
  }
}
/**
 * Write the "beautified" number literals back into each tab and record the
 * new character positions of every handle (positions shift as replacements
 * change the text length).
 * @param handles per-tab lists of number handles
 */
protected void initEditorCode(List<List<Handle>> handles) {
  SketchCode[] sketchCode = sketch.getCode();
  for (int tab=0; tab<baseCode.length; tab++) {
    // beautify the numbers
    int charInc = 0;  // running offset caused by earlier replacements in this tab
    String code = baseCode[tab];
    for (Handle n : handles.get(tab)) {
      int s = n.startChar + charInc;
      int e = n.endChar + charInc;
      String newStr = n.strNewValue;
      code = replaceString(code, s, e, newStr);
      n.newStartChar = n.startChar + charInc;
      charInc += n.strNewValue.length() - n.strValue.length();
      n.newEndChar = n.endChar + charInc;
    }
    sketchCode[tab].setProgram(code);
    /* Wild Hack: set document to null so the text editor will refresh
       the program contents when the document tab is being clicked */
    sketchCode[tab].setDocument(null);
  }
  // this will update the current code
  setCode(sketch.getCurrentCode());
}
/**
 * Revert every tab whose in-memory program differs from the saved version
 * back to the saved text, then refresh the current tab's display.
 */
private void loadSavedCode() {
  //SketchCode[] code = sketch.getCode();
  for (SketchCode code : sketch.getCode()) {
    if (!code.getProgram().equals(code.getSavedProgram())) {
      code.setProgram(code.getSavedProgram());
      /* Wild Hack: set document to null so the text editor will refresh
         the program contents when the document tab is being clicked */
      code.setDocument(null);
    }
  }
  // this will update the current code
  setCode(sketch.getCurrentCode());
}
/**
 * Replace all numbers with variables and add code to initialize
 * these variables and handle update messages.
 * @param sketch the sketch whose code will be instrumented in place
 * @param parser parser holding the number handles found in the code
 * @return true if the sketch was instrumented; false when there is nothing
 *         to tweak (no tabs, no handles, or no insertion point after the
 *         size() call could be found)
 */
protected boolean automateSketch(Sketch sketch, SketchParser parser) {
  SketchCode[] code = sketch.getCode();
  List<List<Handle>> handles = parser.allHandles;
  if (code.length < 1) {
    return false;
  }
  if (handles.size() == 0) {
    return false;
  }
  int afterSizePos = SketchParser.getAfterSizePos(baseCode[0]);
  if (afterSizePos < 0) {
    return false;
  }
  // get port number from preferences.txt
  int port;
  String portStr = Preferences.get(PREF_TWEAK_PORT);
  if (portStr == null) {
    Preferences.set(PREF_TWEAK_PORT, "auto");
    portStr = "auto";
  }
  if (portStr.equals("auto")) {
    // random port for udp (0xc000 - 0xffff)
    port = (int)(Math.random()*0x3fff) + 0xc000;
  } else {
    port = Preferences.getInteger(PREF_TWEAK_PORT);
  }
  // create the client that will send the new values to the sketch
  tweakClient = new TweakClient(port);
  // update handles with a reference to the client object
  for (int tab=0; tab<code.length; tab++) {
    for (Handle h : handles.get(tab)) {
      h.setTweakClient(tweakClient);
    }
  }
  // Copy current program to interactive program
  // modify the code below, replace all numbers with their variable names
  // loop through all tabs in the current sketch
  for (int tab=0; tab<code.length; tab++) {
    int charInc = 0;  // running offset from earlier replacements in this tab
    String c = baseCode[tab];
    for (Handle n : handles.get(tab)) {
      // replace number value with a variable
      c = replaceString(c, n.startChar + charInc, n.endChar + charInc, n.name);
      charInc += n.name.length() - n.strValue.length();
    }
    code[tab].setProgram(c);
  }
  // add the main header to the code in the first tab
  String c = code[0].getProgram();
  // header contains variable declaration, initialization,
  // and OSC listener function
  String header;
  header = """
  /*************************/
  /* MODIFIED BY TWEAKMODE */
  /*************************/
  """;
  // add needed OSC imports and the global OSC object
  header += "import java.net.*;\n";
  header += "import java.io.*;\n";
  header += "import java.nio.*;\n\n";
  // write a declaration for int and float arrays
  int numOfInts = howManyInts(handles);
  int numOfFloats = howManyFloats(handles);
  if (numOfInts > 0) {
    header += "int[] tweakmode_int = new int["+numOfInts+"];\n";
  }
  if (numOfFloats > 0) {
    header += "float[] tweakmode_float = new float["+numOfFloats+"];\n\n";
  }
  // add the server code that will receive the value change messages
  // header += TweakClient.getServerCode(port, numOfInts>0, numOfFloats>0);
  header += "TweakModeServer tweakmode_Server;\n";
  header += "void tweakmode_initAllVars() {\n";
  //for (int i=0; i<handles.length; i++) {
  for (List<Handle> list : handles) {
    //for (Handle n : handles[i]) {
    for (Handle n : list) {
      header += "  " + n.name + " = " + n.strValue + ";\n";
    }
  }
  header += "}\n\n";
  header += "void tweakmode_initCommunication() {\n";
  header += " tweakmode_Server = new TweakModeServer();\n";
  header += " tweakmode_Server.setup();\n";
  header += " tweakmode_Server.start();\n";
  header += "}\n";
  header += "\n\n\n\n\n";
  // add call to our initAllVars and initOSC functions
  // from the setup() function.
  String addToSetup = """
  /* TWEAKMODE */
  tweakmode_initAllVars();
  tweakmode_initCommunication();
  /* TWEAKMODE */
  """;
  // recompute the insertion point: tab 0 was just rewritten above
  afterSizePos = SketchParser.getAfterSizePos(c);
  c = replaceString(c, afterSizePos, afterSizePos, addToSetup);
  // Server code defines a class, so it should go later in the sketch
  String serverCode =
    TweakClient.getServerCode(port, numOfInts>0, numOfFloats>0);
  code[0].setProgram(header + c + serverCode);
  // print out modified code
  String showModCode = Preferences.get(PREF_TWEAK_SHOW_CODE);
  if (showModCode == null) {
    Preferences.setBoolean(PREF_TWEAK_SHOW_CODE, false);
  }
  if (Preferences.getBoolean(PREF_TWEAK_SHOW_CODE)) {
    System.out.println("\nTweakMode modified code:\n");
    for (int i=0; i<code.length; i++) {
      System.out.println("tab " + i + "\n");
      System.out.println("=======================================================\n");
      System.out.println(code[i].getProgram());
    }
  }
  return true;
}
/**
 * Replace the character range [start, end) of {@code str} with {@code put}.
 * @param str source string
 * @param start first index to replace (inclusive)
 * @param end index after the last replaced character (exclusive)
 * @param put replacement text
 * @return the resulting string
 */
static private String replaceString(String str, int start, int end, String put) {
  String head = str.substring(0, start);
  String tail = str.substring(end);
  return head + put + tail;
}
/**
 * Count the handles that will be stored in the int array: plain ints
 * plus hex and web-color literals.
 * @param handles per-tab lists of number handles
 * @return number of int-typed handles across all tabs
 */
static private int howManyInts(List<List<Handle>> handles) {
  int total = 0;
  for (List<Handle> tabHandles : handles) {
    for (Handle handle : tabHandles) {
      String type = handle.type;
      if ("int".equals(type) || "hex".equals(type) || "webcolor".equals(type)) {
        total++;
      }
    }
  }
  return total;
}
/**
 * Count the handles that will be stored in the float array.
 * @param handles per-tab lists of number handles
 * @return number of float-typed handles across all tabs
 */
static private int howManyFloats(List<List<Handle>> handles) {
  int total = 0;
  for (List<Handle> tabHandles : handles) {
    for (Handle handle : tabHandles) {
      if ("float".equals(handle.type)) {
        total++;
      }
    }
  }
  return total;
}
}
|
JavaEditor
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/util/springframework/Assert.java
|
{
"start": 737,
"end": 1471
}
|
class ____ assists in validating arguments.
*
* <p>
* Useful for identifying programmer errors early and clearly at runtime.
*
* <p>
* For example, if the contract of a public method states it does not allow
* {@code null} arguments, {@code Assert} can be used to validate that contract.
* Doing this clearly indicates a contract violation when it occurs and protects
* the class's invariants.
*
* <p>
* Typically used to validate method arguments rather than configuration
* properties, to check for cases that are usually programmer errors rather than
* configuration errors. In contrast to configuration initialization code, there
* is usually no point in falling back to defaults in such methods.
*
* <p>
* This
|
that
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/function/FailableBiConsumer.java
|
{
"start": 1168,
"end": 2563
}
|
interface ____<T, U, E extends Throwable> {
/** NOP singleton */
@SuppressWarnings("rawtypes")
FailableBiConsumer NOP = (t, u) -> { /* NOP */ };
/**
* Gets the NOP singleton.
*
* @param <T> Consumed type 1.
* @param <U> Consumed type 2.
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <T, U, E extends Throwable> FailableBiConsumer<T, U, E> nop() {
return NOP;
}
/**
* Accepts the given arguments.
*
* @param t the first parameter for the consumable to accept
* @param u the second parameter for the consumable to accept
* @throws E Thrown when the consumer fails.
*/
void accept(T t, U u) throws E;
/**
* Returns a composed {@link FailableBiConsumer} like {@link BiConsumer#andThen(BiConsumer)}.
*
* @param after the operation to perform after this one.
* @return a composed {@link FailableBiConsumer} like {@link BiConsumer#andThen(BiConsumer)}.
* @throws NullPointerException when {@code after} is null.
*/
default FailableBiConsumer<T, U, E> andThen(final FailableBiConsumer<? super T, ? super U, E> after) {
Objects.requireNonNull(after);
return (t, u) -> {
accept(t, u);
after.accept(t, u);
};
}
}
|
FailableBiConsumer
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/core/Maybe.java
|
{
"start": 229216,
"end": 236478
}
|
class ____ creating a {@link MaybeTransformer} with it is recommended.
* <p>
* Note also that it is not possible to stop the subscription phase in {@code lift()} as the {@code apply()} method
* requires a non-{@code null} {@code MaybeObserver} instance to be returned, which is then unconditionally subscribed to
* the current {@code Maybe}. For example, if the operator decided there is no reason to subscribe to the
* upstream source because of some optimization possibility or a failure to prepare the operator, it still has to
* return a {@code MaybeObserver} that should immediately dispose the upstream's {@link Disposable} in its
* {@code onSubscribe} method. Again, using a {@code MaybeTransformer} and extending the {@code Maybe} is
* a better option as {@link #subscribeActual} can decide to not subscribe to its upstream after all.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code lift} does not operate by default on a particular {@link Scheduler}, however, the
* {@code MaybeOperator} may use a {@code Scheduler} to support its own asynchronous behavior.</dd>
* </dl>
*
* @param <R> the output value type
* @param lift the {@code MaybeOperator} that receives the downstream's {@code MaybeObserver} and should return
* a {@code MaybeObserver} with custom behavior to be used as the consumer for the current
* {@code Maybe}.
* @return the new {@code Maybe} instance
* @throws NullPointerException if {@code lift} is {@code null}
* @see <a href="https://github.com/ReactiveX/RxJava/wiki/Writing-operators-for-2.0">RxJava wiki: Writing operators</a>
* @see #compose(MaybeTransformer)
*/
@CheckReturnValue
@NonNull
@SchedulerSupport(SchedulerSupport.NONE)
public final <@NonNull R> Maybe<R> lift(@NonNull MaybeOperator<? extends R, ? super T> lift) {
Objects.requireNonNull(lift, "lift is null");
return RxJavaPlugins.onAssembly(new MaybeLift<>(this, lift));
}
/**
* Returns a {@code Maybe} that applies a specified function to the item emitted by the current {@code Maybe} and
* emits the result of this function application.
* <p>
* <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Maybe.map.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code map} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the result value type
* @param mapper
* a function to apply to the item emitted by the {@code Maybe}
* @return the new {@code Maybe} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/map.html">ReactiveX operators documentation: Map</a>
*/
@CheckReturnValue
@NonNull
@SchedulerSupport(SchedulerSupport.NONE)
public final <@NonNull R> Maybe<R> map(@NonNull Function<? super T, ? extends R> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new MaybeMap<>(this, mapper));
}
/**
* Maps the signal types of this {@code Maybe} into a {@link Notification} of the same kind
* and emits it as a {@link Single}'s {@code onSuccess} value to downstream.
* <p>
* <img width="640" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/materialize.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code materialize} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.2.4 - experimental
* @return the new {@code Single} instance
* @since 3.0.0
* @see Single#dematerialize(Function)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<Notification<T>> materialize() {
return RxJavaPlugins.onAssembly(new MaybeMaterialize<>(this));
}
/**
* Flattens this {@code Maybe} and another {@link MaybeSource} into a single {@link Flowable}, without any transformation.
* <p>
* <img width="640" height="218" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Maybe.mergeWith.png" alt="">
* <p>
* You can combine items emitted by multiple {@code Maybe}s so that they appear as a single {@code Flowable}, by
* using the {@code mergeWith} method.
* <dl>
* <dt><b>Backpressure:</b></dt>
* <dd>The operator honors backpressure from downstream.</dd>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code mergeWith} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param other
* a {@code MaybeSource} to be merged
* @return the new {@code Flowable} instance
* @throws NullPointerException if {@code other} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a>
*/
@BackpressureSupport(BackpressureKind.FULL)
@CheckReturnValue
@NonNull
@SchedulerSupport(SchedulerSupport.NONE)
public final Flowable<T> mergeWith(@NonNull MaybeSource<? extends T> other) {
Objects.requireNonNull(other, "other is null");
return merge(this, other);
}
/**
* Wraps a {@code Maybe} to emit its item (or notify of its error) on a specified {@link Scheduler},
* asynchronously.
* <p>
* <img width="640" height="183" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Maybe.observeOn.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>you specify which {@code Scheduler} this operator will use.</dd>
* </dl>
*
* @param scheduler
* the {@code Scheduler} to notify subscribers on
* @return the new {@code Maybe} instance that its subscribers are notified on the specified
* {@code Scheduler}
* @throws NullPointerException if {@code scheduler} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/observeon.html">ReactiveX operators documentation: ObserveOn</a>
* @see <a href="http://www.grahamlea.com/2014/07/rxjava-threading-examples/">RxJava Threading Examples</a>
* @see #subscribeOn
*/
@CheckReturnValue
@NonNull
@SchedulerSupport(SchedulerSupport.CUSTOM)
public final Maybe<T> observeOn(@NonNull Scheduler scheduler) {
Objects.requireNonNull(scheduler, "scheduler is null");
return RxJavaPlugins.onAssembly(new MaybeObserveOn<>(this, scheduler));
}
/**
* Filters the items emitted by the current {@code Maybe}, only emitting its success value if that
* is an instance of the supplied {@link Class}.
* <p>
* <img width="640" height="291" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Maybe.ofType.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code ofType} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U> the output type
* @param clazz
* the
|
and
|
java
|
apache__camel
|
components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/SendVenueMessage.java
|
{
"start": 1013,
"end": 3739
}
|
class ____ extends OutgoingMessage {
@JsonProperty("longitude")
private double longitude;
@JsonProperty("latitude")
private double latitude;
@JsonProperty("title")
private String title;
@JsonProperty("address")
private String address;
@JsonProperty("foursquare_id")
private String foursquareId;
@JsonProperty("foursquare_type")
private String foursquareType;
@JsonProperty("reply_markup")
private ReplyMarkup replyMarkup;
public SendVenueMessage() {
}
public SendVenueMessage(double latitude, double longitude, String title, String address) {
this.setLatitude(latitude);
this.setLongitude(longitude);
this.setTitle(title);
this.setAddress(address);
}
public void setLatitude(double latitude) {
this.latitude = latitude;
}
public void setLongitude(double longitude) {
this.longitude = longitude;
}
public double getLongitude() {
return longitude;
}
public double getLatitude() {
return latitude;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getFoursquareId() {
return foursquareId;
}
public void setFoursquareId(String foursquareId) {
this.foursquareId = foursquareId;
}
public String getFoursquareType() {
return foursquareType;
}
public void setFoursquareType(String foursquareType) {
this.foursquareType = foursquareType;
}
public ReplyMarkup getReplyMarkup() {
return replyMarkup;
}
public void setReplyMarkup(ReplyMarkup replyMarkup) {
this.replyMarkup = replyMarkup;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("SendLocationMessage{");
sb.append("latitude=").append(latitude).append('\'');
sb.append(", longitude=").append(longitude).append('\'');
sb.append(", title=").append(title).append('\'');
sb.append(", address=").append(address).append('\'');
sb.append(", foursquareId=").append(foursquareId).append('\'');
sb.append(", foursquareType=").append(foursquareType).append('\'');
sb.append(", disableNotification=").append(disableNotification).append('\'');
sb.append(", replyToMessageId=").append(replyToMessageId).append('\'');
sb.append(", replyMarkup=").append(replyMarkup);
sb.append('}');
return sb.toString();
}
}
|
SendVenueMessage
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
|
{
"start": 1206,
"end": 1508
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TeraScheduler.class);
private Split[] splits;
private List<Host> hosts = new ArrayList<Host>();
private int slotsPerHost;
private int remainingSplits = 0;
private FileSplit[] realSplits = null;
static
|
TeraScheduler
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PageSizeUtil.java
|
{
"start": 3478,
"end": 4037
}
|
class ____ {
static int getSystemPageSize() {
Unsafe unsafe = unsafe();
return unsafe == null ? PAGE_SIZE_UNKNOWN : unsafe.pageSize();
}
@Nullable
private static Unsafe unsafe() {
if (PlatformDependent.hasUnsafe()) {
return (Unsafe)
AccessController.doPrivileged(
(PrivilegedAction<Object>) () -> UnsafeAccess.UNSAFE);
} else {
return null;
}
}
}
}
|
PageSizeUtilInternal
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldContainAnyOf.java
|
{
"start": 725,
"end": 1951
}
|
class ____ extends BasicErrorMessageFactory {
private static final String DEFAULT_FORMAT = "%nExpecting actual:%n" +
" %s%n" +
"to contain at least one of the following elements:%n" +
" %s%n" +
"but none were found";
private static final String FORMAT_WITH_COMPARISON_STRATEGY = DEFAULT_FORMAT + " %s";
public static ErrorMessageFactory shouldContainAnyOf(Object actual, Object expected,
ComparisonStrategy comparisonStrategy) {
return new ShouldContainAnyOf(actual, expected, comparisonStrategy);
}
public static ErrorMessageFactory shouldContainAnyOf(Object actual, Object expected) {
return new ShouldContainAnyOf(actual, expected);
}
private ShouldContainAnyOf(Object actual, Object expected, ComparisonStrategy comparisonStrategy) {
super(FORMAT_WITH_COMPARISON_STRATEGY, actual, expected, comparisonStrategy);
}
private ShouldContainAnyOf(Object actual, Object expected) {
super(DEFAULT_FORMAT, actual, expected);
}
}
|
ShouldContainAnyOf
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/consumers/MultipleMessagesSameTopicIT.java
|
{
"start": 1237,
"end": 2768
}
|
class ____ extends AbstractPersistentJMSTest {
@Test
public void testMultipleMessagesOnSameTopic() throws Exception {
getMockEndpoint("mock:a").expectedBodiesReceived("Hello Camel 1", "Hello Camel 2", "Hello Camel 3",
"Hello Camel 4");
getMockEndpoint("mock:b").expectedBodiesReceived("Hello Camel 1", "Hello Camel 2", "Hello Camel 3",
"Hello Camel 4");
template.sendBody("activemq:topic:MultipleMessagesSameTopicIT", "Hello Camel 1");
template.sendBody("activemq:topic:MultipleMessagesSameTopicIT", "Hello Camel 2");
template.sendBody("activemq:topic:MultipleMessagesSameTopicIT", "Hello Camel 3");
template.sendBody("activemq:topic:MultipleMessagesSameTopicIT", "Hello Camel 4");
MockEndpoint.assertIsSatisfied(context);
}
@BeforeEach
void waitForConnections() {
Awaitility.await().until(() -> context.getRoute("a").getUptimeMillis() > 200);
Awaitility.await().until(() -> context.getRoute("b").getUptimeMillis() > 200);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:topic:MultipleMessagesSameTopicIT").routeId("a")
.to("log:a", "mock:a");
from("activemq:topic:MultipleMessagesSameTopicIT").routeId("b")
.to("log:b", "mock:b");
}
};
}
}
|
MultipleMessagesSameTopicIT
|
java
|
apache__camel
|
components/camel-google/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/sql/GoogleBigQuerySQLProducer.java
|
{
"start": 2197,
"end": 12364
}
|
class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(GoogleBigQuerySQLProducer.class);
private final GoogleBigQuerySQLConfiguration configuration;
private final BigQuery bigquery;
private String query;
private Set<String> queryParameterNames;
public GoogleBigQuerySQLProducer(BigQuery bigquery, GoogleBigQuerySQLEndpoint endpoint,
GoogleBigQuerySQLConfiguration configuration) {
super(endpoint);
this.bigquery = bigquery;
this.configuration = configuration;
}
/**
* Processes the exchange by executing a SQL query against BigQuery.
* <p>
* Query parameters can be provided via:
* <ul>
* <li>Message body as {@code Map<String, Object>}</li>
* <li>Message headers</li>
* </ul>
* <p>
* For SELECT queries, results are returned based on {@link OutputType}:
* <ul>
* <li>SELECT_LIST - {@code List<Map<String, Object>>} with pagination headers</li>
* <li>STREAM_LIST - {@code Iterator<Map<String, Object>>} for streaming</li>
* </ul>
* <p>
* For DML queries, returns the number of affected rows as {@code Long}.
*/
@Override
public void process(Exchange exchange) throws Exception {
String translatedQuery = SqlHelper.translateQuery(query, exchange);
Map<String, Object> queryParameters = extractParameters(exchange);
Message message = exchange.getMessage();
message.setHeader(GoogleBigQueryConstants.TRANSLATED_QUERY, translatedQuery);
JobId jobId = message.getHeader(GoogleBigQueryConstants.JOB_ID, JobId.class);
Job job = executeJob(jobId, translatedQuery, queryParameters);
if (isSelectQueryJob(job)) {
processSelectQueryJob(message, job);
} else {
long affectedRows = job.<JobStatistics.QueryStatistics> getStatistics().getNumDmlAffectedRows();
LOG.debug("The query {} affected {} rows", query, affectedRows);
message.setBody(affectedRows);
}
}
/**
* Processes SELECT query results based on the configured output type. Both types use pageSize for fetching results
* from BigQuery. For SELECT_LIST, loads current page into memory and sets pagination headers. For STREAM_LIST,
* creates an iterator that automatically fetches pages using pageSize.
*/
private void processSelectQueryJob(Message message, Job job) throws Exception {
long pageSize = configuration.getPageSize();
String pageToken = message.getHeader(GoogleBigQueryConstants.PAGE_TOKEN, configuration::getPageToken, String.class);
TableResult result = getTableResult(job, pageSize, pageToken);
Schema schema = result.getSchema();
if (schema == null) {
LOG.debug("Query result schema is null. Unable to process the result set.");
message.setBody(result.getTotalRows());
return;
}
switch (configuration.getOutputType()) {
case SELECT_LIST -> {
List<Map<String, Object>> rows = processSelectResult(result, schema);
LOG.debug("The query {} returned {} rows", query, rows.size());
message.setBody(rows);
message.setHeader(GoogleBigQueryConstants.NEXT_PAGE_TOKEN, result.getNextPageToken());
message.setHeader(GoogleBigQueryConstants.JOB_ID, job.getJobId());
}
case STREAM_LIST -> {
var iterator = new StreamListIterator<>(
new FieldValueListMapper(schema.getFields()),
result.iterateAll().iterator());
message.setBody(iterator);
}
}
}
/**
* Executes a BigQuery job, either by retrieving an existing job or creating a new one. If jobId is provided,
* retrieves the existing job; otherwise creates a new query job. Waits for the job to complete before returning.
*/
private Job executeJob(JobId jobId, String translatedQuery, Map<String, Object> queryParameters) throws Exception {
QueryJobConfiguration.Builder builder = QueryJobConfiguration.newBuilder(translatedQuery)
.setUseLegacySql(false);
setQueryParameters(queryParameters, builder);
QueryJobConfiguration queryJobConfiguration = builder.build();
try {
if (LOG.isTraceEnabled()) {
LOG.trace("Sending query to bigquery standard sql: {}", translatedQuery);
}
var job = ObjectHelper.isNotEmpty(jobId)
? bigquery.getJob(jobId)
: bigquery.create(getJobInfo(queryJobConfiguration));
return job.waitFor();
} catch (BigQueryException e) {
throw new Exception("Query " + translatedQuery + " failed: " + e.getError(), e);
}
}
/**
* Creates JobInfo with a random job ID for the given query configuration.
*/
private JobInfo getJobInfo(QueryJobConfiguration queryJobConfiguration) {
return JobInfo.of(
JobId.newBuilder()
.setRandomJob()
.setProject(configuration.getProjectId())
.build(),
queryJobConfiguration);
}
/**
* Retrieves query results from a completed job with optional pagination.
*/
private TableResult getTableResult(Job job, long pageSize, String pageToken)
throws Exception {
String translatedQuery = job.<QueryJobConfiguration> getConfiguration().getQuery();
try {
QueryResultsOption[] queryResultsOptions = getQueryResultsOptions(pageSize, pageToken);
return job.getQueryResults(queryResultsOptions);
} catch (BigQueryException e) {
throw new Exception("Query " + translatedQuery + " failed: " + e.getError(), e);
}
}
/**
* Builds query result options array from pageSize and pageToken. Only includes options with non-default values
* (pageSize > 0, pageToken != null).
*/
private static QueryResultsOption[] getQueryResultsOptions(long pageSize, String pageToken) {
List<QueryResultsOption> options = new ArrayList<>();
if (pageSize > 0) {
options.add(QueryResultsOption.pageSize(pageSize));
}
if (pageToken != null) {
options.add(QueryResultsOption.pageToken(pageToken));
}
return options.toArray(new QueryResultsOption[0]);
}
/**
* Checks if the job is a SELECT query by examining its statement type.
*/
private static boolean isSelectQueryJob(Job job) {
JobStatistics.QueryStatistics statistics = job.getStatistics();
return statistics.getStatementType().equals(JobStatistics.QueryStatistics.StatementType.SELECT);
}
/**
* Extracts query parameters from exchange headers and body. Parameters are identified by names found in the query
* (e.g., @paramName). Body values take precedence over header values.
*
* @throws RuntimeExchangeException if a required parameter is not found
*/
private Map<String, Object> extractParameters(Exchange exchange) {
if (queryParameterNames == null || queryParameterNames.isEmpty()) {
return null;
}
Message message = exchange.getMessage();
HashMap<String, Object> headers = new HashMap<>(message.getHeaders());
if (message.getBody() instanceof Map) {
try {
headers.putAll(message.getBody(Map.class));
} catch (ClassCastException e) {
LOG.warn("Unable to perform cast while extracting header parameters: {}", e.getMessage(), e);
}
}
HashMap<String, Object> result = new HashMap<>(queryParameterNames.size());
queryParameterNames.forEach(s -> {
Object value = headers.get(s);
if (value == null) {
throw new RuntimeExchangeException(
"SQL parameter with name '" + s + "' not found in the message headers", exchange);
}
result.put(s, headers.get(s));
});
return result;
}
private void setQueryParameters(Map<String, Object> params, QueryJobConfiguration.Builder builder) {
if (params == null) {
return;
}
params.forEach((key, value) -> {
QueryParameterValue parameterValue;
try {
parameterValue = QueryParameterValue.of(value, (Class<Object>) value.getClass());
} catch (IllegalArgumentException e) {
LOG.warn("{} Fallback to *.toString() value.", e.getMessage());
//use String representation
parameterValue = QueryParameterValue.of(value.toString(), StandardSQLTypeName.STRING);
}
builder.addNamedParameter(key, parameterValue);
});
}
/**
* Converts TableResult to a list of maps for SELECT_LIST output type. Each map represents one row with field names
* as keys.
*/
private List<Map<String, Object>> processSelectResult(TableResult result, Schema schema) {
var mapper = new FieldValueListMapper(schema.getFields());
return StreamSupport.stream(result.getValues().spliterator(), false)
.map(mapper::map)
.toList();
}
@Override
public GoogleBigQuerySQLEndpoint getEndpoint() {
return (GoogleBigQuerySQLEndpoint) super.getEndpoint();
}
/**
* Initializes the producer by resolving the query string and extracting parameter names. Query resolution supports
* file references and variable substitution.
*/
@Override
protected void doStart() throws Exception {
super.doStart();
String placeholder = ":#";
query = SqlHelper.resolveQuery(getEndpoint().getCamelContext(), configuration.getQueryString(), placeholder);
queryParameterNames = SqlHelper.extractParameterNames(query);
}
}
|
GoogleBigQuerySQLProducer
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ext/javatime/ser/ZonedDateTimeSerWithJsonFormat333Test.java
|
{
"start": 807,
"end": 1555
}
|
class ____ {
@JsonFormat(shape = JsonFormat.Shape.STRING)
public ZonedDateTime value;
}
private final ObjectMapper MAPPER = mapperBuilder()
.enable(DateTimeFeature.WRITE_DATES_WITH_ZONE_ID)
.build();
@Test
public void testJsonFormatOverridesSerialization() throws Exception
{
// ISO-8601 string for ZonedDateTime
ZonedDateTime zonedDateTime = ZonedDateTime.parse("2024-11-15T18:27:06.921054+01:00[Europe/Berlin]");
ContainerWithPattern333 input = new ContainerWithPattern333();
input.value = zonedDateTime;
assertEquals(a2q("{'value':'2024-11-15 18:27:06 CET'}"),
MAPPER.writeValueAsString(input));
}
}
|
ContainerWithoutPattern333
|
java
|
apache__thrift
|
contrib/thrift-maven-plugin/src/main/java/org/apache/thrift/maven/AbstractThriftMojo.java
|
{
"start": 2531,
"end": 15445
}
|
class ____ extends AbstractMojo {
private static final String THRIFT_FILE_SUFFIX = ".thrift";
private static final String DEFAULT_INCLUDES = "**/*" + THRIFT_FILE_SUFFIX;
/**
* The current Maven project.
*
* @parameter default-value="${project}"
* @readonly
* @required
*/
protected MavenProject project;
/**
* A helper used to add resources to the project.
*
* @component
* @required
*/
protected MavenProjectHelper projectHelper;
/**
* This is the path to the {@code thrift} executable. By default it will search the {@code $PATH}.
*
* @parameter default-value="thrift"
* @required
*/
private String thriftExecutable;
/**
* This string is passed to the {@code --gen} option of the {@code thrift} parameter. By default
* it will generate Java output. The main reason for this option is to be able to add options
* to the Java generator - if you generate something else, you're on your own.
*
* @parameter default-value="java"
*/
private String generator;
/**
* @parameter
*/
private File[] additionalThriftPathElements = new File[]{};
/**
* Since {@code thrift} cannot access jars, thrift files in dependencies are extracted to this location
* and deleted on exit. This directory is always cleaned during execution.
*
* @parameter default-value="${project.build.directory}/thrift-dependencies"
* @required
*/
private File temporaryThriftFileDirectory;
/**
* This is the path to the local maven {@code repository}.
*
* @parameter default-value="${localRepository}"
* @required
*/
protected ArtifactRepository localRepository;
/**
* Set this to {@code false} to disable hashing of dependent jar paths.
* <p/>
* This plugin expands jars on the classpath looking for embedded .thrift files.
* Normally these paths are hashed (MD5) to avoid issues with long file names on windows.
* However if this property is set to {@code false} longer paths will be used.
*
* @parameter default-value="true"
* @required
*/
protected boolean hashDependentPaths;
/**
* @parameter
*/
private Set<String> includes = ImmutableSet.of(DEFAULT_INCLUDES);
/**
* @parameter
*/
private Set<String> excludes = ImmutableSet.of();
/**
* @parameter
*/
private long staleMillis = 0;
/**
* @parameter
*/
private boolean checkStaleness = false;
/**
* Executes the mojo.
*/
public void execute() throws MojoExecutionException, MojoFailureException {
checkParameters();
final File thriftSourceRoot = getThriftSourceRoot();
if (thriftSourceRoot.exists()) {
try {
ImmutableSet<File> thriftFiles = findThriftFilesInDirectory(thriftSourceRoot);
final File outputDirectory = getOutputDirectory();
ImmutableSet<File> outputFiles = findGeneratedFilesInDirectory(getOutputDirectory());
if (thriftFiles.isEmpty()) {
getLog().info("No thrift files to compile.");
} else if (checkStaleness && ((lastModified(thriftFiles) + staleMillis) < lastModified(outputFiles))) {
getLog().info("Skipping compilation because target directory newer than sources.");
attachFiles();
} else {
ImmutableSet<File> derivedThriftPathElements =
makeThriftPathFromJars(temporaryThriftFileDirectory, getDependencyArtifactFiles());
outputDirectory.mkdirs();
// Quick fix to fix issues with two mvn installs in a row (ie no clean)
// cleanDirectory(outputDirectory);
Thrift thrift = new Thrift.Builder(thriftExecutable, outputDirectory)
.setGenerator(generator)
.addThriftPathElement(thriftSourceRoot)
.addThriftPathElements(derivedThriftPathElements)
.addThriftPathElements(asList(additionalThriftPathElements))
.addThriftFiles(thriftFiles)
.build();
final int exitStatus = thrift.compile();
if (exitStatus != 0) {
getLog().error("thrift failed output: " + thrift.getOutput());
getLog().error("thrift failed error: " + thrift.getError());
throw new MojoFailureException(
"thrift did not exit cleanly. Review output for more information.");
}
attachFiles();
}
} catch (IOException e) {
throw new MojoExecutionException("An IO error occurred", e);
} catch (IllegalArgumentException e) {
throw new MojoFailureException("thrift failed to execute because: " + e.getMessage(), e);
} catch (CommandLineException e) {
throw new MojoExecutionException("An error occurred while invoking thrift.", e);
}
} else {
getLog().info(format("%s does not exist. Review the configuration or consider disabling the plugin.",
thriftSourceRoot));
}
}
ImmutableSet<File> findGeneratedFilesInDirectory(File directory) throws IOException {
if (directory == null || !directory.isDirectory())
return ImmutableSet.of();
List<File> javaFilesInDirectory = getFiles(directory, "**/*.java", null);
return ImmutableSet.copyOf(javaFilesInDirectory);
}
private long lastModified(ImmutableSet<File> files) {
long result = 0;
for (File file : files) {
if (file.lastModified() > result)
result = file.lastModified();
}
return result;
}
private void checkParameters() {
checkNotNull(project, "project");
checkNotNull(projectHelper, "projectHelper");
checkNotNull(thriftExecutable, "thriftExecutable");
checkNotNull(generator, "generator");
final File thriftSourceRoot = getThriftSourceRoot();
checkNotNull(thriftSourceRoot);
checkArgument(!thriftSourceRoot.isFile(), "thriftSourceRoot is a file, not a directory");
checkNotNull(temporaryThriftFileDirectory, "temporaryThriftFileDirectory");
checkState(!temporaryThriftFileDirectory.isFile(), "temporaryThriftFileDirectory is a file, not a directory");
final File outputDirectory = getOutputDirectory();
checkNotNull(outputDirectory);
checkState(!outputDirectory.isFile(), "the outputDirectory is a file, not a directory");
}
protected abstract File getThriftSourceRoot();
protected abstract List<Artifact> getDependencyArtifacts();
protected abstract File getOutputDirectory();
protected abstract void attachFiles();
/**
* Gets the {@link File} for each dependency artifact.
*
* @return A set of all dependency artifacts.
*/
private ImmutableSet<File> getDependencyArtifactFiles() {
Set<File> dependencyArtifactFiles = newHashSet();
for (Artifact artifact : getDependencyArtifacts()) {
dependencyArtifactFiles.add(artifact.getFile());
}
return ImmutableSet.copyOf(dependencyArtifactFiles);
}
/**
* @throws IOException
*/
ImmutableSet<File> makeThriftPathFromJars(File temporaryThriftFileDirectory, Iterable<File> classpathElementFiles)
throws IOException, MojoExecutionException {
checkNotNull(classpathElementFiles, "classpathElementFiles");
// clean the temporary directory to ensure that stale files aren't used
if (temporaryThriftFileDirectory.exists()) {
cleanDirectory(temporaryThriftFileDirectory);
}
Set<File> thriftDirectories = newHashSet();
for (File classpathElementFile : classpathElementFiles) {
// for some reason under IAM, we receive poms as dependent files
// I am excluding .xml rather than including .jar as there may be other extensions in use (sar, har, zip)
if (classpathElementFile.isFile() && classpathElementFile.canRead() &&
!classpathElementFile.getName().endsWith(".xml")) {
// create the jar file. the constructor validates.
JarFile classpathJar;
try {
classpathJar = new JarFile(classpathElementFile);
} catch (IOException e) {
throw new IllegalArgumentException(format(
"%s was not a readable artifact", classpathElementFile));
}
/**
* Copy each .thrift file found in the JAR into a temporary directory, preserving the
* directory path it had relative to its containing JAR. Add the resulting root directory
* (unique for each JAR processed) to the set of thrift include directories to use when
* compiling.
*/
for (JarEntry jarEntry : list(classpathJar.entries())) {
final String jarEntryName = jarEntry.getName();
if (jarEntry.getName().endsWith(THRIFT_FILE_SUFFIX)) {
final String truncatedJarPath = truncatePath(classpathJar.getName());
final File thriftRootDirectory = new File(temporaryThriftFileDirectory, truncatedJarPath);
final File uncompressedCopy =
new File(thriftRootDirectory, jarEntryName);
uncompressedCopy.getParentFile().mkdirs();
copyStreamToFile(new RawInputStreamFacade(classpathJar
.getInputStream(jarEntry)), uncompressedCopy);
thriftDirectories.add(thriftRootDirectory);
}
}
} else if (classpathElementFile.isDirectory()) {
File[] thriftFiles = classpathElementFile.listFiles(new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.endsWith(THRIFT_FILE_SUFFIX);
}
});
if (thriftFiles.length > 0) {
thriftDirectories.add(classpathElementFile);
}
}
}
return ImmutableSet.copyOf(thriftDirectories);
}
ImmutableSet<File> findThriftFilesInDirectory(File directory) throws IOException {
checkNotNull(directory);
checkArgument(directory.isDirectory(), "%s is not a directory", directory);
List<File> thriftFilesInDirectory = getFiles(directory,
Joiner.on(",").join(includes),
Joiner.on(",").join(excludes));
return ImmutableSet.copyOf(thriftFilesInDirectory);
}
/**
* Truncates the path of jar files so that they are relative to the local repository.
*
* @param jarPath the full path of a jar file.
* @return the truncated path relative to the local repository or root of the drive.
*/
String truncatePath(final String jarPath) throws MojoExecutionException {
if (hashDependentPaths) {
try {
return toHexString(MessageDigest.getInstance("MD5").digest(jarPath.getBytes()));
} catch (NoSuchAlgorithmException e) {
throw new MojoExecutionException("Failed to expand dependent jar", e);
}
}
String repository = localRepository.getBasedir().replace('\\', '/');
if (!repository.endsWith("/")) {
repository += "/";
}
String path = jarPath.replace('\\', '/');
int repositoryIndex = path.indexOf(repository);
if (repositoryIndex != -1) {
path = path.substring(repositoryIndex + repository.length());
}
// By now the path should be good, but do a final check to fix windows machines.
int colonIndex = path.indexOf(':');
if (colonIndex != -1) {
// 2 = :\ in C:\
path = path.substring(colonIndex + 2);
}
return path;
}
private static final char[] HEX_CHARS = "0123456789abcdef".toCharArray();
public static String toHexString(byte[] byteArray) {
final StringBuilder hexString = new StringBuilder(2 * byteArray.length);
for (final byte b : byteArray) {
hexString.append(HEX_CHARS[(b & 0xF0) >> 4]).append(HEX_CHARS[b & 0x0F]);
}
return hexString.toString();
}
}
|
AbstractThriftMojo
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/clientproxy/bridgemethod/Complex.java
|
{
"start": 706,
"end": 785
}
|
class ____ extends AnnotationLiteral<Complex> implements Complex {
}
}
|
Literal
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryTransformers.java
|
{
"start": 848,
"end": 944
}
|
class ____ common query transformations.
*
* @author Mark Paluch
* @since 3.2.5
*/
|
encapsulating
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ObjectOfTypeStrategy.java
|
{
"start": 1573,
"end": 1790
}
|
class ____ and
* key-value pairs. The function signature is: {@code OBJECT_OF(className, key1, value1, key2,
* value2, ...)}
*
* <p>The strategy performs the following operations:
*
* <ul>
* <li>Extracts the
|
name
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/view/xslt/XsltViewResolverTests.java
|
{
"start": 957,
"end": 1639
}
|
class ____ {
@Test
void resolveView() throws Exception {
StaticApplicationContext ctx = new StaticApplicationContext();
String prefix = ClassUtils.classPackageAsResourcePath(getClass());
String suffix = ".xsl";
String viewName = "products";
XsltViewResolver viewResolver = new XsltViewResolver();
viewResolver.setPrefix(prefix);
viewResolver.setSuffix(suffix);
viewResolver.setApplicationContext(ctx);
XsltView view = (XsltView) viewResolver.resolveViewName(viewName, Locale.ENGLISH);
assertThat(view).as("View should not be null").isNotNull();
assertThat(view.getUrl()).as("Incorrect URL").isEqualTo((prefix + viewName + suffix));
}
}
|
XsltViewResolverTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java
|
{
"start": 987,
"end": 2900
}
|
class ____ extends AbstractScalarFunctionTestCase {
public ToVersionTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
// TODO multivalue fields
String read = "Attribute[channel=0]";
String stringEvaluator = "ToVersionFromStringEvaluator[asString=" + read + "]";
List<TestCaseSupplier> suppliers = new ArrayList<>();
// Converting and IP to an IP doesn't change anything. Everything should succeed.
TestCaseSupplier.forUnaryVersion(suppliers, read, DataType.VERSION, Version::toBytesRef, List.of());
// None of the random strings ever look like versions so they should all become "invalid" versions:
// https://github.com/elastic/elasticsearch/issues/98989
// TODO should this return null with warnings? they aren't version shaped at all.
TestCaseSupplier.forUnaryStrings(
suppliers,
stringEvaluator,
DataType.VERSION,
bytesRef -> new Version(bytesRef.utf8ToString()).toBytesRef(),
List.of()
);
// But strings that are shaped like versions do parse to valid versions
for (DataType inputType : DataType.stringTypes()) {
TestCaseSupplier.unary(
suppliers,
read,
TestCaseSupplier.versionCases(inputType.typeName() + " "),
DataType.VERSION,
bytesRef -> new Version((BytesRef) bytesRef).toBytesRef(),
List.of()
);
}
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new ToVersion(source, args.get(0));
}
}
|
ToVersionTests
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configuration/OAuth2AuthorizedClientManagerConfigurationTests.java
|
{
"start": 4830,
"end": 15772
}
|
class ____ {
private static OAuth2AccessTokenResponseClient<? super AbstractOAuth2AuthorizationGrantRequest> MOCK_RESPONSE_CLIENT;
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
private OAuth2AuthorizedClientManager authorizedClientManager;
@Autowired
private ClientRegistrationRepository clientRegistrationRepository;
@Autowired
private OAuth2AuthorizedClientRepository authorizedClientRepository;
@Autowired(required = false)
private AuthorizationCodeOAuth2AuthorizedClientProvider authorizationCodeAuthorizedClientProvider;
private MockHttpServletRequest request;
private MockHttpServletResponse response;
@BeforeEach
@SuppressWarnings("unchecked")
public void setUp() {
MOCK_RESPONSE_CLIENT = mock(OAuth2AccessTokenResponseClient.class);
this.request = new MockHttpServletRequest();
this.response = new MockHttpServletResponse();
}
@Test
public void loadContextWhenOAuth2ClientEnabledThenConfigured() {
this.spring.register(MinimalOAuth2ClientConfig.class).autowire();
assertThat(this.authorizedClientManager).isNotNull();
}
@Test
public void authorizeWhenAuthorizationCodeAuthorizedClientProviderBeanThenUsed() {
this.spring.register(CustomAuthorizedClientProvidersConfig.class).autowire();
TestingAuthenticationToken authentication = new TestingAuthenticationToken("user", null);
// @formatter:off
OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
.withClientRegistrationId("google")
.principal(authentication)
.attribute(HttpServletRequest.class.getName(), this.request)
.attribute(HttpServletResponse.class.getName(), this.response)
.build();
assertThatExceptionOfType(ClientAuthorizationRequiredException.class)
.isThrownBy(() -> this.authorizedClientManager.authorize(authorizeRequest))
.extracting(OAuth2AuthorizationException::getError)
.extracting(OAuth2Error::getErrorCode)
.isEqualTo("client_authorization_required");
// @formatter:on
verify(this.authorizationCodeAuthorizedClientProvider).authorize(any(OAuth2AuthorizationContext.class));
}
@Test
public void authorizeWhenRefreshTokenAccessTokenResponseClientBeanThenUsed() {
this.spring.register(CustomAccessTokenResponseClientsConfig.class).autowire();
testRefreshTokenGrant();
}
@Test
public void authorizeWhenRefreshTokenAuthorizedClientProviderBeanThenUsed() {
this.spring.register(CustomAuthorizedClientProvidersConfig.class).autowire();
testRefreshTokenGrant();
}
private void testRefreshTokenGrant() {
OAuth2AccessTokenResponse accessTokenResponse = TestOAuth2AccessTokenResponses.accessTokenResponse().build();
given(MOCK_RESPONSE_CLIENT.getTokenResponse(any(OAuth2RefreshTokenGrantRequest.class)))
.willReturn(accessTokenResponse);
TestingAuthenticationToken authentication = new TestingAuthenticationToken("user", null);
ClientRegistration clientRegistration = this.clientRegistrationRepository.findByRegistrationId("google");
OAuth2AuthorizedClient existingAuthorizedClient = new OAuth2AuthorizedClient(clientRegistration,
authentication.getName(), getExpiredAccessToken(), TestOAuth2RefreshTokens.refreshToken());
this.authorizedClientRepository.saveAuthorizedClient(existingAuthorizedClient, authentication, this.request,
this.response);
// @formatter:off
OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
.withAuthorizedClient(existingAuthorizedClient)
.principal(authentication)
.attribute(HttpServletRequest.class.getName(), this.request)
.attribute(HttpServletResponse.class.getName(), this.response)
.build();
// @formatter:on
OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
assertThat(authorizedClient).isNotNull();
ArgumentCaptor<OAuth2RefreshTokenGrantRequest> grantRequestCaptor = ArgumentCaptor
.forClass(OAuth2RefreshTokenGrantRequest.class);
verify(MOCK_RESPONSE_CLIENT).getTokenResponse(grantRequestCaptor.capture());
OAuth2RefreshTokenGrantRequest grantRequest = grantRequestCaptor.getValue();
assertThat(grantRequest.getClientRegistration().getRegistrationId())
.isEqualTo(clientRegistration.getRegistrationId());
assertThat(grantRequest.getGrantType()).isEqualTo(AuthorizationGrantType.REFRESH_TOKEN);
assertThat(grantRequest.getAccessToken()).isEqualTo(existingAuthorizedClient.getAccessToken());
assertThat(grantRequest.getRefreshToken()).isEqualTo(existingAuthorizedClient.getRefreshToken());
}
@Test
public void authorizeWhenClientCredentialsAccessTokenResponseClientBeanThenUsed() {
this.spring.register(CustomAccessTokenResponseClientsConfig.class).autowire();
testClientCredentialsGrant();
}
@Test
public void authorizeWhenClientCredentialsAuthorizedClientProviderBeanThenUsed() {
this.spring.register(CustomAuthorizedClientProvidersConfig.class).autowire();
testClientCredentialsGrant();
}
private void testClientCredentialsGrant() {
OAuth2AccessTokenResponse accessTokenResponse = TestOAuth2AccessTokenResponses.accessTokenResponse().build();
given(MOCK_RESPONSE_CLIENT.getTokenResponse(any(OAuth2ClientCredentialsGrantRequest.class)))
.willReturn(accessTokenResponse);
TestingAuthenticationToken authentication = new TestingAuthenticationToken("user", null);
ClientRegistration clientRegistration = this.clientRegistrationRepository.findByRegistrationId("github");
// @formatter:off
OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
.withClientRegistrationId(clientRegistration.getRegistrationId())
.principal(authentication)
.attribute(HttpServletRequest.class.getName(), this.request)
.attribute(HttpServletResponse.class.getName(), this.response)
.build();
// @formatter:on
OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
assertThat(authorizedClient).isNotNull();
ArgumentCaptor<OAuth2ClientCredentialsGrantRequest> grantRequestCaptor = ArgumentCaptor
.forClass(OAuth2ClientCredentialsGrantRequest.class);
verify(MOCK_RESPONSE_CLIENT).getTokenResponse(grantRequestCaptor.capture());
OAuth2ClientCredentialsGrantRequest grantRequest = grantRequestCaptor.getValue();
assertThat(grantRequest.getClientRegistration().getRegistrationId())
.isEqualTo(clientRegistration.getRegistrationId());
assertThat(grantRequest.getGrantType()).isEqualTo(AuthorizationGrantType.CLIENT_CREDENTIALS);
}
@Test
public void authorizeWhenJwtBearerAccessTokenResponseClientBeanThenUsed() {
this.spring.register(CustomAccessTokenResponseClientsConfig.class).autowire();
testJwtBearerGrant();
}
@Test
public void authorizeWhenJwtBearerAuthorizedClientProviderBeanThenUsed() {
this.spring.register(CustomAuthorizedClientProvidersConfig.class).autowire();
testJwtBearerGrant();
}
private void testJwtBearerGrant() {
OAuth2AccessTokenResponse accessTokenResponse = TestOAuth2AccessTokenResponses.accessTokenResponse().build();
given(MOCK_RESPONSE_CLIENT.getTokenResponse(any(JwtBearerGrantRequest.class))).willReturn(accessTokenResponse);
JwtAuthenticationToken authentication = new JwtAuthenticationToken(getJwt());
ClientRegistration clientRegistration = this.clientRegistrationRepository.findByRegistrationId("okta");
// @formatter:off
OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
.withClientRegistrationId(clientRegistration.getRegistrationId())
.principal(authentication)
.attribute(HttpServletRequest.class.getName(), this.request)
.attribute(HttpServletResponse.class.getName(), this.response)
.build();
// @formatter:on
OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
assertThat(authorizedClient).isNotNull();
ArgumentCaptor<JwtBearerGrantRequest> grantRequestCaptor = ArgumentCaptor.forClass(JwtBearerGrantRequest.class);
verify(MOCK_RESPONSE_CLIENT).getTokenResponse(grantRequestCaptor.capture());
JwtBearerGrantRequest grantRequest = grantRequestCaptor.getValue();
assertThat(grantRequest.getClientRegistration().getRegistrationId())
.isEqualTo(clientRegistration.getRegistrationId());
assertThat(grantRequest.getGrantType()).isEqualTo(AuthorizationGrantType.JWT_BEARER);
assertThat(grantRequest.getJwt().getSubject()).isEqualTo("user");
}
@Test
public void authorizeWhenTokenExchangeAccessTokenResponseClientBeanThenUsed() {
this.spring.register(CustomAccessTokenResponseClientsConfig.class).autowire();
testTokenExchangeGrant();
}
@Test
public void authorizeWhenTokenExchangeAuthorizedClientProviderBeanThenUsed() {
this.spring.register(CustomAuthorizedClientProvidersConfig.class).autowire();
testTokenExchangeGrant();
}
private void testTokenExchangeGrant() {
OAuth2AccessTokenResponse accessTokenResponse = TestOAuth2AccessTokenResponses.accessTokenResponse().build();
given(MOCK_RESPONSE_CLIENT.getTokenResponse(any(TokenExchangeGrantRequest.class)))
.willReturn(accessTokenResponse);
JwtAuthenticationToken authentication = new JwtAuthenticationToken(getJwt());
ClientRegistration clientRegistration = this.clientRegistrationRepository.findByRegistrationId("auth0");
// @formatter:off
OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
.withClientRegistrationId(clientRegistration.getRegistrationId())
.principal(authentication)
.attribute(HttpServletRequest.class.getName(), this.request)
.attribute(HttpServletResponse.class.getName(), this.response)
.build();
// @formatter:on
OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
assertThat(authorizedClient).isNotNull();
ArgumentCaptor<TokenExchangeGrantRequest> grantRequestCaptor = ArgumentCaptor
.forClass(TokenExchangeGrantRequest.class);
verify(MOCK_RESPONSE_CLIENT).getTokenResponse(grantRequestCaptor.capture());
TokenExchangeGrantRequest grantRequest = grantRequestCaptor.getValue();
assertThat(grantRequest.getClientRegistration().getRegistrationId())
.isEqualTo(clientRegistration.getRegistrationId());
assertThat(grantRequest.getGrantType()).isEqualTo(AuthorizationGrantType.TOKEN_EXCHANGE);
assertThat(grantRequest.getSubjectToken()).isEqualTo(authentication.getToken());
}
private static OAuth2AccessToken getExpiredAccessToken() {
Instant expiresAt = Instant.now().minusSeconds(60);
Instant issuedAt = expiresAt.minus(Duration.ofDays(1));
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", issuedAt, expiresAt,
new HashSet<>(Arrays.asList("read", "write")));
}
private static Jwt getJwt() {
Instant issuedAt = Instant.now();
return new Jwt("token", issuedAt, issuedAt.plusSeconds(300),
Collections.singletonMap(JoseHeaderNames.ALG, "RS256"),
Collections.singletonMap(JwtClaimNames.SUB, "user"));
}
@Configuration
@EnableWebSecurity
static
|
OAuth2AuthorizedClientManagerConfigurationTests
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/endpoint/ReactiveOAuth2AccessTokenResponseClient.java
|
{
"start": 1779,
"end": 2549
}
|
interface ____<T extends AbstractOAuth2AuthorizationGrantRequest> {
/**
* Exchanges the authorization grant credential, provided in the authorization grant
* request, for an access token credential at the Authorization Server's Token
* Endpoint.
* @param authorizationGrantRequest the authorization grant request that contains the
* authorization grant credential
* @return an {@link OAuth2AccessTokenResponse} that contains the
* {@link OAuth2AccessTokenResponse#getAccessToken() access token} credential
* @throws OAuth2AuthorizationException if an error occurs while attempting to
* exchange for the access token credential
*/
Mono<OAuth2AccessTokenResponse> getTokenResponse(T authorizationGrantRequest);
}
|
ReactiveOAuth2AccessTokenResponseClient
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/cache/interceptor/CacheSyncFailureTests.java
|
{
"start": 4395,
"end": 4819
}
|
class ____ implements CachingConfigurer {
@Override
@Bean
public CacheManager cacheManager() {
return CacheTestUtils.createSimpleCacheManager("testCache", "anotherTestCache");
}
@Bean
public CacheResolver testCacheResolver() {
return new NamedCacheResolver(cacheManager(), "testCache", "anotherTestCache");
}
@Bean
public SimpleService simpleService() {
return new SimpleService();
}
}
}
|
Config
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/PersonNameDoubleConstructorWithOneEmpty.java
|
{
"start": 134,
"end": 480
}
|
class ____ extends PersonName {
@SuppressWarnings("unused")
public PersonNameDoubleConstructorWithOneEmpty() {
super(null, null);
}
@SuppressWarnings("unused")
public PersonNameDoubleConstructorWithOneEmpty(String uniqueName, String name) {
super(uniqueName, name);
}
}
|
PersonNameDoubleConstructorWithOneEmpty
|
java
|
apache__camel
|
components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/CamelSpringPlainTest.java
|
{
"start": 2290,
"end": 3799
}
|
class ____ {
@Autowired
protected CamelContext camelContext;
@EndpointInject("mock:a")
protected MockEndpoint mockA;
@EndpointInject("mock:b")
protected MockEndpoint mockB;
@Produce("direct:start")
protected ProducerTemplate start;
@Test
public void testPositive() throws Exception {
assertEquals(ServiceStatus.Started, camelContext.getStatus());
mockA.expectedBodiesReceived("David");
mockB.expectedBodiesReceived("Hello David");
start.sendBody("David");
MockEndpoint.assertIsSatisfied(camelContext);
}
@Test
public void testJmx() {
assertEquals(DefaultManagementStrategy.class, camelContext.getManagementStrategy().getClass());
}
@Test
public void testShutdownTimeout() {
assertEquals(10, camelContext.getShutdownStrategy().getTimeout());
assertEquals(TimeUnit.SECONDS, camelContext.getShutdownStrategy().getTimeUnit());
}
@Test
public void testStopwatch() {
StopWatch stopWatch = StopWatchTestExecutionListener.getStopWatch();
assertNotNull(stopWatch);
assertTrue(stopWatch.taken() < 100);
}
@Test
public void testExcludedRoute() {
assertNotNull(camelContext.getRoute("excludedRoute"));
}
@Test
public void testProvidesBreakpoint() {
assertNull(camelContext.getDebugger());
}
@Test
public void testRouteCoverage() {
// noop
}
}
// end::example[]
|
CamelSpringPlainTest
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/jaxws/CxfMessageStreamExceptionTest.java
|
{
"start": 1052,
"end": 2610
}
|
class ____ extends CxfMessageCustomizedExceptionTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: onException
from("direct:start").onException(SoapFault.class).maximumRedeliveries(0).handled(true)
.process(new Processor() {
public void process(Exchange exchange) throws Exception {
SoapFault fault = exchange
.getProperty(Exchange.EXCEPTION_CAUGHT, SoapFault.class);
exchange.getMessage().setBody(fault);
}
}).end().to(serviceURI);
// END SNIPPET: onException
// START SNIPPET: MessageStreamFault
from(routerEndpointURI).process(new Processor() {
public void process(Exchange exchange) throws Exception {
Message out = exchange.getMessage();
// Set the message body with the
out.setBody(this.getClass().getResourceAsStream("SoapFaultMessage.xml"));
// Set the response code here
out.setHeader(org.apache.cxf.message.Message.RESPONSE_CODE, Integer.valueOf(500));
}
});
// END SNIPPET: MessageStreamFault
}
};
}
}
|
CxfMessageStreamExceptionTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
|
{
"start": 2094,
"end": 2635
}
|
class ____ the parameter, used for value casting.
* @return the values of the parameter.
*/
@SuppressWarnings("unchecked")
public <V, T extends Param<V>> List<V> getValues(String name, Class<T> klass) {
List<Param<?>> multiParams = (List<Param<?>>)params.get(name);
List<V> values = Lists.newArrayList();
if (multiParams != null) {
for (Param<?> param : multiParams) {
V value = ((T) param).value();
if (value != null) {
values.add(value);
}
}
}
return values;
}
}
|
of
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/internal/EmbeddableInstantiatorPojoIndirecting.java
|
{
"start": 430,
"end": 1927
}
|
class ____ extends AbstractPojoInstantiator implements EmbeddableInstantiator {
protected final Constructor<?> constructor;
protected final int[] index;
protected EmbeddableInstantiatorPojoIndirecting(Constructor<?> constructor, int[] index) {
super( constructor.getDeclaringClass() );
this.constructor = constructor;
this.index = index;
}
public static EmbeddableInstantiatorPojoIndirecting of(
String[] propertyNames,
Constructor<?> constructor,
String[] componentNames) {
if ( componentNames == null ) {
throw new IllegalArgumentException( "Can't determine field assignment for constructor: " + constructor );
}
final int[] index = new int[componentNames.length];
return EmbeddableHelper.resolveIndex( propertyNames, componentNames, index )
? new EmbeddableInstantiatorPojoIndirectingWithGap( constructor, index )
: new EmbeddableInstantiatorPojoIndirecting( constructor, index );
}
@Override
public Object instantiate(ValueAccess valuesAccess) {
try {
final var originalValues = valuesAccess.getValues();
final var values = new Object[originalValues.length];
for ( int i = 0; i < values.length; i++ ) {
values[i] = originalValues[index[i]];
}
return constructor.newInstance( values );
}
catch ( Exception e ) {
throw new InstantiationException( "Could not instantiate entity", getMappedPojoClass(), e );
}
}
// Handles gaps, by leaving the value null for that index
private static
|
EmbeddableInstantiatorPojoIndirecting
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Concat.java
|
{
"start": 1477,
"end": 3487
}
|
class ____ extends ScalarFunction {
private final List<Expression> values;
public Concat(Source source, List<Expression> values) {
super(source, values);
this.values = values;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
TypeResolution resolution = TypeResolution.TYPE_RESOLVED;
for (Expression value : values) {
resolution = isExact(value, sourceText(), DEFAULT);
if (resolution.unresolved()) {
return resolution;
}
}
return resolution;
}
@Override
protected Pipe makePipe() {
return new ConcatFunctionPipe(source(), this, Expressions.pipe(values));
}
@Override
public boolean foldable() {
return Expressions.foldable(values);
}
@Override
public Object fold() {
return doProcess(Expressions.fold(values));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, Concat::new, values);
}
@Override
public ScriptTemplate asScript() {
List<ScriptTemplate> templates = new ArrayList<>();
for (Expression ex : children()) {
templates.add(asScript(ex));
}
StringJoiner template = new StringJoiner(",", "{eql}.concat([", "])");
ParamsBuilder params = paramsBuilder();
for (ScriptTemplate scriptTemplate : templates) {
template.add(scriptTemplate.template());
params.script(scriptTemplate.params());
}
return new ScriptTemplate(formatTemplate(template.toString()), params.build(), dataType());
}
@Override
public DataType dataType() {
return DataTypes.KEYWORD;
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new Concat(source(), newChildren);
}
}
|
Concat
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
|
{
"start": 7540,
"end": 12012
}
|
class ____ extends ESTestCase {
public void testExceptionRegistration() throws IOException, URISyntaxException {
final Set<Class<?>> notRegistered = new HashSet<>();
final Set<Class<?>> registered = new HashSet<>();
final String path = "/org/elasticsearch";
final Path startPath = PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI())
.resolve("org")
.resolve("elasticsearch");
final Set<? extends Class<?>> ignore = Sets.newHashSet(
CancellableThreadsTests.CustomException.class,
RestResponseTests.WithHeadersException.class,
AbstractClientHeadersTestCase.InternalException.class,
ElasticsearchExceptionTests.TimeoutSubclass.class,
ElasticsearchExceptionTests.Exception4xx.class,
ElasticsearchExceptionTests.Exception5xx.class
);
FileVisitor<Path> visitor = new FileVisitor<Path>() {
private Path pkgPrefix = PathUtils.get(path).getParent();
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
pkgPrefix = pkgPrefix.resolve(dir.getFileName());
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
checkFile(file.getFileName().toString());
return FileVisitResult.CONTINUE;
}
private void checkFile(String filename) {
if (filename.endsWith(".class") == false) {
return;
}
try {
checkClass(loadClass(filename));
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
private void checkClass(Class<?> clazz) {
if (ignore.contains(clazz) || isAbstract(clazz.getModifiers()) || isInterface(clazz.getModifiers())) {
return;
}
if (isEsException(clazz) == false) {
return;
}
if (ElasticsearchException.isRegistered(clazz.asSubclass(Throwable.class), TransportVersion.current()) == false
&& ElasticsearchException.class.equals(clazz.getEnclosingClass()) == false) {
notRegistered.add(clazz);
} else if (ElasticsearchException.isRegistered(clazz.asSubclass(Throwable.class), TransportVersion.current())) {
registered.add(clazz);
}
}
private boolean isEsException(Class<?> clazz) {
return ElasticsearchException.class.isAssignableFrom(clazz);
}
private Class<?> loadClass(String filename) throws ClassNotFoundException {
StringBuilder pkg = new StringBuilder();
for (Path p : pkgPrefix) {
pkg.append(p.getFileName().toString()).append(".");
}
pkg.append(filename, 0, filename.length() - 6);
return getClass().getClassLoader().loadClass(pkg.toString());
}
@Override
public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
throw exc;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) {
pkgPrefix = pkgPrefix.getParent();
return FileVisitResult.CONTINUE;
}
};
Files.walkFileTree(startPath, visitor);
final Path testStartPath = PathUtils.get(
ElasticsearchExceptionTests.class.getProtectionDomain().getCodeSource().getLocation().toURI()
).resolve("org").resolve("elasticsearch");
Files.walkFileTree(testStartPath, visitor);
assertTrue(notRegistered.remove(TestException.class));
assertTrue(notRegistered.remove(UnknownHeaderException.class));
assertTrue("Classes subclassing ElasticsearchException must be registered \n" + notRegistered, notRegistered.isEmpty());
assertTrue(registered.removeAll(ElasticsearchException.getRegisteredKeys())); // check
assertEquals(registered.toString(), 0, registered.size());
}
public static final
|
ExceptionSerializationTests
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/validation/beanvalidation/SpringValidatorAdapterTests.java
|
{
"start": 14663,
"end": 15356
}
|
class ____ {
private Integer id;
@NotNull
private String name;
@NotNull
private Integer age;
@NotNull
private Parent parent;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
public Parent getParent() {
return parent;
}
public void setParent(Parent parent) {
this.parent = parent;
}
}
@Constraint(validatedBy = AnythingValidator.class)
@Retention(RetentionPolicy.RUNTIME)
public @
|
Child
|
java
|
google__auto
|
common/src/test/java/com/google/auto/common/MoreTypesTest.java
|
{
"start": 12845,
"end": 12899
}
|
class ____<T> extends Parent<T> {}
private
|
GenericChild
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/aot/generate/GeneratedClassesTests.java
|
{
"start": 1325,
"end": 7530
}
|
class ____ {
private static final Consumer<TypeSpec.Builder> emptyTypeCustomizer = type -> {};
private final GeneratedClasses generatedClasses = new GeneratedClasses(
new ClassNameGenerator(ClassName.get("com.example", "Test")));
@Test
void createWhenClassNameGeneratorIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> new GeneratedClasses(null))
.withMessage("'classNameGenerator' must not be null");
}
@Test
void addForFeatureComponentWhenFeatureNameIsEmptyThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.generatedClasses.addForFeatureComponent("",
TestComponent.class, emptyTypeCustomizer))
.withMessage("'featureName' must not be empty");
}
@Test
void addForFeatureWhenFeatureNameIsEmptyThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.generatedClasses.addForFeature("", emptyTypeCustomizer))
.withMessage("'featureName' must not be empty");
}
@Test
void addForFeatureComponentWhenTypeSpecCustomizerIsNullThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.generatedClasses
.addForFeatureComponent("test", TestComponent.class, null))
.withMessage("'type' must not be null");
}
@Test
void addForFeatureUsesDefaultTarget() {
GeneratedClass generatedClass = this.generatedClasses.addForFeature("Test", emptyTypeCustomizer);
assertThat(generatedClass.getName()).hasToString("com.example.Test__Test");
}
@Test
void addForFeatureComponentUsesTarget() {
GeneratedClass generatedClass = this.generatedClasses
.addForFeatureComponent("Test", TestComponent.class, emptyTypeCustomizer);
assertThat(generatedClass.getName().toString()).endsWith("TestComponent__Test");
}
@Test
void addForFeatureComponentWithSameNameReturnsDifferentInstances() {
GeneratedClass generatedClass1 = this.generatedClasses
.addForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses
.addForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
assertThat(generatedClass1).isNotSameAs(generatedClass2);
assertThat(generatedClass1.getName().simpleName()).endsWith("__One");
assertThat(generatedClass2.getName().simpleName()).endsWith("__One1");
}
@Test
void getOrAddForFeatureComponentWhenNewReturnsGeneratedClass() {
GeneratedClass generatedClass1 = this.generatedClasses
.getOrAddForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses
.getOrAddForFeatureComponent("two", TestComponent.class, emptyTypeCustomizer);
assertThat(generatedClass1).isNotNull().isNotEqualTo(generatedClass2);
assertThat(generatedClass2).isNotNull();
}
@Test
void getOrAddForFeatureWhenNewReturnsGeneratedClass() {
GeneratedClass generatedClass1 = this.generatedClasses
.getOrAddForFeature("one", emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses
.getOrAddForFeature("two", emptyTypeCustomizer);
assertThat(generatedClass1).isNotNull().isNotEqualTo(generatedClass2);
assertThat(generatedClass2).isNotNull();
}
@Test
void getOrAddForFeatureComponentWhenRepeatReturnsSameGeneratedClass() {
GeneratedClass generatedClass1 = this.generatedClasses
.getOrAddForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses
.getOrAddForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass3 = this.generatedClasses
.getOrAddForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
assertThat(generatedClass1).isNotNull().isSameAs(generatedClass2)
.isSameAs(generatedClass3);
}
@Test
void getOrAddForFeatureWhenRepeatReturnsSameGeneratedClass() {
GeneratedClass generatedClass1 = this.generatedClasses
.getOrAddForFeature("one", emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses
.getOrAddForFeature("one", emptyTypeCustomizer);
GeneratedClass generatedClass3 = this.generatedClasses
.getOrAddForFeature("one", emptyTypeCustomizer);
assertThat(generatedClass1).isNotNull().isSameAs(generatedClass2)
.isSameAs(generatedClass3);
}
@Test
void getOrAddForFeatureComponentWhenHasFeatureNamePrefix() {
GeneratedClasses prefixed = this.generatedClasses.withFeatureNamePrefix("prefix");
GeneratedClass generatedClass1 = this.generatedClasses.getOrAddForFeatureComponent(
"one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses.getOrAddForFeatureComponent(
"one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass3 = prefixed.getOrAddForFeatureComponent(
"one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass4 = prefixed.getOrAddForFeatureComponent(
"one", TestComponent.class, emptyTypeCustomizer);
assertThat(generatedClass1).isSameAs(generatedClass2).isNotSameAs(generatedClass3);
assertThat(generatedClass3).isSameAs(generatedClass4);
}
@Test
void writeToInvokeTypeSpecCustomizer() {
Consumer<TypeSpec.Builder> typeSpecCustomizer = mock();
this.generatedClasses.addForFeatureComponent("one", TestComponent.class, typeSpecCustomizer);
verifyNoInteractions(typeSpecCustomizer);
InMemoryGeneratedFiles generatedFiles = new InMemoryGeneratedFiles();
this.generatedClasses.writeTo(generatedFiles);
verify(typeSpecCustomizer).accept(any());
assertThat(generatedFiles.getGeneratedFiles(Kind.SOURCE)).hasSize(1);
}
@Test
void withNameUpdatesNamingConventions() {
GeneratedClass generatedClass1 = this.generatedClasses
.addForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
GeneratedClass generatedClass2 = this.generatedClasses.withFeatureNamePrefix("Another")
.addForFeatureComponent("one", TestComponent.class, emptyTypeCustomizer);
assertThat(generatedClass1.getName().toString()).endsWith("TestComponent__One");
assertThat(generatedClass2.getName().toString()).endsWith("TestComponent__AnotherOne");
}
private static
|
GeneratedClassesTests
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/main/java/io/github/resilience4j/core/functions/Either.java
|
{
"start": 1102,
"end": 4997
}
|
interface ____<L, R> {
/**
* Constructs a {@link Right}
*
* @param right The value.
* @param <L> Type of left value.
* @param <R> Type of right value.
* @return A new {@code Right} instance.
*/
static <L, R> Either<L, R> right(R right) {
return new Right<>(right);
}
/**
* Constructs a {@link Left}
*
* @param left The value.
* @param <L> Type of left value.
* @param <R> Type of right value.
* @return A new {@code Left} instance.
*/
static <L, R> Either<L, R> left(L left) {
return new Left<>(left);
}
/**
* Returns the left value.
*
* @return The left value.
* @throws NoSuchElementException if this is a {@code Right}.
*/
L getLeft();
/**
* Returns whether this Either is a Left.
*
* @return true, if this is a Left, false otherwise
*/
boolean isLeft();
/**
* Returns whether this Either is a Right.
*
* @return true, if this is a Right, false otherwise
*/
boolean isRight();
/**
* Gets the right value if this is a {@code Right} or throws if this is a {@code Left}.
*
* @return the right value
* @throws NoSuchElementException if this is a {@code Left}.
*/
R get();
/**
* Converts a {@code Left} to a {@code Right} vice versa by wrapping the value in a new type.
*
* @return a new {@code Either}
*/
default Either<R, L> swap() {
if (isRight()) {
return new Left<>(get());
} else {
return new Right<>(getLeft());
}
}
default boolean isEmpty() {
return isLeft();
}
default R getOrNull() {
return isEmpty() ? null : get();
}
/**
* Maps the value of this Either if it is a Left, performs no operation if this is a Right.
* @param leftMapper A mapper
* @param <U> Component type of the mapped right value
* @return a mapped {@code Monad}
* @throws NullPointerException if {@code mapper} is null
*/
@SuppressWarnings("unchecked")
default <U> Either<U, R> mapLeft(Function<? super L, ? extends U> leftMapper) {
Objects.requireNonNull(leftMapper, "leftMapper is null");
if (isLeft()) {
return Either.left(leftMapper.apply(getLeft()));
} else {
return (Either<U, R>) this;
}
}
/**
* Maps the left value if the projected Either is a Left.
*
* @param mapper A mapper which takes a left value and returns a value of type U
* @param <U> The new type of a Left value
* @return A new LeftProjection
*/
@SuppressWarnings("unchecked")
default <U> Either<L, U> map(Function<? super R, ? extends U> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
if (isRight()) {
return Either.right(mapper.apply(get()));
} else {
return (Either<L, U>) this;
}
}
/**
* Folds either the left or the right side of this disjunction.
*
* @param leftMapper maps the left value if this is a Left
* @param rightMapper maps the right value if this is a Right
* @param <U> type of the folded value
* @return A value of type U
*/
default <U> U fold(Function<? super L, ? extends U> leftMapper, Function<? super R, ? extends U> rightMapper) {
Objects.requireNonNull(leftMapper, "leftMapper is null");
Objects.requireNonNull(rightMapper, "rightMapper is null");
if (isRight()) {
return rightMapper.apply(get());
} else {
return leftMapper.apply(getLeft());
}
}
/**
* The {@code Right} version of an {@code Either}.
*
* @param <L> left component type
* @param <R> right component type
*/
final
|
Either
|
java
|
elastic__elasticsearch
|
plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java
|
{
"start": 1180,
"end": 1661
}
|
class ____ that the attribute is set, but we don't want that here
public void testAvailabilityZoneAttribute() throws IOException {
final var nodesInfoResponse = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_nodes/_all/_none")));
for (final var nodeId : nodesInfoResponse.evaluateMapKeys("nodes")) {
assertNull(nodesInfoResponse.evaluateExact("nodes", nodeId, "attributes", "aws_availability_zone"));
}
}
}
|
asserts
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
|
{
"start": 4169,
"end": 4738
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) {
this.source = source;
this.val = val;
}
@Override
public Log10IntEvaluator get(DriverContext context) {
return new Log10IntEvaluator(source, val.get(context), context);
}
@Override
public String toString() {
return "Log10IntEvaluator[" + "val=" + val + "]";
}
}
}
|
Factory
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/BeforeAndAfterTestExecutionCallbackTests.java
|
{
"start": 1596,
"end": 10242
}
|
class ____ extends AbstractJupiterTestEngineTests {
private static final List<String> callSequence = new ArrayList<>();
private static @Nullable Optional<Throwable> actualExceptionInAfterTestExecution;
@SuppressWarnings("OptionalAssignedToNull")
@BeforeEach
void resetCallSequence() {
callSequence.clear();
actualExceptionInAfterTestExecution = null;
}
@Test
void beforeAndAfterTestExecutionCallbacks() {
EngineExecutionResults executionResults = executeTestsForClass(OuterTestCase.class);
assertEquals(2, executionResults.testEvents().started().count(), "# tests started");
assertEquals(2, executionResults.testEvents().succeeded().count(), "# tests succeeded");
assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
assertEquals(0, executionResults.testEvents().failed().count(), "# tests failed");
// @formatter:off
assertEquals(asList(
// OuterTestCase
"beforeEachMethodOuter",
"fooBeforeTestExecutionCallback",
"barBeforeTestExecutionCallback",
"testOuter",
"barAfterTestExecutionCallback",
"fooAfterTestExecutionCallback",
"afterEachMethodOuter",
// InnerTestCase
"beforeEachMethodOuter",
"beforeEachMethodInner",
"fooBeforeTestExecutionCallback",
"barBeforeTestExecutionCallback",
"fizzBeforeTestExecutionCallback",
"testInner",
"fizzAfterTestExecutionCallback",
"barAfterTestExecutionCallback",
"fooAfterTestExecutionCallback",
"afterEachMethodInner",
"afterEachMethodOuter"
), callSequence, "wrong call sequence");
// @formatter:on
}
@Test
void beforeAndAfterTestExecutionCallbacksDeclaredOnSuperclassAndSubclass() {
EngineExecutionResults executionResults = executeTestsForClass(ChildTestCase.class);
assertEquals(1, executionResults.testEvents().started().count(), "# tests started");
assertEquals(1, executionResults.testEvents().succeeded().count(), "# tests succeeded");
assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
assertEquals(0, executionResults.testEvents().failed().count(), "# tests failed");
// @formatter:off
assertEquals(asList(
"fooBeforeTestExecutionCallback",
"barBeforeTestExecutionCallback",
"testChild",
"barAfterTestExecutionCallback",
"fooAfterTestExecutionCallback"
), callSequence, "wrong call sequence");
// @formatter:on
}
	/**
	 * Callbacks declared on an implemented interface wrap both the interface's
	 * default test method and the implementing class's own test method.
	 */
	@Test
	void beforeAndAfterTestExecutionCallbacksDeclaredOnInterfaceAndClass() {
		EngineExecutionResults executionResults = executeTestsForClass(TestInterfaceTestCase.class);
		// Two tests run (one default method, one local method); both succeed.
		assertEquals(2, executionResults.testEvents().started().count(), "# tests started");
		assertEquals(2, executionResults.testEvents().succeeded().count(), "# tests succeeded");
		assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
		assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
		assertEquals(0, executionResults.testEvents().failed().count(), "# tests failed");
		// Same wrapping order applies to each of the two test methods.
		// @formatter:off
		assertEquals(asList(
			// Test Interface
			"fooBeforeTestExecutionCallback",
			"barBeforeTestExecutionCallback",
			"defaultTestMethod",
			"barAfterTestExecutionCallback",
			"fooAfterTestExecutionCallback",
			// Test Class
			"fooBeforeTestExecutionCallback",
			"barBeforeTestExecutionCallback",
			"localTestMethod",
			"barAfterTestExecutionCallback",
			"fooAfterTestExecutionCallback"
		), callSequence, "wrong call sequence");
		// @formatter:on
	}
	/**
	 * An exception in an {@code @BeforeEach} method must skip all test-execution
	 * callbacks and the test itself, while {@code @AfterEach} still runs.
	 */
	@Test
	void beforeEachMethodThrowsAnException() {
		EngineExecutionResults executionResults = executeTestsForClass(ExceptionInBeforeEachMethodTestCase.class);
		// The single test is reported as failed, not skipped/aborted.
		assertEquals(1, executionResults.testEvents().started().count(), "# tests started");
		assertEquals(0, executionResults.testEvents().succeeded().count(), "# tests succeeded");
		assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
		assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
		assertEquals(1, executionResults.testEvents().failed().count(), "# tests failed");
		// @formatter:off
		assertEquals(asList(
			"beforeEachMethod", // throws an exception.
			// fooBeforeTestExecutionCallback should not get invoked.
			// test should not get invoked.
			// fooAfterTestExecutionCallback should not get invoked.
			"afterEachMethod"
		), callSequence, "wrong call sequence");
		// @formatter:on
		// Since the AfterTestExecutionCallback never ran, it recorded no exception.
		assertNull(actualExceptionInAfterTestExecution,
			"test exception (fooAfterTestExecutionCallback should not have been called)");
	}
	/**
	 * An exception in a BeforeTestExecutionCallback skips later "before"
	 * callbacks and the test, but all "after" callbacks still run and observe
	 * the thrown exception.
	 */
	@Test
	void beforeTestExecutionCallbackThrowsAnException() {
		EngineExecutionResults executionResults = executeTestsForClass(
			ExceptionInBeforeTestExecutionCallbackTestCase.class);
		// The single test is reported as failed.
		assertEquals(1, executionResults.testEvents().started().count(), "# tests started");
		assertEquals(0, executionResults.testEvents().succeeded().count(), "# tests succeeded");
		assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
		assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
		assertEquals(1, executionResults.testEvents().failed().count(), "# tests failed");
		// @formatter:off
		assertEquals(asList(
			"beforeEachMethod",
			"fooBeforeTestExecutionCallback",
			"exceptionThrowingBeforeTestExecutionCallback", // throws an exception.
			// barBeforeTestExecutionCallback should not get invoked.
			// test() should not get invoked.
			"barAfterTestExecutionCallback",
			"fooAfterTestExecutionCallback",
			"afterEachMethod"
		), callSequence, "wrong call sequence");
		// @formatter:on
		// The AfterTestExecutionCallback observed the EnigmaException thrown above.
		assertNotNull(actualExceptionInAfterTestExecution, "test exception");
		assertTrue(actualExceptionInAfterTestExecution.isPresent(), "test exception should be present");
		assertEquals(EnigmaException.class, actualExceptionInAfterTestExecution.get().getClass());
	}
	/**
	 * An exception in an AfterTestExecutionCallback does not prevent the
	 * remaining "after" callbacks or the {@code @AfterEach} method from running.
	 */
	@Test
	void afterTestExecutionCallbackThrowsAnException() {
		EngineExecutionResults executionResults = executeTestsForClass(
			ExceptionInAfterTestExecutionCallbackTestCase.class);
		// The single test is reported as failed because of the "after" exception.
		assertEquals(1, executionResults.testEvents().started().count(), "# tests started");
		assertEquals(0, executionResults.testEvents().succeeded().count(), "# tests succeeded");
		assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
		assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
		assertEquals(1, executionResults.testEvents().failed().count(), "# tests failed");
		// @formatter:off
		assertEquals(asList(
			"beforeEachMethod",
			"fooBeforeTestExecutionCallback",
			"barBeforeTestExecutionCallback",
			"test",
			"barAfterTestExecutionCallback",
			"exceptionThrowingAfterTestExecutionCallback", // throws an exception.
			"fooAfterTestExecutionCallback",
			"afterEachMethod"
		), callSequence, "wrong call sequence");
		// @formatter:on
		// The outer callback observed the EnigmaException from the inner one.
		assertNotNull(actualExceptionInAfterTestExecution, "test exception");
		assertTrue(actualExceptionInAfterTestExecution.isPresent(), "test exception should be present");
		assertEquals(EnigmaException.class, actualExceptionInAfterTestExecution.get().getClass());
	}
	/**
	 * An exception thrown by the test method itself is still visible to
	 * AfterTestExecutionCallbacks, which run as usual.
	 */
	@Test
	void testMethodThrowsAnException() {
		EngineExecutionResults executionResults = executeTestsForClass(ExceptionInTestMethodTestCase.class);
		// The single test is reported as failed.
		assertEquals(1, executionResults.testEvents().started().count(), "# tests started");
		assertEquals(0, executionResults.testEvents().succeeded().count(), "# tests succeeded");
		assertEquals(0, executionResults.testEvents().skipped().count(), "# tests skipped");
		assertEquals(0, executionResults.testEvents().aborted().count(), "# tests aborted");
		assertEquals(1, executionResults.testEvents().failed().count(), "# tests failed");
		// @formatter:off
		assertEquals(asList(
			"beforeEachMethod",
			"fooBeforeTestExecutionCallback",
			"test", // throws an exception.
			"fooAfterTestExecutionCallback",
			"afterEachMethod"
		), callSequence, "wrong call sequence");
		// @formatter:on
		// The callback recorded the EnigmaException thrown by the test method.
		assertNotNull(actualExceptionInAfterTestExecution, "test exception");
		assertTrue(actualExceptionInAfterTestExecution.isPresent(), "test exception should be present");
		assertEquals(EnigmaException.class, actualExceptionInAfterTestExecution.get().getClass());
	}
// -------------------------------------------------------------------------
@ExtendWith(FooTestExecutionCallbacks.class)
static
|
BeforeAndAfterTestExecutionCallbackTests
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/event/connection/ConnectionEventSupport.java
|
{
"start": 177,
"end": 1913
}
|
class ____ implements ConnectionEvent {
    // Optional identifying metadata; may be null when the event carries
    // only the socket addresses (two-arg constructor).
    private final String redisUri;
    private final String epId;
    private final String channelId;
    // Mandatory endpoints of the connection this event refers to.
    private final SocketAddress local;
    private final SocketAddress remote;
    /**
     * Creates an event that carries only the connection endpoints.
     *
     * @param local local address, must not be {@code null}
     * @param remote remote address, must not be {@code null}
     */
    ConnectionEventSupport(SocketAddress local, SocketAddress remote) {
        this(null, null, null, local, remote);
    }
    /**
     * Creates a fully-populated event.
     *
     * @param redisUri underlying Redis URI, may be {@code null}
     * @param epId endpoint identifier, may be {@code null}
     * @param channelId channel identifier, may be {@code null}
     * @param local local address, must not be {@code null}
     * @param remote remote address, must not be {@code null}
     */
    ConnectionEventSupport(String redisUri, String epId, String channelId, SocketAddress local, SocketAddress remote) {
        LettuceAssert.notNull(local, "Local must not be null");
        LettuceAssert.notNull(remote, "Remote must not be null");
        this.redisUri = redisUri;
        this.epId = epId;
        this.channelId = channelId;
        this.local = local;
        this.remote = remote;
    }
    /**
     * Returns the local address.
     *
     * @return the local address
     */
    public SocketAddress localAddress() {
        return local;
    }
    /**
     * Returns the remote address.
     *
     * @return the remote address
     */
    public SocketAddress remoteAddress() {
        return remote;
    }
    /**
     * @return the underlying Redis URI.
     */
    String getRedisUri() {
        return redisUri;
    }
    /**
     * @return endpoint identifier.
     */
    String getEpId() {
        return epId;
    }
    /**
     * @return channel identifier.
     */
    String getChannelId() {
        return channelId;
    }
    /**
     * Renders as {@code SimpleName [local -> remote]}.
     */
    @Override
    public String toString() {
        // Plain concatenation produces the same text as the previous
        // StringBuilder-based implementation.
        return getClass().getSimpleName() + " [" + local + " -> " + remote + ']';
    }
}
|
ConnectionEventSupport
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 23532,
"end": 23849
}
|
class ____ {",
" public Baz create() {",
" return new AutoValue_Baz();",
" }",
"}");
Compilation compilation =
javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining("@AutoValue
|
Baz
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-tos/src/test/java/org/apache/hadoop/fs/tosfs/object/TestObjectStorage.java
|
{
"start": 3018,
"end": 57407
}
|
class ____ {
  // Local root directory backing the file-store flavours of ObjectStorage under test.
  private static final String FILE_STORE_ROOT = TempFiles.newTempDir("TestObjectStorage");
  // Storage under test; assigned per parameterized invocation via setEnv().
  private ObjectStorage storage;
  /**
   * Supplies one Arguments per test object-storage implementation.
   * All tests are skipped when the test environment is not enabled.
   */
  public static Stream<Arguments> provideArguments() {
    assumeTrue(TestEnv.checkTestEnabled());
    List<Arguments> values = new ArrayList<>();
    for (ObjectStorage store : TestUtility.createTestObjectStorage(FILE_STORE_ROOT)) {
      values.add(Arguments.of(store));
    }
    return values.stream();
  }
  // Binds the parameterized storage instance to this test instance.
  private void setEnv(ObjectStorage objectStore) {
    this.storage = objectStore;
  }
  /** Deletes every object and aborts any in-flight multipart upload after each test. */
  @AfterEach
  public void tearDown() {
    // Best effort: a failed cleanup must not mask the test result.
    CommonUtils.runQuietly(() -> storage.deleteAll(""));
    for (MultipartUpload upload : storage.listUploads("")) {
      storage.abortMultipartUpload(upload.key(), upload.uploadId());
    }
  }
  /** Removes the shared on-disk root once all tests have finished. */
  @AfterAll
  public static void afterClass() throws Exception {
    CommonUtils.runQuietly(() -> TempFiles.deleteDir(FILE_STORE_ROOT));
  }
  /**
   * head() returns null for missing keys; a directory bucket additionally
   * rejects keys that traverse through an existing file object.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testHeadNonExistObject(ObjectStorage store) {
    setEnv(store);
    assertNull(storage.head("a/b/c.txt"));
    byte[] data = TestUtility.rand(256);
    storage.put("a/b/c.txt", data);
    assertNotNull(storage.head("a/b/c.txt"));
    assertNull(storage.head("a/b/c/non-exits"));
    if (storage.bucket().isDirectory()) {
      // "a/b/c.txt" is a file, so treating it as a parent directory is invalid.
      assertThrows(InvalidObjectKeyException.class, () -> storage.head("a/b/c.txt/non-exits"));
    } else {
      assertNull(storage.head("a/b/c.txt/non-exits"));
    }
  }
  /**
   * head() on an existing file vs. an existing directory object, including the
   * trailing-slash behavior differences between general and directory buckets.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testHeadExistObject(ObjectStorage store) {
    setEnv(store);
    byte[] data = TestUtility.rand(256);
    String key = "testHeadExistObject.txt";
    storage.put(key, data);
    ObjectInfo obj = storage.head(key);
    assertEquals(key, obj.key());
    assertFalse(obj.isDir());
    if (storage.bucket().isDirectory()) {
      // A file key with a trailing slash is an invalid directory-bucket key.
      assertThrows(InvalidObjectKeyException.class, () -> storage.head(key + "/"));
    } else {
      assertNull(storage.head(key + "/"));
    }
    String dirKey = "testHeadExistObject/";
    storage.put(dirKey, new byte[0]);
    obj = storage.head(dirKey);
    assertEquals(dirKey, obj.key());
    assertTrue(obj.isDir());
    if (storage.bucket().isDirectory()) {
      // Directory buckets resolve the key without a trailing slash as well.
      obj = storage.head("testHeadExistObject");
      assertEquals("testHeadExistObject", obj.key());
      assertTrue(obj.isDir());
    } else {
      assertNull(storage.head("testHeadExistObject"));
    }
  }
  /** get() on a missing key fails, while delete() of a missing key is a no-op. */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testGetAndDeleteNonExistFile(ObjectStorage store) {
    setEnv(store);
    // ensure file is not exist
    assertNull(storage.head("a/b/c.txt"));
    assertThrows(RuntimeException.class, () -> storage.get("a/b/c.txt", 0, 0));
    assertThrows(RuntimeException.class, () -> storage.get("a/b/c.txt", 0, 1));
    // Allow to delete a non-exist object.
    storage.delete("a/b/c.txt");
  }
  /** Empty or null keys are rejected by put/delete/head/get alike. */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testPutAndDeleteFileWithEmptyKey(ObjectStorage store) {
    setEnv(store);
    assertThrows(RuntimeException.class, () -> storage.put("", new byte[0]));
    assertThrows(RuntimeException.class, () -> storage.put(null, new byte[0]));
    assertThrows(RuntimeException.class, () -> storage.delete(null));
    assertThrows(RuntimeException.class, () -> storage.head(""));
    assertThrows(RuntimeException.class, () -> storage.head(null));
    assertThrows(RuntimeException.class, () -> getStream(""));
    assertThrows(RuntimeException.class, () -> getStream(null));
  }
  /**
   * When the declared content length is smaller than the stream, the object is
   * truncated to that length; when larger, the put fails.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testPutObjectButContentLengthDisMatch(ObjectStorage store) throws IOException {
    setEnv(store);
    byte[] data = TestUtility.rand(256);
    String key = "a/truncated.txt";
    // The final object data will be truncated if content length is smaller.
    byte[] checksum = storage.put(key, () -> new ByteArrayInputStream(data), 200);
    assertArrayEquals(Arrays.copyOfRange(data, 0, 200), IOUtils.toByteArray(getStream(key)));
    ObjectInfo info = storage.head(key);
    assertEquals(key, info.key());
    assertEquals(200, info.size());
    assertArrayEquals(checksum, info.checksum());
    // Will create object failed is the content length is bigger.
    assertThrows(RuntimeException.class,
        () -> storage.put(key, () -> new ByteArrayInputStream(data), 300));
  }
private InputStream getStream(String key) {
return storage.get(key).stream();
}
  /**
   * Round-trips a 256-byte object and exercises ranged get() semantics:
   * negative length means "to end", zero length yields empty, offsets beyond
   * the object length fail, and ranged reads never mutate the stored data.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testPutAndGetFile(ObjectStorage store) throws IOException {
    setEnv(store);
    byte[] data = TestUtility.rand(256);
    String key = "a/test.txt";
    byte[] checksum = storage.put(key, data);
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key)));
    if (storage.bucket().isDirectory()) {
      // Directory bucket will create missed parent dir.
      assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream("a")));
      assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream("a/")));
    } else {
      assertNull(storage.head("a"));
      assertNull(storage.head("a/"));
    }
    // head() reflects the key, size and checksum returned by put().
    ObjectInfo info = storage.head(key);
    assertEquals(key, info.key());
    assertEquals(data.length, info.size());
    assertArrayEquals(checksum, info.checksum());
    ObjectContent content = storage.get(key);
    assertArrayEquals(info.checksum(), content.checksum());
    assertArrayEquals(data, IOUtils.toByteArray(content.stream()));
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key, 0, -1)));
    assertThrows(RuntimeException.class, () -> storage.get(key, -1, -1), "offset is negative");
    assertThrows(RuntimeException.class, () -> storage.get(key + "/", 0, -1),
        "path not found or resource type is invalid");
    // Lengths at or beyond the object size are clamped to the object end.
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key, 0, 256)));
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key, 0, 512)));
    byte[] secondHalfData = Arrays.copyOfRange(data, 128, 256);
    assertArrayEquals(secondHalfData, IOUtils.toByteArray(getStream(key, 128, -1)));
    assertArrayEquals(secondHalfData, IOUtils.toByteArray(getStream(key, 128, 256)));
    assertArrayEquals(secondHalfData, IOUtils.toByteArray(getStream(key, 128, 257)));
    assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream(key, 128, 0)));
    // A ranged get still reports the whole-object checksum.
    ObjectContent partContent = storage.get(key, 8, 32);
    assertArrayEquals(info.checksum(), partContent.checksum());
    assertArrayEquals(Arrays.copyOfRange(data, 8, 40),
        IOUtils.toByteArray(partContent.stream()));
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key)));
    assertThrows(RuntimeException.class, () -> storage.get(key, 257, 8),
        "offset is bigger than object length");
    assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream(key, 256, 8)));
    assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream(key, 0, 0)));
    assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream(key, 1, 0)));
    // assert the original data is not changed during random get request
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key)));
    storage.delete(key);
    assertNull(storage.head(key));
  }
  /**
   * append() semantics: creating an object with zero bytes is rejected;
   * appending zero bytes to an existing object leaves the checksum unchanged;
   * appending any data changes it.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testAppendAndGetFile(ObjectStorage store) throws Exception {
    setEnv(store);
    String key = "a/testAppendAndGetFile.txt";
    // Append zero bytes.
    assertThrows(NotAppendableException.class, () -> storage.append(key, new byte[0]),
        "Append non-existed object with zero byte is not supported.");
    // Append 256 bytes.
    byte[] data = TestUtility.rand(256);
    byte[] checksum = storage.append(key, data);
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key)));
    // Append zero bytes.
    byte[] newChecksum = storage.append(key, new byte[0]);
    assertArrayEquals(checksum, newChecksum);
    checksum = newChecksum;
    // Append one byte.
    newChecksum = storage.append(key, new byte[1]);
    assertFalse(Arrays.equals(checksum, newChecksum));
    assertArrayEquals(newChecksum, storage.head(key).checksum());
    checksum = newChecksum;
    // Append 1024 byte.
    data = TestUtility.rand(1024);
    newChecksum = storage.append(key, data);
    assertFalse(Arrays.equals(checksum, newChecksum));
    assertArrayEquals(newChecksum, storage.head(key).checksum());
    storage.delete(key);
  }
  /**
   * append() truncates to the declared length when it is smaller than the
   * stream, and fails when the declared length exceeds the stream.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testAppendLengthNotMatch(ObjectStorage store) {
    setEnv(store);
    byte[] data = TestUtility.rand(256);
    String key = "a/testAppendLengthNotMatch.txt";
    storage.append(key, () -> new ByteArrayInputStream(data), 128);
    assertEquals(128, storage.head(key).size());
    assertThrows(RuntimeException.class,
        () -> storage.append(key, () -> new ByteArrayInputStream(data), 1024),
        "Expect unexpected end of stream error.");
  }
  /**
   * head(), objectStatus() and (for general buckets) list() must all report
   * the same ObjectInfo, for both file and directory objects.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testHeadAndListAndObjectStatusShouldGetSameObjectInfo(ObjectStorage store) {
    setEnv(store);
    String key = "testHeadAndListObjectCheckSum.txt";
    byte[] data = TestUtility.rand(256);
    byte[] checksum = storage.put(key, data);
    ObjectInfo obj = storage.head(key);
    assertEquals(obj, storage.objectStatus(key));
    if (!storage.bucket().isDirectory()) {
      // Directory buckets don't support flat listing; skip the list() check there.
      List<ObjectInfo> objects = toList(storage.list(key, null, 1));
      assertEquals(1, objects.size());
      assertEquals(obj, objects.get(0));
      assertArrayEquals(checksum, objects.get(0).checksum());
    }
    key = "testHeadAndListObjectCheckSum/";
    checksum = storage.put(key, new byte[0]);
    obj = storage.head(key);
    assertEquals(obj, storage.objectStatus(key));
    if (!storage.bucket().isDirectory()) {
      List<ObjectInfo> objects = toList(storage.list(key, null, 1));
      assertEquals(1, objects.size());
      assertEquals(obj, objects.get(0));
      assertArrayEquals(checksum, objects.get(0).checksum());
    }
  }
@ParameterizedTest
@MethodSource("provideArguments")
public void testObjectStatus(ObjectStorage store) {
setEnv(store);
// test get file status
String key = "a/b/testObjectStatus.txt";
byte[] data = TestUtility.rand(256);
byte[] checksum = storage.put(key, data);
ObjectInfo obj = storage.head(key);
assertArrayEquals(checksum, obj.checksum());
assertEquals(obj, storage.objectStatus(key));
if (storage.bucket().isDirectory()) {
assertThrows(InvalidObjectKeyException.class, () -> storage.head(key + "/"));
assertThrows(InvalidObjectKeyException.class, () -> storage.objectStatus(key + "/"));
} else {
assertNull(storage.head(key + "/"));
assertNull(storage.objectStatus(key + "/"));
}
// test get dir status
String dirKey = "a/b/dir/";
checksum = storage.put(dirKey, new byte[0]);
obj = storage.head(dirKey);
assertEquals(Constants.MAGIC_CHECKSUM, checksum);
assertArrayEquals(Constants.MAGIC_CHECKSUM, checksum);
assertArrayEquals(checksum, obj.checksum());
assertTrue(obj.isDir());
assertEquals(dirKey, obj.key());
assertEquals(obj, storage.objectStatus(dirKey));
if (storage.bucket().isDirectory()) {
assertNotNull(storage.head("a/b/dir"));
assertEquals("a/b/dir", storage.objectStatus("a/b/dir").key());
} else {
assertNull(storage.head("a/b/dir"));
assertEquals(dirKey, storage.objectStatus("a/b/dir").key());
}
// test get dir status of prefix
String prefix = "a/b/";
obj = storage.objectStatus(prefix);
assertEquals(prefix, obj.key());
assertEquals(Constants.MAGIC_CHECKSUM, obj.checksum());
assertTrue(obj.isDir());
if (storage.bucket().isDirectory()) {
assertEquals(obj, storage.head(prefix));
assertEquals("a/b", storage.objectStatus("a/b").key());
} else {
assertNull(storage.head(prefix));
assertEquals(prefix, storage.objectStatus("a/b").key());
}
}
  /** Creating, re-creating, reading and deleting an explicit directory object. */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testPutAndGetDirectory(ObjectStorage store) throws IOException {
    setEnv(store);
    String key = "a/b/";
    byte[] data = new byte[0];
    storage.put(key, data);
    ObjectInfo info = storage.head(key);
    assertEquals(key, info.key());
    assertEquals(data.length, info.size());
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key)));
    assertArrayEquals(data, IOUtils.toByteArray(getStream(key, 0, 256)));
    // test create the same dir again
    storage.put(key, data);
    storage.delete(key);
    assertNull(storage.head(key));
  }
  /** A second put() to the same key replaces the content entirely. */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testOverwriteFile(ObjectStorage store) throws IOException {
    setEnv(store);
    String key = "a/test.txt";
    byte[] data1 = TestUtility.rand(256);
    byte[] data2 = TestUtility.rand(128);
    storage.put(key, data1);
    assertArrayEquals(data1, IOUtils.toByteArray(getStream(key, 0, -1)));
    // Overwrite with a shorter payload; the old bytes must not survive.
    storage.put(key, data2);
    assertArrayEquals(data2, IOUtils.toByteArray(getStream(key, 0, -1)));
    storage.delete(key);
    assertNull(storage.head(key));
  }
  /**
   * Flat (no-delimiter) listing: objects come back in lexicographic key order,
   * maxKeys bounds the page size, and the marker resumes after the given key.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testListObjectsWithEmptyDelimiters(ObjectStorage store) {
    setEnv(store);
    // Directory bucket only supports list with delimiter = '/' currently.
    assumeFalse(storage.bucket().isDirectory());
    String key1 = "a/b/c/d";
    String key2 = "a/b";
    byte[] data = TestUtility.rand(256);
    for (int i = 0; i < 10; i++) {
      storage.put(String.format("%s/file-%d.txt", key1, i), data);
      storage.put(String.format("%s/file-%d.txt", key2, i), data);
    }
    // list 100 objects under 'a/', there are total 20 objects.
    ListObjectsResponse response = list("a/", "", 100, "");
    assertEquals(20, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/c/d/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-9.txt", response.objects().get(19).key());
    // list 20 objects and there only have 20 objects under 'a/'
    response = list("a/", "", 20, "");
    assertEquals(20, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/c/d/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-9.txt", response.objects().get(19).key());
    // list the top 10 objects among 20 objects
    response = list("a/", "", 10, "");
    assertEquals(10, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/c/d/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/c/d/file-9.txt", response.objects().get(9).key());
    // list the next 5 objects behind a/b/c/d/file-9.txt among 20 objects
    response = list("a/", "a/b/c/d/file-9.txt", 5, "");
    assertEquals(5, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-4.txt", response.objects().get(4).key());
    // list the next 10 objects behind a/b/c/d/file-9.txt among 20 objects
    response = list("a/", "a/b/c/d/file-9.txt", 10, "");
    assertEquals(10, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-9.txt", response.objects().get(9).key());
  }
  /**
   * Listing an empty directory with '/' delimiter returns the directory object
   * itself; using the directory as the marker excludes it from the result.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testListEmptyDirWithSlashDelimiter(ObjectStorage store) {
    setEnv(store);
    String key = "a/b/";
    storage.put(key, new byte[0]);
    ListObjectsResponse response = list(key, null, 10, "/");
    assertEquals(1, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/", response.objects().get(0).key());
    response = list(key, key, 10, "/");
    assertEquals(0, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
  }
  /**
   * batchDelete() removes existing keys, tolerates missing ones, reports any
   * failures, and rejects batches larger than 1000 keys.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testDeleteMultipleKeys(ObjectStorage store) {
    setEnv(store);
    String prefix = "a/b";
    byte[] data = TestUtility.rand(256);
    List<String> keys = Lists.newArrayList();
    // Interleave 50 real objects with 50 never-created keys.
    for (int i = 0; i < 50; i++) {
      String existingKey = String.format("%s/existing-file-%d.txt", prefix, i);
      storage.put(existingKey, data);
      keys.add(existingKey);
      String unExistingKey = String.format("%s/unExisting-file-%d.txt", prefix, i);
      keys.add(unExistingKey);
    }
    List<String> failedKeys = storage.batchDelete(keys);
    // Failed keys must still exist; every other key must be gone.
    for (String key : failedKeys) {
      assertNotNull(storage.head(key));
    }
    for (String key : keys) {
      if (!failedKeys.contains(key)) {
        assertNull(storage.head(key));
      }
    }
    assertThrows(IllegalArgumentException.class, () -> storage.batchDelete(
        IntStream.range(0, 1001).mapToObj(String::valueOf).collect(Collectors.toList())),
        "The deleted keys size should be <= 1000");
  }
  /**
   * Delimited listing with a null marker: common prefixes group child keys,
   * and a prefix without a trailing slash groups at the next delimiter.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testListObjectsWithEmptyMarkers(ObjectStorage store) {
    setEnv(store);
    String key1 = "a/b/c/d";
    String key2 = "a/b";
    String key3 = "a1/b1";
    // create the folder to compatible with directory bucket.
    storage.put("a/", new byte[0]);
    storage.put("a/b/", new byte[0]);
    storage.put("a/b/c/", new byte[0]);
    storage.put("a/b/c/d/", new byte[0]);
    storage.put("a1/", new byte[0]);
    storage.put("a1/b1/", new byte[0]);
    byte[] data = TestUtility.rand(256);
    for (int i = 0; i < 10; i++) {
      storage.put(String.format("%s/file-%d.txt", key1, i), data);
      storage.put(String.format("%s/file-%d.txt", key2, i), data);
      storage.put(String.format("%s/file-%d.txt", key3, i), data);
    }
    // group objects by '/' under 'a/'
    ListObjectsResponse response = list("a/", null, 100, "/");
    assertEquals(1, response.objects().size());
    assertEquals("a/", response.objects().get(0).key());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    response = list("a", null, 100, "/");
    assertEquals(0, response.objects().size());
    assertEquals(2, response.commonPrefixes().size());
    assertEquals("a/", response.commonPrefixes().get(0));
    assertEquals("a1/", response.commonPrefixes().get(1));
    // group objects by '/' under 'a/b/' and group objects by 'b/' under 'a', they are same
    response = list("a/b/", null, 100, "/");
    assertEquals(11, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/", response.objects().get(0).key());
    assertEquals("a/b/file-0.txt", response.objects().get(1).key());
    assertEquals("a/b/file-9.txt", response.objects().get(10).key());
    response = list("a/b", null, 100, "/");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    if (!storage.bucket().isDirectory()) {
      // Directory bucket only supports list with delimiter = '/' currently.
      response = list("a", null, 100, "b/");
      assertEquals(13, response.objects().size());
      assertEquals(1, response.commonPrefixes().size());
      assertEquals("a/b/", response.commonPrefixes().get(0));
      assertEquals("a/", response.objects().get(0).key());
      assertEquals("a1/", response.objects().get(1).key());
      assertEquals("a1/b1/", response.objects().get(2).key());
      assertEquals("a1/b1/file-0.txt", response.objects().get(3).key());
      assertEquals("a1/b1/file-9.txt", response.objects().get(12).key());
      response = list("a/", null, 100, "b/");
      assertEquals(1, response.objects().size());
      assertEquals(1, response.commonPrefixes().size());
      assertEquals("a/b/", response.commonPrefixes().get(0));
      assertEquals("a/", response.objects().get(0).key());
    }
    // group objects by different delimiter under 'a/b/c/d/' or 'a/b/c/d'
    response = list("a/b/c/d/", null, 100, "/");
    assertEquals(11, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/c/d/", response.objects().get(0).key());
    response = list("a/b/c/d/", null, 5, "/");
    assertEquals(5, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a/b/c/d/", response.objects().get(0).key());
    response = list("a/b/c/d", null, 100, "/");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/c/d/", response.commonPrefixes().get(0));
  }
  /**
   * maxKeys limits objects and common prefixes combined, in key order:
   * prefixes that sort before the files are emitted first, and a later file
   * ("h-file") only appears when the limit reaches far enough.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testListObjectWithLimitObjectAndCommonPrefixes(ObjectStorage store) {
    setEnv(store);
    String key1 = "a/b/c/d";
    String key2 = "a/b";
    String key3 = "a1/b1";
    byte[] data = TestUtility.rand(256);
    for (int i = 0; i < 10; i++) {
      storage.put(String.format("%s/file-%d.txt", key1, i), data);
      storage.put(String.format("%s/file-%d.txt", key2, i), data);
      storage.put(String.format("%s/file-%d.txt", key3, i), data);
    }
    List<String> dirKeys = Lists.newArrayList("a/b/d/", "a/b/e/", "a/b/f/", "a/b/g/");
    for (String key : dirKeys) {
      storage.put(key, new byte[0]);
    }
    // group objects by '/' under 'a/b/', and limit top 5 objects among 10 objects and 1 common
    // prefix.
    ListObjectsResponse response = list("a/b/", "a/b/", 5, "/");
    assertEquals(1, response.objects().size());
    assertEquals(4, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/d/", response.commonPrefixes().get(1));
    assertEquals("a/b/e/", response.commonPrefixes().get(2));
    assertEquals("a/b/f/", response.commonPrefixes().get(3));
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    response = list("a/b/", "a/b/", 14, "/");
    assertEquals(10, response.objects().size());
    assertEquals(4, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/d/", response.commonPrefixes().get(1));
    assertEquals("a/b/e/", response.commonPrefixes().get(2));
    assertEquals("a/b/f/", response.commonPrefixes().get(3));
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-9.txt", response.objects().get(9).key());
    response = list("a/b/", "a/b/", 15, "/");
    assertEquals(10, response.objects().size());
    assertEquals(5, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/d/", response.commonPrefixes().get(1));
    assertEquals("a/b/e/", response.commonPrefixes().get(2));
    assertEquals("a/b/f/", response.commonPrefixes().get(3));
    assertEquals("a/b/g/", response.commonPrefixes().get(4));
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-9.txt", response.objects().get(9).key());
    // a/b/h-file-0.txt is behind from a/b/g/
    storage.put("a/b/h-file-0.txt", data);
    response = list("a/b/", "a/b/", 15, "/");
    assertEquals(10, response.objects().size());
    assertEquals(5, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/d/", response.commonPrefixes().get(1));
    assertEquals("a/b/e/", response.commonPrefixes().get(2));
    assertEquals("a/b/f/", response.commonPrefixes().get(3));
    assertEquals("a/b/g/", response.commonPrefixes().get(4));
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/file-9.txt", response.objects().get(9).key());
    response = list("a/b/", "a/b/", 20, "/");
    assertEquals(11, response.objects().size());
    assertEquals(5, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/d/", response.commonPrefixes().get(1));
    assertEquals("a/b/e/", response.commonPrefixes().get(2));
    assertEquals("a/b/f/", response.commonPrefixes().get(3));
    assertEquals("a/b/g/", response.commonPrefixes().get(4));
    assertEquals("a/b/file-0.txt", response.objects().get(0).key());
    assertEquals("a/b/h-file-0.txt", response.objects().get(10).key());
    response = list("a/b/", "a/b/", 1, "/");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    response = list("a/b/", "a/b/", 2, "/");
    assertEquals(0, response.objects().size());
    assertEquals(2, response.commonPrefixes().size());
    assertEquals("a/b/c/", response.commonPrefixes().get(0));
    assertEquals("a/b/d/", response.commonPrefixes().get(1));
  }
  /**
   * The Iterable returned by list()/listDir() must produce fresh, independent
   * iterators: two iterators over the same result see the same full sequence.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testListedIteratorIsIdempotent(ObjectStorage store) {
    setEnv(store);
    String key1 = "a/b/c/d";
    byte[] data = TestUtility.rand(256);
    for (int i = 0; i < 10; i++) {
      storage.put(String.format("%s/file-%d.txt", key1, i), data);
    }
    Iterable<ObjectInfo> res;
    if (storage.bucket().isDirectory()) {
      res = ((DirectoryStorage) storage).listDir("a/b/c/d/", true);
    } else {
      res = storage.list("a/b/c/d/", "a/b/c/d/", 10);
    }
    // Two iterators from the same Iterable must not share state.
    Iterator<ObjectInfo> batch1 = res.iterator();
    Iterator<ObjectInfo> batch2 = res.iterator();
    for (int i = 0; i < 10; i++) {
      assertTrue(batch1.hasNext());
      ObjectInfo obj = batch1.next();
      assertEquals(String.format("a/b/c/d/file-%d.txt", i), obj.key());
    }
    assertFalse(batch1.hasNext());
    for (int i = 0; i < 10; i++) {
      assertTrue(batch2.hasNext());
      ObjectInfo obj = batch2.next();
      assertEquals(String.format("a/b/c/d/file-%d.txt", i), obj.key());
    }
    assertFalse(batch2.hasNext());
  }
  /**
   * Listing paginates correctly when the per-request batch size (5) is smaller
   * than the result set; maxKey = -1 means unlimited.
   */
  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testListObjectsWithSmallBatch(ObjectStorage store) {
    setEnv(store);
    assumeFalse(storage.bucket().isDirectory());
    String key1 = "a/b/c/d/";
    byte[] data = TestUtility.rand(256);
    for (int i = 0; i < 10; i++) {
      storage.put(String.format("%sfile-%d.txt", key1, i), data);
    }
    // change list object count
    Configuration newConf = new Configuration(storage.conf());
    newConf.setInt(TosKeys.FS_TOS_LIST_OBJECTS_COUNT, 5);
    storage.initialize(newConf, storage.bucket().name());
    List<Integer> maxKeys = Arrays.asList(5, 10, 9, 20, -1);
    for (int maxKey : maxKeys) {
      Iterator<ObjectInfo> objs = storage.list(key1, key1, maxKey).iterator();
      // Expect min(maxKey, 10) results; -1 is treated as "no limit".
      int end = Math.min(maxKey == -1 ? 10 : maxKey, 10);
      for (int i = 0; i < end; i++) {
        assertTrue(objs.hasNext());
        ObjectInfo obj = objs.next();
        assertEquals(String.format("a/b/c/d/file-%d.txt", i), obj.key());
      }
      assertFalse(objs.hasNext());
    }
    // reset list object count
    newConf = new Configuration(storage.conf());
    newConf.setInt(TosKeys.FS_TOS_LIST_OBJECTS_COUNT, 1000);
    storage.initialize(newConf, storage.bucket().name());
  }
// Exercises list() with non-"/" delimiters: grouping keys into common prefixes,
// maxKeys truncation (objects + prefixes count toward the limit together),
// startAfter markers, and the ordering of common prefixes.
@ParameterizedTest
@MethodSource("provideArguments")
public void testListObjectsWithSpecificDelimiters(ObjectStorage store) {
    setEnv(store);
    assumeFalse(storage.bucket().isDirectory());
    String key1 = "a/b/c/d";
    String key2 = "a/b";
    String key3 = "a1/b1";
    byte[] data = TestUtility.rand(256);
    for (int i = 0; i < 10; i++) {
        storage.put(String.format("%s/file-%d.txt", key1, i), data);
        storage.put(String.format("%s/file-%d.txt", key2, i), data);
        storage.put(String.format("%s/file-%d.txt", key3, i), data);
    }
    // Delimiter "b/": every key under "a/b/" collapses into the single common prefix
    // "a/b/", while "a1/b1/..." keys (no "b/" substring after the prefix) list individually.
    ListObjectsResponse response = list("a", "", 11, "b/");
    assertEquals(10, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    assertEquals("a1/b1/file-0.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-9.txt", response.objects().get(9).key());
    // maxKeys = 5 counts the common prefix plus four objects.
    response = list("a", "", 5, "b/");
    assertEquals(4, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    assertEquals("a1/b1/file-0.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-3.txt", response.objects().get(3).key());
    // startAfter past the "a/b/" group: only plain objects remain.
    response = list("a", "a1/b1/file-3.txt", 5, "b/");
    assertEquals(5, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a1/b1/file-4.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-8.txt", response.objects().get(4).key());
    response = list("a", "a1/b1/file-3.txt", 6, "b/");
    assertEquals(6, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    assertEquals("a1/b1/file-4.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-9.txt", response.objects().get(5).key());
    // startAfter inside the "a/b/" group: the group still appears as one prefix.
    response = list("a", "a/b/file-3.txt", 5, "b/");
    assertEquals(4, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    assertEquals("a1/b1/file-0.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-3.txt", response.objects().get(3).key());
    response = list("a", "a/b/file-3.txt", 10, "b/");
    assertEquals(9, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    assertEquals("a1/b1/file-0.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-8.txt", response.objects().get(8).key());
    response = list("a", "a/b/file-3.txt", 11, "b/");
    assertEquals(10, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/", response.commonPrefixes().get(0));
    assertEquals("a1/b1/file-0.txt", response.objects().get(0).key());
    assertEquals("a1/b1/file-9.txt", response.objects().get(9).key());
    response = list("a", "a/b/", 1, "b/");
    assertEquals(1, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    // A multi-character delimiter ("/file") groups all files under one prefix.
    response = list("a/b/c/d", "", 100, "/file");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/c/d/file", response.commonPrefixes().get(0));
    response = list("a/b/c/d/", "", 100, "file");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a/b/c/d/file", response.commonPrefixes().get(0));
    // group objects by different delimiter under 'a1' or 'a1/'
    response = list("a1", "", 100, "");
    assertEquals(10, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    response = list("a1", "", 100, "/");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a1/", response.commonPrefixes().get(0));
    response = list("a1/", "", 100, "/");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a1/b1/", response.commonPrefixes().get(0));
    response = list("a1/", "", 1, "/");
    assertEquals(0, response.objects().size());
    assertEquals(1, response.commonPrefixes().size());
    assertEquals("a1/b1/", response.commonPrefixes().get(0));
    // group objects by non-exist delimiter under 'a1' or 'a1/': no grouping happens.
    response = list("a1", "", 100, "non-exist");
    assertEquals(10, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    response = list("a1/", "", 100, "non-exist");
    assertEquals(10, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    // Common prefixes come back in lexicographic order ("a/b" before "a1/b").
    response = list("a", "", 100, "b");
    assertEquals(0, response.objects().size());
    assertEquals(2, response.commonPrefixes().size());
    assertEquals("a/b", response.commonPrefixes().get(0));
    assertEquals("a1/b", response.commonPrefixes().get(1));
}
// Verifies that a directory marker ("a/b/") and a plain object ("a/b") can coexist
// and be written independently on object buckets; directory buckets skip the
// overwrite step because they reject changing the resource type of a path.
@ParameterizedTest
@MethodSource("provideArguments")
public void testOverwriteDirectoryWithAFile(ObjectStorage store) throws IOException {
    setEnv(store);
    String dirKey = "a/b/";
    String key = "a/b";
    storage.delete("a/");
    byte[] data1 = new byte[0];
    byte[] data2 = TestUtility.rand(128);
    storage.put(dirKey, data1);
    assertArrayEquals(data1, IOUtils.toByteArray(getStream(dirKey, 0, 256)));
    if (!storage.bucket().isDirectory()) {
        // Directory buckets don't allow an overwrite that changes the resource type.
        storage.put(key, data2);
        assertArrayEquals(data2, IOUtils.toByteArray(getStream(key, 0, 256)));
    }
    storage.delete(key);
    storage.delete(dirKey);
    assertNull(storage.head(key));
    assertNull(storage.head(dirKey));
}
/**
 * Opens a ranged read over {@code key}, starting at byte {@code off} and bounded by
 * {@code limit}, and returns the underlying content stream.
 */
private InputStream getStream(String key, long off, long limit) {
    ObjectContent content = storage.get(key, off, limit);
    return content.stream();
}
// Deletes a nested directory marker and verifies that only that marker disappears:
// both "a/b/" and "a/b" read back as gone while the parent marker "a/" survives.
// Also checks the marker's listing is empty (a marker does not list itself).
@ParameterizedTest
@MethodSource("provideArguments")
public void testDeleteNonEmptyDir(ObjectStorage store) throws IOException {
    setEnv(store);
    storage.put("a/", new byte[0]);
    storage.put("a/b/", new byte[0]);
    assertArrayEquals(new byte[0], IOUtils.toByteArray(getStream("a/b/", 0, 256)));
    ListObjectsResponse response = list("a/b/", "a/b/", 100, "/");
    assertEquals(0, response.objects().size());
    assertEquals(0, response.commonPrefixes().size());
    if (!storage.bucket().isDirectory()) {
        // Directory bucket only supports list with delimiter = '/'.
        response = list("a/b/", "a/b/", 100, null);
        assertEquals(0, response.objects().size());
        assertEquals(0, response.commonPrefixes().size());
    }
    storage.delete("a/b/");
    assertNull(storage.head("a/b/"));
    assertNull(storage.head("a/b"));
    assertNotNull(storage.head("a/"));
}
// Builds a small tree of empty directory markers, checks each exists, deletes one
// leaf individually, then wipes everything via deleteAll("") and verifies all gone.
@ParameterizedTest
@MethodSource("provideArguments")
public void testRecursiveDelete(ObjectStorage store) {
    setEnv(store);
    List<String> markers = Arrays.asList("a/", "a/b/", "a/b/c1/", "a/b/c2/", "a/b/c3/");
    for (String marker : markers) {
        storage.put(marker, new byte[0]);
    }
    for (String marker : markers) {
        assertNotNull(storage.head(marker));
    }
    // A single leaf marker can be removed on its own.
    storage.delete("a/b/c3/");
    assertNull(storage.head("a/b/c3/"));
    // deleteAll with an empty prefix removes everything that remains.
    storage.deleteAll("");
    for (String marker : Arrays.asList("a/b/c1/", "a/b/c2/", "a/b/", "a/")) {
        assertNull(storage.head(marker));
    }
}
// Verifies prefix-based list() with no delimiter: single directory marker, single
// file, mixed results, startAfter markers, partial-path prefixes, and maxKeys
// truncation. Keys come back in lexicographic order.
@ParameterizedTest
@MethodSource("provideArguments")
public void testListObjectKeys(ObjectStorage store) {
    setEnv(store);
    assumeFalse(storage.bucket().isDirectory());
    byte[] dirBytes = new byte[0];
    byte[] fileBytes = TestUtility.rand(128);
    storage.put("a/b1/", dirBytes);
    storage.put("a/b2/c0/", dirBytes);
    storage.put("a/b2/c1/d1.txt", fileBytes);
    storage.put("a/b2/c1/e1.txt", fileBytes);
    storage.put("a/b2/c2.txt", fileBytes);
    // list single dir — with or without the trailing slash in the prefix.
    List<ObjectInfo> ret = toList(storage.list("a/b1", "", 10));
    assertEquals(1, ret.size());
    assertEquals("a/b1/", ret.get(0).key());
    assertEquals(0, ret.get(0).size());
    ret = toList(storage.list("a/b1/", "", 10));
    assertEquals(1, ret.size());
    assertEquals("a/b1/", ret.get(0).key());
    assertEquals(0, ret.get(0).size());
    // list single file
    ret = toList(storage.list("a/b2/c1/d1.txt", "", 10));
    assertEquals(1, ret.size());
    assertEquals("a/b2/c1/d1.txt", ret.get(0).key());
    assertEquals(fileBytes.length, ret.get(0).size());
    // list multiple files & dirs
    ret = toList(storage.list("a/b2", "", 10));
    assertEquals(4, ret.size());
    assertEquals("a/b2/c0/", ret.get(0).key());
    assertEquals("a/b2/c1/d1.txt", ret.get(1).key());
    assertEquals("a/b2/c1/e1.txt", ret.get(2).key());
    assertEquals("a/b2/c2.txt", ret.get(3).key());
    assertEquals(dirBytes.length, ret.get(0).size());
    // list single file with marker (startAfter skips everything up to e1.txt)
    ret = toList(storage.list("a/b2", "a/b2/c1/e1.txt", 10));
    assertEquals(1, ret.size());
    assertEquals("a/b2/c2.txt", ret.get(0).key());
    assertEquals(fileBytes.length, ret.get(0).size());
    // list multiple files with marker
    ret = toList(storage.list("a/b2", "a/b2/c1/", 10));
    assertEquals(3, ret.size());
    assertEquals("a/b2/c1/d1.txt", ret.get(0).key());
    assertEquals("a/b2/c1/e1.txt", ret.get(1).key());
    assertEquals("a/b2/c2.txt", ret.get(2).key());
    assertEquals(fileBytes.length, ret.get(0).size());
    // list multiple files & dirs with part path as prefix
    ret = toList(storage.list("a/b2/c", "", 10));
    assertEquals(4, ret.size());
    assertEquals("a/b2/c0/", ret.get(0).key());
    assertEquals("a/b2/c1/d1.txt", ret.get(1).key());
    assertEquals("a/b2/c1/e1.txt", ret.get(2).key());
    assertEquals("a/b2/c2.txt", ret.get(3).key());
    assertEquals(dirBytes.length, ret.get(0).size());
    // maxKeys = 2 truncates to the first two keys in order.
    ret = toList(storage.list("a/b2/c", "", 2));
    assertEquals(2, ret.size());
    assertEquals("a/b2/c0/", ret.get(0).key());
    ret = toList(storage.list("a/b2/c1/d1.", "", 10));
    assertEquals(1, ret.size());
    assertEquals("a/b2/c1/d1.txt", ret.get(0).key());
    assertEquals(fileBytes.length, ret.get(0).size());
}
// Same scenarios as testListObjectKeys, but through listAll(), which takes no
// maxKeys limit and should return every matching key.
@ParameterizedTest
@MethodSource("provideArguments")
public void testListAllObjectKeys(ObjectStorage store) {
    setEnv(store);
    assumeFalse(storage.bucket().isDirectory());
    byte[] dirBytes = new byte[0];
    byte[] fileBytes = TestUtility.rand(128);
    storage.put("a/b1/", dirBytes);
    storage.put("a/b2/c0/", dirBytes);
    storage.put("a/b2/c1/d1.txt", fileBytes);
    storage.put("a/b2/c1/e1.txt", fileBytes);
    // NOTE(review): unlike testListObjectKeys, c2.txt is written with the empty
    // dirBytes payload here; its size is never asserted below — confirm intentional.
    storage.put("a/b2/c2.txt", dirBytes);
    // list single dir
    List<ObjectInfo> ret = Lists.newArrayList(storage.listAll("a/b1", ""));
    assertEquals(1, ret.size());
    assertEquals("a/b1/", ret.get(0).key());
    assertEquals(0, ret.get(0).size());
    // list single file
    ret = Lists.newArrayList(storage.listAll("a/b2/c1/d1.txt", ""));
    assertEquals(1, ret.size());
    assertEquals("a/b2/c1/d1.txt", ret.get(0).key());
    assertEquals(fileBytes.length, ret.get(0).size());
    // list multiple files & dirs
    ret = Lists.newArrayList(storage.listAll("a/b2", ""));
    assertEquals(4, ret.size());
    assertEquals("a/b2/c0/", ret.get(0).key());
    assertEquals("a/b2/c1/d1.txt", ret.get(1).key());
    assertEquals("a/b2/c1/e1.txt", ret.get(2).key());
    assertEquals("a/b2/c2.txt", ret.get(3).key());
    assertEquals(dirBytes.length, ret.get(0).size());
    // list multiple files & dirs with part path as prefix
    ret = Lists.newArrayList(storage.listAll("a/b2/c", ""));
    assertEquals(4, ret.size());
    assertEquals("a/b2/c0/", ret.get(0).key());
    assertEquals("a/b2/c1/d1.txt", ret.get(1).key());
    assertEquals("a/b2/c1/e1.txt", ret.get(2).key());
    assertEquals("a/b2/c2.txt", ret.get(3).key());
    assertEquals(dirBytes.length, ret.get(0).size());
}
// Listing a prefix that matches nothing must yield an empty result on both
// directory buckets (via listDir) and object buckets (via list).
@ParameterizedTest
@MethodSource("provideArguments")
public void testListEmptyKeys(ObjectStorage store) {
    setEnv(store);
    Iterable<ObjectInfo> result;
    if (storage.bucket().isDirectory()) {
        result = ((DirectoryStorage) storage).listDir("not-exist", true);
    } else {
        result = storage.list("not-exist", "", 2);
    }
    assertEquals(0, Lists.newArrayList(result).size());
}
// Completing a multipart upload with zero parts is invalid and must be rejected.
@ParameterizedTest
@MethodSource("provideArguments")
public void testMultiUploadEmptyFile(ObjectStorage store) {
    setEnv(store);
    String key = "a/b/empty.txt";
    String uploadId = storage.createMultipartUpload(key).uploadId();
    assertThrows(Exception.class,
        () -> storage.completeUpload(key, uploadId, Lists.newArrayList()));
}
// A single zero-length part is legal: completing the upload yields an empty object.
@ParameterizedTest
@MethodSource("provideArguments")
public void testMultiUploadZeroByte(ObjectStorage store) throws IOException {
    setEnv(store);
    String key = "a/b/zero.txt";
    String uploadId = storage.createMultipartUpload(key).uploadId();
    Part onlyPart = storage.uploadPart(key, uploadId, 1,
        () -> new ByteArrayInputStream(new byte[0]), 0);
    storage.completeUpload(key, uploadId, Lists.newArrayList(onlyPart));
    assertArrayEquals(ObjectTestUtils.EMPTY_BYTES, IOUtils.toByteArray(getStream(key)));
}
// Uploads two objects through the multipart API with different part counts (2 and 3)
// and verifies the stored bytes round-trip exactly.
@ParameterizedTest
@MethodSource("provideArguments")
public void testMultiUploadFile(ObjectStorage store) throws IOException {
    setEnv(store);
    String firstKey = "a/b/c/e.txt";
    String firstUploadId = storage.createMultipartUpload(firstKey).uploadId();
    assertNotEquals(firstUploadId, "");
    byte[] expected = multipleUpload(firstKey, firstUploadId, 2, true);
    assertArrayEquals(expected, IOUtils.toByteArray(getStream(firstKey)));
    String secondKey = "a/b/e/e.txt";
    String secondUploadId = storage.createMultipartUpload(secondKey).uploadId();
    assertNotEquals(secondUploadId, "");
    expected = multipleUpload(secondKey, secondUploadId, 3, true);
    assertArrayEquals(expected, IOUtils.toByteArray(getStream(secondKey)));
}
// Verifies that a multipart-uploaded object and a plain put() object with the same
// content report the same checksum from head(), and that list() returns metadata
// equal to head() for the multipart object.
@ParameterizedTest
@MethodSource("provideArguments")
public void testPutAndCompleteMPUWithSameContent(ObjectStorage store) throws IOException {
    setEnv(store);
    String mpu = "a/b/mpu.txt";
    String put = "a/b/put.txt";
    // 11 MiB forces multiple 5 MiB parts in the multipart path.
    byte[] dataset = TestUtility.rand(11 << 20);
    byte[] checksum = multipleUpload(mpu, dataset);
    storage.put(put, dataset);
    ObjectInfo mputObj = storage.head(mpu);
    ObjectInfo putObj = storage.head(put);
    assertArrayEquals(checksum, mputObj.checksum());
    assertArrayEquals(checksum, putObj.checksum());
    if (!storage.bucket().isDirectory()) {
        List<ObjectInfo> objectInfo = toList(storage.list(mpu, null, 10));
        assertEquals(mputObj, objectInfo.get(0));
    }
}
// Verifies listUploads(): in-progress multipart uploads are returned filtered by
// prefix, the returned Iterable is re-iterable, and aborted uploads disappear.
@ParameterizedTest
@MethodSource("provideArguments")
public void testListUploads(ObjectStorage store) {
    setEnv(store);
    String key1 = "a/b/c/e.txt";
    String uploadId1 = storage.createMultipartUpload(key1).uploadId();
    assertNotEquals(uploadId1, "");
    // Parts are uploaded but the uploads are intentionally left incomplete.
    multipleUpload(key1, uploadId1, 2, false);
    String key2 = "a/b/e/e.txt";
    String uploadId2 = storage.createMultipartUpload(key2).uploadId();
    assertNotEquals(uploadId2, "");
    multipleUpload(key2, uploadId2, 3, false);
    Iterable<MultipartUpload> iterable = storage.listUploads("");
    List<MultipartUpload> uploads = Lists.newArrayList(iterable.iterator());
    assertEquals(2, uploads.size());
    assertEquals(key1, uploads.get(0).key());
    assertEquals(uploadId1, uploads.get(0).uploadId());
    assertEquals(key2, uploads.get(1).key());
    assertEquals(uploadId2, uploads.get(1).uploadId());
    // check iterator is idempotent
    uploads = Lists.newArrayList(iterable.iterator());
    assertEquals(2, uploads.size());
    assertEquals(key1, uploads.get(0).key());
    assertEquals(uploadId1, uploads.get(0).uploadId());
    assertEquals(key2, uploads.get(1).key());
    assertEquals(uploadId2, uploads.get(1).uploadId());
    // prefix filtering: "a/b/" matches both uploads, "a/b/c/" only the first.
    uploads = Lists.newArrayList(storage.listUploads("a/b/"));
    assertEquals(2, uploads.size());
    assertEquals(key1, uploads.get(0).key());
    assertEquals(uploadId1, uploads.get(0).uploadId());
    assertEquals(key2, uploads.get(1).key());
    assertEquals(uploadId2, uploads.get(1).uploadId());
    uploads = Lists.newArrayList(storage.listUploads("a/b/c/"));
    assertEquals(1, uploads.size());
    assertEquals(key1, uploads.get(0).key());
    assertEquals(uploadId1, uploads.get(0).uploadId());
    // Aborted uploads must vanish from the listing.
    storage.abortMultipartUpload(key1, uploadId1);
    storage.abortMultipartUpload(key2, uploadId2);
    assertEquals(0, Lists.newArrayList((storage.listUploads("a/b/"))).size());
}
/**
 * Uploads {@code partCnt} identical 5 MiB parts under the given multipart upload and
 * optionally completes it.
 *
 * @param completeUpload whether to call completeUpload after the parts are uploaded
 * @return the concatenated bytes that were uploaded (the expected object content)
 * @throws RuntimeException if any part upload or the completion fails; the upload is
 *         aborted first so no orphaned multipart upload is left behind
 */
private byte[] multipleUpload(String key, String uploadId, int partCnt, boolean completeUpload) {
    int partSize = 5 * 1024 * 1024;
    byte[] dataset = new byte[partCnt * partSize];
    byte[] partData = TestUtility.rand(partSize);
    try {
        int offset = 0;
        List<Part> parts = new ArrayList<>();
        for (int i = 1; i <= partCnt; i++) {
            Part part = storage.uploadPart(key, uploadId, i, () -> new ByteArrayInputStream(partData),
                partData.length);
            parts.add(part);
            System.arraycopy(partData, 0, dataset, offset, partData.length);
            offset += partData.length;
        }
        if (completeUpload) {
            storage.completeUpload(key, uploadId, parts);
        }
    } catch (RuntimeException e) {
        // Clean up the half-finished upload, then propagate: silently returning the
        // dataset here would make callers assert against content that was never written.
        storage.abortMultipartUpload(key, uploadId);
        throw e;
    }
    return dataset;
}
/**
 * Uploads {@code dataset} to {@code key} via multipart upload in 5 MiB parts,
 * verifying each part's ETag and the final object content.
 *
 * @return the checksum reported by completeUpload
 * @throws IOException if reading the object back fails; any failure aborts the
 *         upload before the exception is rethrown
 */
private byte[] multipleUpload(String key, byte[] dataset) throws IOException {
    int partSize = 5 * 1024 * 1024;
    int partCnt = (int) Math.ceil((double) dataset.length / partSize);
    String uploadId = storage.createMultipartUpload(key).uploadId();
    assertNotEquals(uploadId, "");
    try {
        List<Part> parts = new ArrayList<>();
        for (int i = 0; i < partCnt; i++) {
            int start = i * partSize;
            // The last part may be shorter than partSize.
            int end = Math.min(dataset.length, start + partSize);
            byte[] partData = Arrays.copyOfRange(dataset, start, end);
            Part part =
                storage.uploadPart(key, uploadId, i + 1, () -> new ByteArrayInputStream(partData),
                    partData.length);
            // The part ETag is the MD5 of the part body (surrounding quotes stripped).
            assertEquals(DigestUtils.md5Hex(partData), part.eTag().replace("\"", ""));
            parts.add(part);
        }
        byte[] checksum = storage.completeUpload(key, uploadId, parts);
        assertArrayEquals(dataset, IOUtils.toByteArray(getStream(key)));
        return checksum;
    } catch (IOException | RuntimeException e) {
        // Abort so no orphaned multipart upload is left behind, then rethrow.
        storage.abortMultipartUpload(key, uploadId);
        throw e;
    }
}
// Copies a 10 MB object part-by-part into a new key and verifies the bytes.
@ParameterizedTest
@MethodSource("provideArguments")
public void testUploadPartCopy10MB(ObjectStorage store) {
    setEnv(store);
    testUploadPartCopy("src10MB.txt", "dst10MB.txt", 10 << 20);
}
// Copies a 100 MB object part-by-part into a new key and verifies the bytes.
@ParameterizedTest
@MethodSource("provideArguments")
public void testUploadPartCopy100MB(ObjectStorage store) {
    setEnv(store);
    testUploadPartCopy("src100MB.txt", "dst100MB.txt", 100 << 20);
}
// Copies a 65 MB object (not a multiple of the 5 MB part size) and verifies the bytes.
@ParameterizedTest
@MethodSource("provideArguments")
public void testUploadPartCopy65MB(ObjectStorage store) {
    setEnv(store);
    testUploadPartCopy("src65MB.txt", "dst65MB.txt", 65 << 20);
}
/**
 * Uploads a {@code fileSize}-byte source object via multipart upload, rebuilds it at
 * {@code key} using uploadPartCopy over 5 MiB ranges, and verifies the copied bytes.
 * Any failure aborts the destination upload and is rethrown so the test actually
 * fails instead of silently passing.
 */
private void testUploadPartCopy(String srcKey, String key, int fileSize) {
    MultipartUpload srcMultipartUpload = storage.createMultipartUpload(srcKey);
    long partSize = 5 << 20;
    int partCnt = (int) (fileSize / partSize + (fileSize % partSize == 0 ? 0 : 1));
    byte[] data =
        multipleUpload(srcMultipartUpload.key(), srcMultipartUpload.uploadId(), partCnt, true);
    MultipartUpload dstMultipartUpload = storage.createMultipartUpload(key);
    long copyPartRangeStart = 0L;
    List<Part> results = Lists.newArrayList();
    try {
        for (int i = 0; i < partCnt; i++) {
            // The copy range end is inclusive, hence the -1; the last part may be short.
            Part result = storage.uploadPartCopy(srcKey, key, dstMultipartUpload.uploadId(), i + 1,
                copyPartRangeStart, Math.min(copyPartRangeStart + partSize, fileSize) - 1);
            results.add(result);
            copyPartRangeStart += partSize;
        }
        storage.completeUpload(key, dstMultipartUpload.uploadId(), results);
        assertArrayEquals(data, IOUtils.toByteArray(getStream(key)));
    } catch (Exception e) {
        // Abort the half-finished upload, then fail the test: swallowing the
        // exception here would let a broken copy path go completely unnoticed.
        storage.abortMultipartUpload(key, dstMultipartUpload.uploadId());
        throw new RuntimeException("uploadPartCopy " + srcKey + " -> " + key + " failed", e);
    }
}
// Server-side copy of an empty object.
@ParameterizedTest
@MethodSource("provideArguments")
public void testCopy0MB(ObjectStorage store) throws IOException {
    setEnv(store);
    testCopy("src0MB.txt", "dst0MB.txt", 0);
}
// Server-side copy of a 5 MB object.
@ParameterizedTest
@MethodSource("provideArguments")
public void testCopy5MB(ObjectStorage store) throws IOException {
    setEnv(store);
    testCopy("src5MB.txt", "dst5MB.txt", 5 << 20);
}
// Server-side copy of a 10 MB object.
@ParameterizedTest
@MethodSource("provideArguments")
public void testCopy10MB(ObjectStorage store) throws IOException {
    setEnv(store);
    testCopy("src10MB.txt", "dst10MB.txt", 10 << 20);
}
// Covers rename onto a fresh key, rename over an existing key (both size
// combinations), and the two failure cases: missing source and src == dst.
@ParameterizedTest
@MethodSource("provideArguments")
public void testRename(ObjectStorage store) throws IOException {
    setEnv(store);
    String srcKey = "src.txt";
    String dstKey = "dst.txt";
    // Rename source to a non-existent destination (256-byte and empty payloads).
    renameObject(srcKey, dstKey, 256);
    renameObject(srcKey, dstKey, 0);
    // Overwrite an existing object
    renameObjectWhenDestExist(srcKey, dstKey, 256, 0);
    renameObjectWhenDestExist(srcKey, dstKey, 0, 256);
    // Each helper deletes the objects it created, so the source must be gone by now.
    assertNull(storage.head(srcKey));
    assertThrows(RuntimeException.class, () -> storage.rename(srcKey, dstKey),
        "Source key not found");
    assertThrows(RuntimeException.class, () -> renameObject(srcKey, srcKey, 256),
        "Cannot rename to the same object");
}
/**
 * Pre-creates {@code dstKey} with {@code destSize} zero bytes, then renames
 * {@code srcKey} (of {@code srcSize} bytes) onto it via {@link #renameObject}.
 */
private void renameObjectWhenDestExist(String srcKey, String dstKey, int srcSize, int destSize)
    throws IOException {
    byte[] existing = new byte[destSize];
    storage.put(dstKey, existing, 0, destSize);
    assertArrayEquals(existing, IOUtils.toByteArray(getStream(dstKey)));
    renameObject(srcKey, dstKey, srcSize);
}
/**
 * Writes {@code fileSize} zero bytes to {@code srcKey}, renames it to {@code dstKey},
 * verifies the content moved and the source vanished, then deletes the destination.
 */
private void renameObject(String srcKey, String dstKey, int fileSize) throws IOException {
    byte[] expected = new byte[fileSize];
    storage.put(srcKey, expected, 0, fileSize);
    assertArrayEquals(expected, IOUtils.toByteArray(getStream(srcKey)));
    storage.rename(srcKey, dstKey);
    assertArrayEquals(expected, IOUtils.toByteArray(getStream(dstKey)));
    assertNull(storage.head(srcKey));
    storage.delete(dstKey);
    assertNull(storage.head(dstKey));
}
/**
 * Puts {@code fileSize} zero bytes at {@code srcKey}, copies it to {@code dstKey},
 * and verifies the destination bytes match.
 */
private void testCopy(String srcKey, String dstKey, int fileSize) throws IOException {
    byte[] expected = new byte[fileSize];
    storage.put(srcKey, expected, 0, fileSize);
    storage.copy(srcKey, dstKey);
    assertArrayEquals(expected, IOUtils.toByteArray(getStream(dstKey)));
}
/**
 * Issues a single list-objects call and returns its first response page, or an
 * empty response if the listing produced no pages at all.
 */
private ListObjectsResponse list(String prefix, String startAfter, int limit, String delimiter) {
    Preconditions.checkArgument(limit <= 1000, "Cannot list more than 1000 objects.");
    ListObjectsRequest request = ListObjectsRequest.builder()
        .prefix(prefix)
        .startAfter(startAfter)
        .maxKeys(limit)
        .delimiter(delimiter)
        .build();
    // Only the first page is of interest to callers.
    for (ListObjectsResponse page : storage.list(request)) {
        return page;
    }
    return new ListObjectsResponse(new ArrayList<>(), new ArrayList<>());
}
/** Materializes an {@link Iterable} into a {@link List}, preserving iteration order. */
private static <T> List<T> toList(final Iterable<T> iterable) {
    List<T> items = new ArrayList<>();
    for (T item : iterable) {
        items.add(item);
    }
    return items;
}
// End-to-end coverage of object tagging: put/get round-trip, missing-key errors,
// the 10-tag limit, null tag keys/values, and clearing tags via an empty map.
// Skipped for FileStore, which doesn't go through this tagging path.
@ParameterizedTest
@MethodSource("provideArguments")
public void testObjectTagging(ObjectStorage store) {
    setEnv(store);
    assumeFalse(storage.bucket().isDirectory());
    if (storage instanceof FileStore) {
        return;
    }
    // create key; random prefixes keep tag names unique across runs.
    String key = "ObjectTagging";
    String tagPrefix = "tag" + UUIDUtils.random() + "_";
    String valuePrefix = "value" + UUIDUtils.random() + "_";
    storage.put(key, new byte[0], 0, 0);
    Map<String, String> tagsMap = new HashMap<>();
    for (int i = 0; i < 10; i++) {
        tagsMap.put(tagPrefix + i, valuePrefix + i);
    }
    // 1. put and get when key exists.
    storage.putTags(key, tagsMap);
    Map<String, String> tags = storage.getTags(key);
    assertEquals(10, tags.keySet().size());
    assertTrue(Maps.difference(tagsMap, tags).areEqual());
    // 2. put and get when key doesn't exist.
    assertThrows(TosServerException.class, () -> storage.putTags("non-exist-key", tagsMap),
        "NoSuchKey");
    assertThrows(TosServerException.class, () -> storage.getTags("non-exist-key"), "doesn't exist");
    // 3. tag threshold: an 11th tag exceeds the limit of 10 and must be rejected.
    Map<String, String> bigMap = new HashMap<>(tagsMap);
    bigMap.put(tagPrefix + 11, valuePrefix + 11);
    assertThrows(RuntimeException.class, () -> storage.putTags(key, bigMap), "exceed limit of 10");
    // 4. put tag with null tagName: rejected by the server.
    Map<String, String> nullKeyTag = new HashMap<>();
    nullKeyTag.put(null, "some value");
    assertThrows(TosServerException.class, () -> storage.putTags(key, nullKeyTag),
        "TagKey you have provided is invalid");
    // 5. put tag with null value: accepted, and the value reads back as null.
    Map<String, String> nullValueTag = new HashMap<>();
    nullValueTag.put("some-key", null);
    storage.putTags(key, nullValueTag);
    assertNull(storage.getTags(key).get("some-key"));
    // 6. putting an empty map removes all existing tags.
    Map<String, String> emptyTag = new HashMap<>();
    storage.putTags(key, emptyTag);
    assertEquals(0, storage.getTags(key).size());
}
// The checksum returned by put() must match the one reported by a subsequent get(),
// both for a ranged read starting at the object's end offset and for an empty object.
@ParameterizedTest
@MethodSource("provideArguments")
public void testObjectChecksum(ObjectStorage store) throws IOException {
    setEnv(store);
    byte[] payload = TestUtility.rand(256);
    String key = "a/truncated.txt";
    // Ranged read at the end offset of a 200-byte object.
    byte[] putChecksum = storage.put(key, () -> new ByteArrayInputStream(payload), 200);
    ObjectContent content = storage.get(key, 200, -1);
    content.stream().close();
    assertArrayEquals(putChecksum, content.checksum());
    // Same contract for a zero-byte object read from offset 0.
    putChecksum = storage.put(key, () -> new ByteArrayInputStream(new byte[0]), 0);
    content = storage.get(key, 0, -1);
    content.stream().close();
    assertArrayEquals(putChecksum, content.checksum());
}
}
|
TestObjectStorage
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/junit4/statements/RunAfterTestExecutionCallbacks.java
|
{
"start": 1285,
"end": 1628
}
|
class ____ JUnit 4.9 or higher.
*
* @author Sam Brannen
* @since 5.0
* @see #evaluate()
* @see RunBeforeTestExecutionCallbacks
* @deprecated since Spring Framework 7.0 in favor of the
* {@link org.springframework.test.context.junit.jupiter.SpringExtension SpringExtension}
* and JUnit Jupiter
*/
@Deprecated(since = "7.0")
public
|
requires
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/engine/support/hierarchical/ParallelExecutionIntegrationTests.java
|
{
"start": 17586,
"end": 17852
}
|
class ____ {
static AtomicInteger sharedResource;
static CountDownLatch countDownLatch;
@BeforeAll
static void initialize() {
sharedResource = new AtomicInteger();
countDownLatch = new CountDownLatch(2);
}
@Nested
|
IsolatedNestedMethodFirstTestCase
|
java
|
apache__flink
|
flink-formats/flink-csv/src/main/java/org/apache/flink/formats/csv/RowDataToCsvConverters.java
|
{
"start": 2539,
"end": 2636
}
|
class ____ to convert from {@link RowData} to CSV-format {@link JsonNode}. * */
@Internal
public
|
used
|
java
|
netty__netty
|
codec-socks/src/test/java/io/netty/handler/codec/socksx/v5/DefaultSocks5PasswordAuthRequestTest.java
|
{
"start": 781,
"end": 3481
}
|
class ____ {
@Test
public void testConstructorParamsAreNotNull() {
try {
new DefaultSocks5PasswordAuthRequest(null, "");
} catch (Exception e) {
assertTrue(e instanceof NullPointerException);
}
try {
new DefaultSocks5PasswordAuthRequest("", null);
} catch (Exception e) {
assertTrue(e instanceof NullPointerException);
}
}
@Test
public void testUsernameOrPasswordIsNotAscii() {
try {
new DefaultSocks5PasswordAuthRequest("παράδειγμα.δοκιμή", "password");
} catch (Exception e) {
assertTrue(e instanceof IllegalArgumentException);
}
try {
new DefaultSocks5PasswordAuthRequest("username", "παράδειγμα.δοκιμή");
} catch (Exception e) {
assertTrue(e instanceof IllegalArgumentException);
}
}
@Test
public void testUsernameOrPasswordLengthIsLessThan255Chars() {
try {
new DefaultSocks5PasswordAuthRequest(
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword",
"password");
} catch (Exception e) {
assertTrue(e instanceof IllegalArgumentException);
}
try {
new DefaultSocks5PasswordAuthRequest("password",
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword" +
"passwordpasswordpasswordpasswordpasswordpasswordpassword");
} catch (Exception e) {
assertTrue(e instanceof IllegalArgumentException);
}
}
}
|
DefaultSocks5PasswordAuthRequestTest
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultComponentNameResolver.java
|
{
"start": 1155,
"end": 2070
}
|
class ____ implements ComponentNameResolver {
public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/component/*";
@Override
public Set<String> resolveNames(CamelContext camelContext) {
try {
return PluginHelper.getPackageScanResourceResolver(camelContext)
.findResources(RESOURCE_PATH)
.stream()
.map(Resource::getLocation)
// remove leading path to only keep name
// searching for last separator: Jar path separator (/), Unix path (/) and Windows path separator (\)
.map(l -> l.substring(Math.max(l.lastIndexOf('/'), l.lastIndexOf('\\')) + 1))
.collect(Collectors.toCollection(TreeSet::new));
} catch (Exception e) {
throw new RuntimeCamelException(e);
}
}
}
|
DefaultComponentNameResolver
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/FileAssertBaseTest.java
|
{
"start": 928,
"end": 1016
}
|
class ____ {@link FileAssert} tests.
*
* @author Olivier Michallat
*/
public abstract
|
for
|
java
|
spring-projects__spring-boot
|
core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/lifecycle/DockerComposeProperties.java
|
{
"start": 3457,
"end": 4409
}
|
class ____ {
/**
* Command used to start Docker Compose.
*/
private StartCommand command = StartCommand.UP;
/**
* Log level for output.
*/
private LogLevel logLevel = LogLevel.INFO;
/**
* Whether to skip executing the start command.
*/
private Skip skip = Skip.IF_RUNNING;
/**
* Arguments to pass to the start command.
*/
private final List<String> arguments = new ArrayList<>();
public StartCommand getCommand() {
return this.command;
}
public void setCommand(StartCommand command) {
this.command = command;
}
public LogLevel getLogLevel() {
return this.logLevel;
}
public void setLogLevel(LogLevel logLevel) {
this.logLevel = logLevel;
}
public Skip getSkip() {
return this.skip;
}
public void setSkip(Skip skip) {
this.skip = skip;
}
public List<String> getArguments() {
return this.arguments;
}
/**
* Start command skip mode.
*/
public
|
Start
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/ProcessRefClassTest.java
|
{
"start": 973,
"end": 1632
}
|
class ____ extends ContextTestSupport {
@Test
public void testProcessRefClass() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("CamelCamel");
template.sendBody("direct:start", "Camel");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.process("#class:org.apache.camel.processor.EchoProcessor")
.to("mock:result");
}
};
}
}
|
ProcessRefClassTest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/DaggerSuperficialValidationTest.java
|
{
"start": 5573,
"end": 5808
}
|
class ____ {",
" abstract Map<Set<?>, MissingType<?>> blah();",
"}"),
CompilerTests.kotlinSource(
"test.TestClass.kt",
"package test",
"",
"abstract
|
TestClass
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableDelaySubscriptionOther.java
|
{
"start": 3147,
"end": 4403
}
|
class ____ extends AtomicReference<Subscription> implements FlowableSubscriber<Object> {
private static final long serialVersionUID = -3892798459447644106L;
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.setOnce(this, s)) {
s.request(Long.MAX_VALUE);
}
}
@Override
public void onNext(Object t) {
Subscription s = get();
if (s != SubscriptionHelper.CANCELLED) {
lazySet(SubscriptionHelper.CANCELLED);
s.cancel();
next();
}
}
@Override
public void onError(Throwable t) {
Subscription s = get();
if (s != SubscriptionHelper.CANCELLED) {
downstream.onError(t);
} else {
RxJavaPlugins.onError(t);
}
}
@Override
public void onComplete() {
Subscription s = get();
if (s != SubscriptionHelper.CANCELLED) {
next();
}
}
}
}
}
|
OtherSubscriber
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/RequestMeta.java
|
{
"start": 296,
"end": 1150
}
|
class ____ {
private int fetchSize;
private long pageTimeoutInMs;
private long queryTimeoutInMs;
RequestMeta(int fetchSize, long pageTimeoutInMs, long queryTimeoutInMs) {
this.fetchSize = fetchSize;
this.pageTimeoutInMs = pageTimeoutInMs;
this.queryTimeoutInMs = queryTimeoutInMs;
}
RequestMeta queryTimeout(long timeout) {
this.queryTimeoutInMs = timeout;
return this;
}
RequestMeta pageTimeout(long timeout) {
this.pageTimeoutInMs = timeout;
return this;
}
RequestMeta fetchSize(int size) {
this.fetchSize = size;
return this;
}
int fetchSize() {
return fetchSize;
}
long pageTimeoutInMs() {
return pageTimeoutInMs;
}
long queryTimeoutInMs() {
return queryTimeoutInMs;
}
}
|
RequestMeta
|
java
|
elastic__elasticsearch
|
x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java
|
{
"start": 1597,
"end": 10708
}
|
class ____ extends AbstractXpackFullClusterRestartTestCase {
private static final String OLD_CLUSTER_OPEN_JOB_ID = "migration-old-cluster-open-job";
private static final String OLD_CLUSTER_STARTED_DATAFEED_ID = "migration-old-cluster-started-datafeed";
private static final String OLD_CLUSTER_CLOSED_JOB_ID = "migration-old-cluster-closed-job";
private static final String OLD_CLUSTER_STOPPED_DATAFEED_ID = "migration-old-cluster-stopped-datafeed";
public MlMigrationFullClusterRestartIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) {
super(upgradeStatus);
}
@Override
protected Settings restClientSettings() {
String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Before
public void waitForMlTemplates() throws Exception {
// We shouldn't wait for ML templates during the upgrade - production won't
if (isRunningAgainstOldCluster()) {
XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES);
}
}
private void createTestIndex() throws IOException {
Request createTestIndex = new Request("PUT", "/airline-data");
createTestIndex.setJsonEntity("""
{
"mappings": {
"doc": {
"properties": {
"time": {
"type": "date"
},
"airline": {
"type": "keyword"
},
"responsetime": {
"type": "float"
}
}
}
}
}""");
client().performRequest(createTestIndex);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36816")
public void testMigration() throws Exception {
if (isRunningAgainstOldCluster()) {
createTestIndex();
oldClusterTests();
} else {
upgradedClusterTests();
}
}
private void oldClusterTests() throws IOException {
// create jobs and datafeeds
putJob(OLD_CLUSTER_CLOSED_JOB_ID);
DatafeedConfig.Builder stoppedDfBuilder = new DatafeedConfig.Builder(OLD_CLUSTER_STOPPED_DATAFEED_ID, OLD_CLUSTER_CLOSED_JOB_ID);
stoppedDfBuilder.setIndices(Collections.singletonList("airline-data"));
Request putStoppedDatafeed = new Request("PUT", "/_xpack/ml/datafeeds/" + OLD_CLUSTER_STOPPED_DATAFEED_ID);
putStoppedDatafeed.setJsonEntity(Strings.toString(stoppedDfBuilder.build()));
client().performRequest(putStoppedDatafeed);
// open job and started datafeed
putJob(OLD_CLUSTER_OPEN_JOB_ID);
Request openOpenJob = new Request("POST", "_xpack/ml/anomaly_detectors/" + OLD_CLUSTER_OPEN_JOB_ID + "/_open");
client().performRequest(openOpenJob);
DatafeedConfig.Builder dfBuilder = new DatafeedConfig.Builder(OLD_CLUSTER_STARTED_DATAFEED_ID, OLD_CLUSTER_OPEN_JOB_ID);
dfBuilder.setIndices(Collections.singletonList("airline-data"));
addAggregations(dfBuilder);
Request putDatafeed = new Request("PUT", "_xpack/ml/datafeeds/" + OLD_CLUSTER_STARTED_DATAFEED_ID);
putDatafeed.setJsonEntity(Strings.toString(dfBuilder.build()));
client().performRequest(putDatafeed);
Request startDatafeed = new Request("POST", "_xpack/ml/datafeeds/" + OLD_CLUSTER_STARTED_DATAFEED_ID + "/_start");
client().performRequest(startDatafeed);
}
private void upgradedClusterTests() throws Exception {
waitForJobToBeAssigned(OLD_CLUSTER_OPEN_JOB_ID);
waitForDatafeedToBeAssigned(OLD_CLUSTER_STARTED_DATAFEED_ID);
// The persistent task params for the job & datafeed left open
// during upgrade should be updated with new fields
checkTaskParamsAreUpdated(OLD_CLUSTER_OPEN_JOB_ID, OLD_CLUSTER_STARTED_DATAFEED_ID);
// open the migrated job and datafeed
Request openJob = new Request("POST", "_ml/anomaly_detectors/" + OLD_CLUSTER_CLOSED_JOB_ID + "/_open");
client().performRequest(openJob);
Request startDatafeed = new Request("POST", "_ml/datafeeds/" + OLD_CLUSTER_STOPPED_DATAFEED_ID + "/_start");
client().performRequest(startDatafeed);
waitForJobToBeAssigned(OLD_CLUSTER_CLOSED_JOB_ID);
waitForDatafeedToBeAssigned(OLD_CLUSTER_STOPPED_DATAFEED_ID);
}
@SuppressWarnings("unchecked")
private void waitForJobToBeAssigned(String jobId) throws Exception {
assertBusy(() -> {
Request getJobStats = new Request("GET", "_ml/anomaly_detectors/" + jobId + "/_stats");
Response response = client().performRequest(getJobStats);
Map<String, Object> stats = entityAsMap(response);
List<Map<String, Object>> jobStats = (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", stats);
assertEquals(jobId, XContentMapValues.extractValue("job_id", jobStats.get(0)));
assertEquals("opened", XContentMapValues.extractValue("state", jobStats.get(0)));
assertThat((String) XContentMapValues.extractValue("assignment_explanation", jobStats.get(0)), is(emptyOrNullString()));
assertNotNull(XContentMapValues.extractValue("node", jobStats.get(0)));
}, 30, TimeUnit.SECONDS);
}
@SuppressWarnings("unchecked")
private void waitForDatafeedToBeAssigned(String datafeedId) throws Exception {
assertBusy(() -> {
Request getDatafeedStats = new Request("GET", "_ml/datafeeds/" + datafeedId + "/_stats");
Response response = client().performRequest(getDatafeedStats);
Map<String, Object> stats = entityAsMap(response);
List<Map<String, Object>> datafeedStats = (List<Map<String, Object>>) XContentMapValues.extractValue("datafeeds", stats);
assertEquals(datafeedId, XContentMapValues.extractValue("datafeed_id", datafeedStats.get(0)));
assertEquals("started", XContentMapValues.extractValue("state", datafeedStats.get(0)));
assertThat((String) XContentMapValues.extractValue("assignment_explanation", datafeedStats.get(0)), is(emptyOrNullString()));
assertNotNull(XContentMapValues.extractValue("node", datafeedStats.get(0)));
}, 30, TimeUnit.SECONDS);
}
@SuppressWarnings("unchecked")
private void checkTaskParamsAreUpdated(String jobId, String datafeedId) throws Exception {
Request getClusterState = new Request("GET", "/_cluster/state/metadata");
Response response = client().performRequest(getClusterState);
Map<String, Object> responseMap = entityAsMap(response);
List<Map<String, Object>> tasks = (List<Map<String, Object>>) XContentMapValues.extractValue(
"metadata.persistent_tasks.tasks",
responseMap
);
assertNotNull(tasks);
for (Map<String, Object> task : tasks) {
String id = (String) task.get("id");
assertNotNull(id);
if (id.equals(MlTasks.jobTaskId(jobId))) {
Object jobParam = XContentMapValues.extractValue("task.xpack/ml/job.params.job", task);
assertNotNull(jobParam);
} else if (id.equals(MlTasks.datafeedTaskId(datafeedId))) {
Object jobIdParam = XContentMapValues.extractValue("task.xpack/ml/datafeed.params.job_id", task);
assertNotNull(jobIdParam);
Object indices = XContentMapValues.extractValue("task.xpack/ml/datafeed.params.indices", task);
assertNotNull(indices);
}
}
}
private void addAggregations(DatafeedConfig.Builder dfBuilder) {
TermsAggregationBuilder airline = AggregationBuilders.terms("airline");
MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time").subAggregation(airline);
dfBuilder.setParsedAggregations(
AggregatorFactories.builder()
.addAggregator(AggregationBuilders.histogram("time").interval(300000).subAggregation(maxTime).field("time"))
);
}
private void putJob(String jobId) throws IOException {
String jobConfig = Strings.format("""
{
"job_id": "%s",
"analysis_config": {
"bucket_span": "10m",
"detectors": [{
"function": "metric",
"by_field_name": "airline",
"field_name": "responsetime"
}]
},
"data_description": {}`
}""", jobId);
Request putClosedJob = new Request("PUT", "/_xpack/ml/anomaly_detectors/" + jobId);
putClosedJob.setJsonEntity(jobConfig);
client().performRequest(putClosedJob);
}
}
|
MlMigrationFullClusterRestartIT
|
java
|
apache__kafka
|
server/src/main/java/org/apache/kafka/server/AssignmentsManager.java
|
{
"start": 8882,
"end": 9665
}
|
class ____ implements EventQueue.Event {
@Override
public void run() {
log.info("shutting down.");
try {
channelManager.shutdown();
} catch (Exception e) {
log.error("Unexpected exception shutting down NodeToControllerChannelManager", e);
}
try {
metricsRegistry.removeMetric(DEPRECATED_QUEUED_REPLICA_TO_DIR_ASSIGNMENTS_METRIC);
metricsRegistry.removeMetric(QUEUED_REPLICA_TO_DIR_ASSIGNMENTS_METRIC);
} catch (Exception e) {
log.error("Unexpected exception removing metrics.", e);
}
}
}
/**
* An event that processes the assignments in the ready map.
*/
private
|
ShutdownEvent
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/base/DaggerSuperficialValidation.java
|
{
"start": 4803,
"end": 5156
}
|
class ____ or throws {@link ValidationException} if it
* is not accessible in the current compilation.
*/
public static XTypeElement requireTypeElement(
XProcessingEnv processingEnv, XClassName className) {
return requireTypeElement(processingEnv, className.getCanonicalName());
}
/**
* Returns the type element with the given
|
name
|
java
|
apache__flink
|
flink-yarn-tests/src/test/java/org/apache/flink/yarn/UtilsTest.java
|
{
"start": 2369,
"end": 7709
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(UtilsTest.class);
@TempDir File temporaryFolder;
@Test
void testUberjarLocator() {
File dir = TestUtils.findFile("..", new TestUtils.RootDirFilenameFilter());
assertThat(dir).isNotNull();
assertThat(dir.getName()).endsWith(".jar");
dir = dir.getParentFile().getParentFile(); // from uberjar to lib to root
assertThat(dir).exists().isDirectory();
assertThat(dir.list()).contains("lib", "bin", "conf");
}
@Test
void testCreateTaskExecutorCredentials() throws Exception {
File root = temporaryFolder;
File home = new File(root, "home");
boolean created = home.mkdir();
assertThat(created).isTrue();
Configuration flinkConf = new Configuration();
YarnConfiguration yarnConf = new YarnConfiguration();
Map<String, String> env = new HashMap<>();
env.put(YarnConfigKeys.ENV_APP_ID, "foo");
env.put(YarnConfigKeys.ENV_CLIENT_HOME_DIR, home.getAbsolutePath());
env.put(YarnConfigKeys.ENV_CLIENT_SHIP_FILES, "");
env.put(YarnConfigKeys.ENV_FLINK_CLASSPATH, "");
env.put(YarnConfigKeys.ENV_HADOOP_USER_NAME, "foo");
env.put(
YarnConfigKeys.FLINK_DIST_JAR,
new YarnLocalResourceDescriptor(
"flink.jar",
new Path(root.toURI()),
0,
System.currentTimeMillis(),
LocalResourceVisibility.APPLICATION,
LocalResourceType.FILE)
.toString());
env.put(YarnConfigKeys.FLINK_YARN_FILES, "");
env.put(ApplicationConstants.Environment.PWD.key(), home.getAbsolutePath());
env = Collections.unmodifiableMap(env);
final YarnResourceManagerDriverConfiguration yarnResourceManagerDriverConfiguration =
new YarnResourceManagerDriverConfiguration(env, "localhost", null);
File credentialFile = temporaryFolder.toPath().resolve("container_tokens").toFile();
credentialFile.createNewFile();
final Text amRmTokenKind = AMRMTokenIdentifier.KIND_NAME;
final Text hdfsDelegationTokenKind = new Text("HDFS_DELEGATION_TOKEN");
final Text amRmTokenService = new Text("rm-ip:8030");
final Text hdfsDelegationTokenService = new Text("ha-hdfs:hadoop-namespace");
Credentials amCredentials = new Credentials();
amCredentials.addToken(
amRmTokenService,
new Token<>(new byte[4], new byte[4], amRmTokenKind, amRmTokenService));
amCredentials.addToken(
hdfsDelegationTokenService,
new Token<>(
new byte[4],
new byte[4],
hdfsDelegationTokenKind,
hdfsDelegationTokenService));
amCredentials.writeTokenStorageFile(
new org.apache.hadoop.fs.Path(credentialFile.getAbsolutePath()), yarnConf);
TaskExecutorProcessSpec spec =
TaskExecutorProcessUtils.newProcessSpecBuilder(flinkConf)
.withTotalProcessMemory(MemorySize.parse("1g"))
.build();
ContaineredTaskManagerParameters tmParams =
new ContaineredTaskManagerParameters(spec, new HashMap<>(1));
Configuration taskManagerConf = new Configuration();
String workingDirectory = root.getAbsolutePath();
Class<?> taskManagerMainClass = YarnTaskExecutorRunner.class;
ContainerLaunchContext ctx;
final Map<String, String> originalEnv = System.getenv();
try {
Map<String, String> systemEnv = new HashMap<>(originalEnv);
systemEnv.put("HADOOP_TOKEN_FILE_LOCATION", credentialFile.getAbsolutePath());
CommonTestUtils.setEnv(systemEnv);
ctx =
Utils.createTaskExecutorContext(
flinkConf,
yarnConf,
yarnResourceManagerDriverConfiguration,
tmParams,
"",
workingDirectory,
taskManagerMainClass,
LOG);
} finally {
CommonTestUtils.setEnv(originalEnv);
}
Credentials credentials = new Credentials();
try (DataInputStream dis =
new DataInputStream(new ByteArrayInputStream(ctx.getTokens().array()))) {
credentials.readTokenStorageStream(dis);
}
Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
boolean hasHdfsDelegationToken = false;
boolean hasAmRmToken = false;
for (Token<? extends TokenIdentifier> token : tokens) {
if (token.getKind().equals(amRmTokenKind)) {
hasAmRmToken = true;
} else if (token.getKind().equals(hdfsDelegationTokenKind)) {
hasHdfsDelegationToken = true;
}
}
assertThat(hasHdfsDelegationToken).isTrue();
assertThat(hasAmRmToken).isFalse();
}
}
|
UtilsTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java
|
{
"start": 460,
"end": 2044
}
|
enum ____ {
SOURCE,
DOC_VALUE,
SCRIPT_FIELD
}
/**
* @return The name of the field as expected by the user
*/
String getName();
/**
* This is the name of the field we should search for.
* In most cases this is the same as {@link #getName()}.
* However, if the field is a non-aggregatable multi-field
* we cannot retrieve it from source. Thus we search for
* its parent instead.
* @return The name of the field that is searched.
*/
String getSearchField();
/**
* @return The field types
*/
Set<String> getTypes();
/**
* @return The extraction {@link Method}
*/
Method getMethod();
/**
* Extracts the value from a {@link SearchHit}
*
* @param hit the search hit
* @param source the source supplier
* @return the extracted value
*/
Object[] value(SearchHit hit, SourceSupplier source);
/**
* @return Whether the field can be fetched from source instead
*/
boolean supportsFromSource();
/**
* @return A new extraction field that's fetching from source
*/
ExtractedField newFromSource();
/**
* @return Whether it is a multi-field
*/
boolean isMultiField();
/**
* @return The multi-field parent
*/
default String getParentField() {
throw new UnsupportedOperationException();
}
/**
* @return The doc_value format
*/
default String getDocValueFormat() {
throw new UnsupportedOperationException();
}
}
|
Method
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/JapaneseWordPieceAnalyzer.java
|
{
"start": 449,
"end": 1076
}
|
class ____ extends WordPieceAnalyzer {
public JapaneseWordPieceAnalyzer(
List<String> vocabulary,
List<String> neverSplit,
boolean doLowerCase,
boolean doStripAccents,
String unknownToken
) {
// For Japanese text with JapaneseTokenizer(morphological analyzer), always disable the punctuation (doTokenizeCjKChars=false)
super(vocabulary, neverSplit, doLowerCase, false, doStripAccents, unknownToken);
}
protected Tokenizer createTokenizer() {
return new JapaneseTokenizer(null, false, JapaneseTokenizer.Mode.SEARCH);
}
}
|
JapaneseWordPieceAnalyzer
|
java
|
elastic__elasticsearch
|
build-tools/src/main/java/org/elasticsearch/gradle/LazyPropertyList.java
|
{
"start": 757,
"end": 6142
}
|
class ____<T> extends AbstractLazyPropertyCollection implements List<T> {
private final List<PropertyListEntry<T>> delegate = new ArrayList<>();
public LazyPropertyList(String name) {
super(name);
}
public LazyPropertyList(String name, Object owner) {
super(name, owner);
}
@Override
public int size() {
return delegate.size();
}
@Override
public boolean isEmpty() {
return delegate.isEmpty();
}
@Override
public boolean contains(Object o) {
return delegate.stream().anyMatch(entry -> entry.getValue().equals(o));
}
@Override
public Iterator<T> iterator() {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).iterator();
}
@Override
public Object[] toArray() {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).toArray();
}
@Override
public <T1> T1[] toArray(T1[] a) {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).toList().toArray(a);
}
@Override
public boolean add(T t) {
return delegate.add(new PropertyListEntry<>(() -> t, PropertyNormalization.DEFAULT));
}
public boolean add(Supplier<T> supplier) {
return delegate.add(new PropertyListEntry<>(supplier, PropertyNormalization.DEFAULT));
}
public boolean add(Supplier<T> supplier, PropertyNormalization normalization) {
return delegate.add(new PropertyListEntry<>(supplier, normalization));
}
@Override
public boolean remove(Object o) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support remove()");
}
@Override
public boolean containsAll(Collection<?> c) {
return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toSet()).containsAll(c);
}
@Override
public boolean addAll(Collection<? extends T> c) {
c.forEach(this::add);
return true;
}
@Override
public boolean addAll(int index, Collection<? extends T> c) {
int i = index;
for (T item : c) {
this.add(i++, item);
}
return true;
}
@Override
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support removeAll()");
}
@Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not support retainAll()");
}
@Override
public void clear() {
delegate.clear();
}
@Override
public T get(int index) {
PropertyListEntry<T> entry = delegate.get(index);
validate(entry);
return entry.getValue();
}
@Override
public T set(int index, T element) {
return delegate.set(index, new PropertyListEntry<>(() -> element, PropertyNormalization.DEFAULT)).getValue();
}
@Override
public void add(int index, T element) {
delegate.add(index, new PropertyListEntry<>(() -> element, PropertyNormalization.DEFAULT));
}
@Override
public T remove(int index) {
return delegate.remove(index).getValue();
}
@Override
public int indexOf(Object o) {
for (int i = 0; i < delegate.size(); i++) {
if (delegate.get(i).getValue().equals(o)) {
return i;
}
}
return -1;
}
@Override
public int lastIndexOf(Object o) {
int lastIndex = -1;
for (int i = 0; i < delegate.size(); i++) {
if (delegate.get(i).getValue().equals(o)) {
lastIndex = i;
}
}
return lastIndex;
}
@Override
public ListIterator<T> listIterator() {
return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toList()).listIterator();
}
@Override
public ListIterator<T> listIterator(int index) {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).listIterator(index);
}
@Override
public List<T> subList(int fromIndex, int toIndex) {
return delegate.stream()
.peek(this::validate)
.map(PropertyListEntry::getValue)
.collect(Collectors.toList())
.subList(fromIndex, toIndex);
}
@Override
public List<? extends PropertyListEntry<T>> getNormalizedCollection() {
return delegate.stream()
.peek(this::validate)
.filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)
.collect(Collectors.toList());
}
/**
* Return a "flattened" collection. This should be used when the collection type is itself a complex type with properties
* annotated as Gradle inputs rather than a simple type like {@link String}.
*
* @return a flattened collection filtered according to normalization strategy
*/
public List<? extends T> getFlatNormalizedCollection() {
return getNormalizedCollection().stream().map(PropertyListEntry::getValue).collect(Collectors.toList());
}
private void validate(PropertyListEntry<T> entry) {
assertNotNull(entry.getValue(), "entry");
}
private
|
LazyPropertyList
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/dirty/DirtyTrackingDynamicUpdateTest.java
|
{
"start": 939,
"end": 2378
}
|
class ____ {
public static final int ID = 1;
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
TestEntity testEntity = new TestEntity( ID );
testEntity.setaSuper( "aSuper before" );
testEntity.setbSuper( "bSuper before" );
testEntity.setaChild( "aChild before" );
testEntity.setbChild( "bChild before" );
session.persist( testEntity );
}
);
}
@Test
public void testDynamicUpdate(SessionFactoryScope scope) {
String aSuperNewValue = "aSuper after";
String bSuperNewValue = "bSuper after";
String aChildNewValue = "aChild after";
String bChildNewValue = "bChild after";
scope.inTransaction(
session -> {
TestEntity entity = session.find( TestEntity.class, ID );
entity.setaSuper( aSuperNewValue );
entity.setbSuper( bSuperNewValue );
entity.setaChild( aChildNewValue );
entity.setbChild( bChildNewValue );
session.merge( entity );
}
);
scope.inTransaction(
session -> {
TestEntity entity = session.find( TestEntity.class, ID );
assertThat( entity.getaSuper() ).isEqualTo( aSuperNewValue );
assertThat( entity.getbSuper() ).isEqualTo( bSuperNewValue );
assertThat( entity.getaChild() ).isEqualTo( aChildNewValue );
assertThat( entity.getbChild() ).isEqualTo( bChildNewValue );
}
);
}
@Entity(name = "TestEntity")
public static
|
DirtyTrackingDynamicUpdateTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.