index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/retrofit/retrofit-converters/guava/src/test/java/retrofit/converter
Create_ds/retrofit/retrofit-converters/guava/src/test/java/retrofit/converter/guava/GuavaOptionalConverterFactoryTest.java
/*
 * Copyright (C) 2017 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit.converter.guava;

import static org.assertj.core.api.Assertions.assertThat;

import com.google.common.base.Optional;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.annotation.Nullable;
import okhttp3.ResponseBody;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import retrofit2.Call;
import retrofit2.Converter;
import retrofit2.Retrofit;
import retrofit2.http.GET;

/** Tests for {@link GuavaOptionalConverterFactory}. */
public final class GuavaOptionalConverterFactoryTest {
  interface Service {
    @GET("/")
    Call<Optional<Object>> optional();

    @GET("/")
    Call<Object> object();
  }

  @Rule public final MockWebServer server = new MockWebServer();

  private Service service;

  @Before
  public void setUp() {
    // AlwaysNullConverterFactory stands in as the "real" body converter; it
    // deserializes every body to null so the tests can observe the wrapping.
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(GuavaOptionalConverterFactory.create())
            .addConverterFactory(new AlwaysNullConverterFactory())
            .build();
    service = retrofit.create(Service.class);
  }

  @Test
  public void optional() throws IOException {
    server.enqueue(new MockResponse());

    // The delegate converter yields null, which the factory must wrap as absent.
    Optional<Object> optional = service.optional().execute().body();
    assertThat(optional).isNotNull();
    assertThat(optional.isPresent()).isFalse();
  }

  @Test
  public void onlyMatchesOptional() throws IOException {
    server.enqueue(new MockResponse());

    // Non-Optional types must be ignored by the factory: the delegate's null
    // comes straight through instead of being wrapped.
    Object body = service.object().execute().body();
    assertThat(body).isNull();
  }

  @Test
  public void delegates() throws IOException {
    final Object object = new Object();
    // A factory that matches only Object.class, returning a fixed instance.
    // It is registered before the Optional factory but returns null for
    // Optional<T>, so the Optional factory still claims that type and then
    // delegates the inner Object deserialization back here.
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(
                new Converter.Factory() {
                  @Nullable
                  @Override
                  public Converter<ResponseBody, Object> responseBodyConverter(
                      Type type, Annotation[] annotations, Retrofit retrofit) {
                    if (getRawType(type) != Object.class) {
                      return null;
                    }
                    return value -> object;
                  }
                })
            .addConverterFactory(GuavaOptionalConverterFactory.create())
            .build();
    server.enqueue(new MockResponse());

    Service service = retrofit.create(Service.class);
    Optional<Object> optional = service.optional().execute().body();
    assertThat(optional).isNotNull();
    // Same instance proves the inner value came from the delegate converter.
    assertThat(optional.get()).isSameAs(object);
  }
}
3,900
0
Create_ds/retrofit/retrofit-converters/guava/src/test/java/retrofit/converter
Create_ds/retrofit/retrofit-converters/guava/src/test/java/retrofit/converter/guava/AlwaysNullConverterFactory.java
/*
 * Copyright (C) 2017 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit.converter.guava;

import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;

/**
 * Test fixture: matches every response type and hands back a converter whose
 * result is always {@code null}, regardless of the body content.
 */
final class AlwaysNullConverterFactory extends Converter.Factory {
  @Override
  public Converter<ResponseBody, Object> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    return new Converter<ResponseBody, Object>() {
      @Override
      public Object convert(ResponseBody value) {
        // Deliberately ignores the body; callers only care about the null.
        return null;
      }
    };
  }
}
3,901
0
Create_ds/retrofit/retrofit-converters/guava/src/main/java/retrofit/converter
Create_ds/retrofit/retrofit-converters/guava/src/main/java/retrofit/converter/guava/OptionalConverter.java
/*
 * Copyright (C) 2017 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit.converter.guava;

import com.google.common.base.Optional;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.Converter;

/**
 * Adapts a converter producing a nullable {@code T} into one producing
 * {@code Optional<T>}: a {@code null} from the wrapped converter becomes
 * {@link Optional#absent()}, any other value is wrapped as present.
 */
final class OptionalConverter<T> implements Converter<ResponseBody, Optional<T>> {
  /** Converter responsible for deserializing the raw body into {@code T}. */
  private final Converter<ResponseBody, T> wrapped;

  OptionalConverter(Converter<ResponseBody, T> delegate) {
    this.wrapped = delegate;
  }

  @Override
  public Optional<T> convert(ResponseBody value) throws IOException {
    T converted = wrapped.convert(value);
    return Optional.fromNullable(converted);
  }
}
3,902
0
Create_ds/retrofit/retrofit-converters/guava/src/main/java/retrofit/converter
Create_ds/retrofit/retrofit-converters/guava/src/main/java/retrofit/converter/guava/GuavaOptionalConverterFactory.java
/*
 * Copyright (C) 2017 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit.converter.guava;

import com.google.common.base.Optional;
import java.lang.annotation.Annotation;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import javax.annotation.Nullable;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;

/**
 * A {@linkplain Converter.Factory converter} for {@code Optional<T>} which delegates to another
 * converter to deserialize {@code T} and then wraps it into {@link Optional}.
 */
public final class GuavaOptionalConverterFactory extends Converter.Factory {
  public static GuavaOptionalConverterFactory create() {
    return new GuavaOptionalConverterFactory();
  }

  private GuavaOptionalConverterFactory() {}

  @Override
  public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    if (getRawType(type) == Optional.class) {
      // Ask the remaining converters in the chain to handle Optional's value
      // type T, then wrap whatever they deserialize.
      Type valueType = getParameterUpperBound(0, (ParameterizedType) type);
      Converter<ResponseBody, Object> valueConverter =
          retrofit.responseBodyConverter(valueType, annotations);
      return new OptionalConverter<>(valueConverter);
    }
    // Not an Optional: let another factory claim this type.
    return null;
  }
}
3,903
0
Create_ds/retrofit/retrofit-converters/guava/src/main/java/retrofit/converter
Create_ds/retrofit/retrofit-converters/guava/src/main/java/retrofit/converter/guava/package-info.java
/** Retrofit converter wrapping deserialized bodies in Guava's {@code Optional}. */
@retrofit2.internal.EverythingIsNonNull
package retrofit.converter.guava;
3,904
0
Create_ds/retrofit/retrofit-converters/scalars/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/scalars/src/test/java/retrofit2/converter/scalars/ScalarsConverterFactoryTest.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.scalars;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;

import java.io.IOException;
import okhttp3.ResponseBody;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import retrofit2.Call;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.POST;

/** Tests for {@link ScalarsConverterFactory} with boxed types and {@code String}. */
public final class ScalarsConverterFactoryTest {
  interface Service {
    @POST("/")
    Call<ResponseBody> object(@Body Object body);

    @POST("/")
    Call<ResponseBody> stringObject(@Body String body);

    @POST("/")
    Call<ResponseBody> booleanPrimitive(@Body boolean body);

    @POST("/")
    Call<ResponseBody> booleanObject(@Body Boolean body);

    @POST("/")
    Call<ResponseBody> bytePrimitive(@Body byte body);

    @POST("/")
    Call<ResponseBody> byteObject(@Body Byte body);

    @POST("/")
    Call<ResponseBody> charPrimitive(@Body char body);

    @POST("/")
    Call<ResponseBody> charObject(@Body Character body);

    @POST("/")
    Call<ResponseBody> doublePrimitive(@Body double body);

    @POST("/")
    Call<ResponseBody> doubleObject(@Body Double body);

    @POST("/")
    Call<ResponseBody> floatPrimitive(@Body float body);

    @POST("/")
    Call<ResponseBody> floatObject(@Body Float body);

    @POST("/")
    Call<ResponseBody> integerPrimitive(@Body int body);

    @POST("/")
    Call<ResponseBody> integerObject(@Body Integer body);

    @POST("/")
    Call<ResponseBody> longPrimitive(@Body long body);

    @POST("/")
    Call<ResponseBody> longObject(@Body Long body);

    @POST("/")
    Call<ResponseBody> shortPrimitive(@Body short body);

    @POST("/")
    Call<ResponseBody> shortObject(@Body Short body);

    @GET("/")
    Call<Object> object();

    @GET("/")
    Call<String> stringObject();

    @GET("/")
    Call<Boolean> booleanObject();

    @GET("/")
    Call<Byte> byteObject();

    @GET("/")
    Call<Character> charObject();

    @GET("/")
    Call<Double> doubleObject();

    @GET("/")
    Call<Float> floatObject();

    @GET("/")
    Call<Integer> integerObject();

    @GET("/")
    Call<Long> longObject();

    @GET("/")
    Call<Short> shortObject();
  }

  @Rule public final MockWebServer server = new MockWebServer();

  private Service service;

  @Before
  public void setUp() {
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(ScalarsConverterFactory.create())
            .build();
    service = retrofit.create(Service.class);
  }

  /**
   * Takes the next recorded request and asserts it was serialized as a plain-text
   * body containing exactly {@code expectedBody}. The Content-Length of a scalar
   * body is always its (ASCII) character count, so it is derived rather than
   * repeated at every call site.
   */
  private void assertPlainTextRequest(String expectedBody) throws InterruptedException {
    RecordedRequest request = server.takeRequest();
    assertThat(request.getHeader("Content-Type")).isEqualTo("text/plain; charset=UTF-8");
    assertThat(request.getHeader("Content-Length"))
        .isEqualTo(String.valueOf(expectedBody.length()));
    assertThat(request.getBody().readUtf8()).isEqualTo(expectedBody);
  }

  @Test
  public void unsupportedRequestTypesNotMatched() {
    try {
      service.object(null);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessage(
              ""
                  + "Unable to create @Body converter for class java.lang.Object (parameter #1)\n"
                  + " for method Service.object");
      assertThat(e.getCause())
          .hasMessage(
              ""
                  + "Could not locate RequestBody converter for class java.lang.Object.\n"
                  + " Tried:\n"
                  + " * retrofit2.BuiltInConverters\n"
                  + " * retrofit2.converter.scalars.ScalarsConverterFactory\n"
                  + " * retrofit2.OptionalConverterFactory");
    }
  }

  @Test
  public void supportedRequestTypes() throws IOException, InterruptedException {
    server.enqueue(new MockResponse());
    service.stringObject("string").execute();
    assertPlainTextRequest("string");

    server.enqueue(new MockResponse());
    service.booleanPrimitive(true).execute();
    assertPlainTextRequest("true");

    server.enqueue(new MockResponse());
    service.booleanObject(false).execute();
    assertPlainTextRequest("false");

    server.enqueue(new MockResponse());
    service.bytePrimitive((byte) 0).execute();
    assertPlainTextRequest("0");

    server.enqueue(new MockResponse());
    service.byteObject((byte) 1).execute();
    assertPlainTextRequest("1");

    server.enqueue(new MockResponse());
    service.charPrimitive('a').execute();
    assertPlainTextRequest("a");

    server.enqueue(new MockResponse());
    service.charObject('b').execute();
    assertPlainTextRequest("b");

    server.enqueue(new MockResponse());
    service.doublePrimitive(2.2d).execute();
    assertPlainTextRequest("2.2");

    server.enqueue(new MockResponse());
    service.doubleObject(3.3d).execute();
    assertPlainTextRequest("3.3");

    server.enqueue(new MockResponse());
    service.floatPrimitive(4.4f).execute();
    assertPlainTextRequest("4.4");

    server.enqueue(new MockResponse());
    service.floatObject(5.5f).execute();
    assertPlainTextRequest("5.5");

    server.enqueue(new MockResponse());
    service.integerPrimitive(6).execute();
    assertPlainTextRequest("6");

    server.enqueue(new MockResponse());
    service.integerObject(7).execute();
    assertPlainTextRequest("7");

    server.enqueue(new MockResponse());
    service.longPrimitive(8L).execute();
    assertPlainTextRequest("8");

    server.enqueue(new MockResponse());
    service.longObject(9L).execute();
    assertPlainTextRequest("9");

    server.enqueue(new MockResponse());
    service.shortPrimitive((short) 10).execute();
    assertPlainTextRequest("10");

    server.enqueue(new MockResponse());
    service.shortObject((short) 11).execute();
    assertPlainTextRequest("11");
  }

  @Test
  public void unsupportedResponseTypesNotMatched() {
    try {
      service.object();
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessage(
              ""
                  + "Unable to create converter for class java.lang.Object\n"
                  + " for method Service.object");
      assertThat(e.getCause())
          .hasMessage(
              ""
                  + "Could not locate ResponseBody converter for class java.lang.Object.\n"
                  + " Tried:\n"
                  + " * retrofit2.BuiltInConverters\n"
                  + " * retrofit2.converter.scalars.ScalarsConverterFactory\n"
                  + " * retrofit2.OptionalConverterFactory");
    }
  }

  @Test
  public void supportedResponseTypes() throws IOException, InterruptedException {
    server.enqueue(new MockResponse().setBody("test"));
    Response<String> stringResponse = service.stringObject().execute();
    assertThat(stringResponse.body()).isEqualTo("test");

    server.enqueue(new MockResponse().setBody("true"));
    Response<Boolean> booleanResponse = service.booleanObject().execute();
    assertThat(booleanResponse.body()).isTrue();

    server.enqueue(new MockResponse().setBody("5"));
    Response<Byte> byteResponse = service.byteObject().execute();
    assertThat(byteResponse.body()).isEqualTo((byte) 5);

    server.enqueue(new MockResponse().setBody("b"));
    Response<Character> characterResponse = service.charObject().execute();
    assertThat(characterResponse.body()).isEqualTo('b');

    // Character conversion requires exactly one character in the body.
    server.enqueue(new MockResponse().setBody(""));
    try {
      service.charObject().execute();
      fail();
    } catch (IOException e) {
      assertThat(e).hasMessage("Expected body of length 1 for Character conversion but was 0");
    }

    server.enqueue(new MockResponse().setBody("bb"));
    try {
      service.charObject().execute();
      fail();
    } catch (IOException e) {
      assertThat(e).hasMessage("Expected body of length 1 for Character conversion but was 2");
    }

    server.enqueue(new MockResponse().setBody("13.13"));
    Response<Double> doubleResponse = service.doubleObject().execute();
    assertThat(doubleResponse.body()).isEqualTo(13.13);

    server.enqueue(new MockResponse().setBody("13.13"));
    Response<Float> floatResponse = service.floatObject().execute();
    assertThat(floatResponse.body()).isEqualTo(13.13f);

    server.enqueue(new MockResponse().setBody("13"));
    Response<Integer> integerResponse = service.integerObject().execute();
    assertThat(integerResponse.body()).isEqualTo(13);

    server.enqueue(new MockResponse().setBody("1347"));
    Response<Long> longResponse = service.longObject().execute();
    assertThat(longResponse.body()).isEqualTo(1347L);

    server.enqueue(new MockResponse().setBody("134"));
    Response<Short> shortResponse = service.shortObject().execute();
    assertThat(shortResponse.body()).isEqualTo((short) 134);
  }
}
3,905
0
Create_ds/retrofit/retrofit-converters/scalars/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/scalars/src/test/java/retrofit2/converter/scalars/ScalarsConverterPrimitivesFactoryTest.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.scalars;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import retrofit2.Call;
import retrofit2.CallAdapter;
import retrofit2.Retrofit;
import retrofit2.http.GET;

/** Tests for {@link ScalarsConverterFactory} with unboxed primitive return types. */
public final class ScalarsConverterPrimitivesFactoryTest {
  interface Service {
    @GET("/")
    boolean booleanPrimitive();

    @GET("/")
    byte bytePrimitive();

    @GET("/")
    char charPrimitive();

    @GET("/")
    double doublePrimitive();

    @GET("/")
    float floatPrimitive();

    @GET("/")
    int integerPrimitive();

    @GET("/")
    long longPrimitive();

    @GET("/")
    short shortPrimitive();
  }

  /** Unchecked wrapper so {@link DirectCallAdapterFactory} can surface IO failures. */
  static class DirectCallIOException extends RuntimeException {
    DirectCallIOException(String message, IOException e) {
      super(message, e);
    }
  }

  /**
   * Adapts every service method to execute synchronously and return the body
   * directly, allowing primitive (non-{@code Call}) return types in {@link Service}.
   */
  static class DirectCallAdapterFactory extends CallAdapter.Factory {
    @Override
    public CallAdapter<?, ?> get(
        final Type returnType, Annotation[] annotations, Retrofit retrofit) {
      return new CallAdapter<Object, Object>() {
        @Override
        public Type responseType() {
          return returnType;
        }

        @Override
        public Object adapt(Call<Object> call) { // Parameterized; was a raw Call.
          try {
            return call.execute().body();
          } catch (IOException e) {
            // Service methods cannot declare checked exceptions, so rethrow unchecked.
            throw new DirectCallIOException(e.getMessage(), e);
          }
        }
      };
    }
  }

  @Rule public final MockWebServer server = new MockWebServer();

  private Service service;

  @Before
  public void setUp() {
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(ScalarsConverterFactory.create())
            .addCallAdapterFactory(new DirectCallAdapterFactory())
            .build();
    service = retrofit.create(Service.class);
  }

  @Test
  public void supportedResponseTypes() throws IOException, InterruptedException {
    server.enqueue(new MockResponse().setBody("true"));
    boolean booleanResponse = service.booleanPrimitive();
    assertThat(booleanResponse).isTrue();

    server.enqueue(new MockResponse().setBody("5"));
    byte byteResponse = service.bytePrimitive();
    assertThat(byteResponse).isEqualTo((byte) 5);

    server.enqueue(new MockResponse().setBody("b"));
    char characterResponse = service.charPrimitive();
    assertThat(characterResponse).isEqualTo('b');

    // Character conversion requires exactly one character in the body.
    server.enqueue(new MockResponse().setBody(""));
    try {
      service.charPrimitive();
      fail();
    } catch (DirectCallIOException e) {
      assertThat(e).hasMessage("Expected body of length 1 for Character conversion but was 0");
    }

    server.enqueue(new MockResponse().setBody("bb"));
    try {
      service.charPrimitive();
      fail();
    } catch (DirectCallIOException e) {
      assertThat(e).hasMessage("Expected body of length 1 for Character conversion but was 2");
    }

    server.enqueue(new MockResponse().setBody("13.13"));
    double doubleResponse = service.doublePrimitive();
    assertThat(doubleResponse).isEqualTo(13.13);

    server.enqueue(new MockResponse().setBody("13.13"));
    float floatResponse = service.floatPrimitive();
    assertThat(floatResponse).isEqualTo(13.13f);

    server.enqueue(new MockResponse().setBody("13"));
    int integerResponse = service.integerPrimitive();
    assertThat(integerResponse).isEqualTo(13);

    server.enqueue(new MockResponse().setBody("1347"));
    long longResponse = service.longPrimitive();
    assertThat(longResponse).isEqualTo(1347L);

    server.enqueue(new MockResponse().setBody("134"));
    short shortResponse = service.shortPrimitive();
    assertThat(shortResponse).isEqualTo((short) 134);
  }
}
3,906
0
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter/scalars/ScalarResponseBodyConverters.java
/*
 * Copyright (C) 2016 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.scalars;

import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.Converter;

/**
 * Stateless singleton converters that parse a response body's string content
 * into {@code String} or a boxed primitive via the type's {@code valueOf}.
 */
final class ScalarResponseBodyConverters {
  private ScalarResponseBodyConverters() {}

  static final class StringResponseBodyConverter implements Converter<ResponseBody, String> {
    static final StringResponseBodyConverter INSTANCE = new StringResponseBodyConverter();

    @Override
    public String convert(ResponseBody value) throws IOException {
      String body = value.string();
      return body;
    }
  }

  static final class BooleanResponseBodyConverter implements Converter<ResponseBody, Boolean> {
    static final BooleanResponseBodyConverter INSTANCE = new BooleanResponseBodyConverter();

    @Override
    public Boolean convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Boolean.valueOf(body);
    }
  }

  static final class ByteResponseBodyConverter implements Converter<ResponseBody, Byte> {
    static final ByteResponseBodyConverter INSTANCE = new ByteResponseBodyConverter();

    @Override
    public Byte convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Byte.valueOf(body);
    }
  }

  static final class CharacterResponseBodyConverter implements Converter<ResponseBody, Character> {
    static final CharacterResponseBodyConverter INSTANCE = new CharacterResponseBodyConverter();

    @Override
    public Character convert(ResponseBody value) throws IOException {
      // A Character can only be produced from exactly one character of body text.
      String body = value.string();
      if (body.length() == 1) {
        return body.charAt(0);
      }
      throw new IOException(
          "Expected body of length 1 for Character conversion but was " + body.length());
    }
  }

  static final class DoubleResponseBodyConverter implements Converter<ResponseBody, Double> {
    static final DoubleResponseBodyConverter INSTANCE = new DoubleResponseBodyConverter();

    @Override
    public Double convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Double.valueOf(body);
    }
  }

  static final class FloatResponseBodyConverter implements Converter<ResponseBody, Float> {
    static final FloatResponseBodyConverter INSTANCE = new FloatResponseBodyConverter();

    @Override
    public Float convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Float.valueOf(body);
    }
  }

  static final class IntegerResponseBodyConverter implements Converter<ResponseBody, Integer> {
    static final IntegerResponseBodyConverter INSTANCE = new IntegerResponseBodyConverter();

    @Override
    public Integer convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Integer.valueOf(body);
    }
  }

  static final class LongResponseBodyConverter implements Converter<ResponseBody, Long> {
    static final LongResponseBodyConverter INSTANCE = new LongResponseBodyConverter();

    @Override
    public Long convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Long.valueOf(body);
    }
  }

  static final class ShortResponseBodyConverter implements Converter<ResponseBody, Short> {
    static final ShortResponseBodyConverter INSTANCE = new ShortResponseBodyConverter();

    @Override
    public Short convert(ResponseBody value) throws IOException {
      String body = value.string();
      return Short.valueOf(body);
    }
  }
}
3,907
0
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter/scalars/ScalarsConverterFactory.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.scalars;

import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.annotation.Nullable;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.BooleanResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.ByteResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.CharacterResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.DoubleResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.FloatResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.IntegerResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.LongResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.ShortResponseBodyConverter;
import retrofit2.converter.scalars.ScalarResponseBodyConverters.StringResponseBodyConverter;

/**
 * A {@linkplain Converter.Factory converter} for strings and both primitives and their boxed types
 * to {@code text/plain} bodies.
 */
public final class ScalarsConverterFactory extends Converter.Factory {
  public static ScalarsConverterFactory create() {
    return new ScalarsConverterFactory();
  }

  private ScalarsConverterFactory() {}

  /** Returns true if {@code type} is {@code String}, a primitive, or a boxed primitive. */
  private static boolean isScalar(Type type) {
    return type == String.class
        || type == boolean.class
        || type == Boolean.class
        || type == byte.class
        || type == Byte.class
        || type == char.class
        || type == Character.class
        || type == double.class
        || type == Double.class
        || type == float.class
        || type == Float.class
        || type == int.class
        || type == Integer.class
        || type == long.class
        || type == Long.class
        || type == short.class
        || type == Short.class;
  }

  @Override
  public @Nullable Converter<?, RequestBody> requestBodyConverter(
      Type type,
      Annotation[] parameterAnnotations,
      Annotation[] methodAnnotations,
      Retrofit retrofit) {
    // One converter serializes every scalar: it simply calls String.valueOf.
    return isScalar(type) ? ScalarRequestBodyConverter.INSTANCE : null;
  }

  @Override
  public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    if (type == String.class) {
      return StringResponseBodyConverter.INSTANCE;
    }
    if (type == boolean.class || type == Boolean.class) {
      return BooleanResponseBodyConverter.INSTANCE;
    }
    if (type == byte.class || type == Byte.class) {
      return ByteResponseBodyConverter.INSTANCE;
    }
    if (type == char.class || type == Character.class) {
      return CharacterResponseBodyConverter.INSTANCE;
    }
    if (type == double.class || type == Double.class) {
      return DoubleResponseBodyConverter.INSTANCE;
    }
    if (type == float.class || type == Float.class) {
      return FloatResponseBodyConverter.INSTANCE;
    }
    if (type == int.class || type == Integer.class) {
      return IntegerResponseBodyConverter.INSTANCE;
    }
    if (type == long.class || type == Long.class) {
      return LongResponseBodyConverter.INSTANCE;
    }
    if (type == short.class || type == Short.class) {
      return ShortResponseBodyConverter.INSTANCE;
    }
    // Not a scalar type; defer to the next converter factory.
    return null;
  }
}
3,908
0
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter/scalars/ScalarRequestBodyConverter.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.scalars;

import java.io.IOException;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import retrofit2.Converter;

/**
 * Serializes any scalar value to a UTF-8 {@code text/plain} request body by
 * rendering it with {@link String#valueOf(Object)}.
 */
final class ScalarRequestBodyConverter<T> implements Converter<T, RequestBody> {
  /** Shared stateless instance; one converter handles every scalar type. */
  static final ScalarRequestBodyConverter<Object> INSTANCE = new ScalarRequestBodyConverter<>();

  private static final MediaType MEDIA_TYPE = MediaType.get("text/plain; charset=UTF-8");

  private ScalarRequestBodyConverter() {}

  @Override
  public RequestBody convert(T value) throws IOException {
    String text = String.valueOf(value);
    return RequestBody.create(MEDIA_TYPE, text);
  }
}
3,909
0
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/scalars/src/main/java/retrofit2/converter/scalars/package-info.java
// Applies Retrofit's internal EverythingIsNonNull annotation at package scope —
// presumably marking parameters/returns in this package non-null by default; see
// retrofit2.internal.EverythingIsNonNull for the exact contract.
@retrofit2.internal.EverythingIsNonNull
package retrofit2.converter.scalars;
3,910
0
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter/jaxb/Type.java
/* * Copyright (C) 2018 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.jaxb; enum Type { OTHER, MOBILE }
3,911
0
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter/jaxb/Contact.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/** Test fixture: a {@code <contact>} element with a required name and phone numbers. */
@XmlRootElement(name = "contact")
final class Contact {
  @XmlElement(required = true)
  public final String name;

  @XmlElement(name = "phone_number")
  public final List<PhoneNumber> phone_numbers;

  @SuppressWarnings("unused") // Used by JAXB.
  private Contact() {
    this("", new ArrayList<PhoneNumber>());
  }

  public Contact(String name, List<PhoneNumber> phoneNumbers) {
    this.name = name;
    this.phone_numbers = phoneNumbers;
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof Contact)) {
      return false;
    }
    Contact that = (Contact) o;
    return that.name.equals(name) && that.phone_numbers.equals(phone_numbers);
  }

  @Override
  public int hashCode() {
    // List.hashCode over the two fields keeps equals/hashCode consistent.
    return Arrays.asList(name, phone_numbers).hashCode();
  }
}
3,912
0
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter/jaxb/JaxbConverterFactoryTest.java
/* * Copyright (C) 2018 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.jaxb; import static junit.framework.TestCase.fail; import static org.assertj.core.api.Assertions.assertThat; import java.util.Collections; import javax.xml.bind.JAXBContext; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.POST; public final class JaxbConverterFactoryTest { static final Contact SAMPLE_CONTACT = new Contact("Jenny", Collections.singletonList(new PhoneNumber("867-5309", Type.MOBILE))); static final String SAMPLE_CONTACT_XML = "" + "<?xml version=\"1.0\" ?>" + "<contact>" + "<name>Jenny</name>" + "<phone_number type=\"MOBILE\">" + "<number>867-5309</number>" + "</phone_number>" + "</contact>"; interface Service { @POST("/") Call<Void> postXml(@Body Contact contact); @GET("/") Call<Contact> getXml(); } @Rule public final MockWebServer server = new MockWebServer(); private Service service; @Before public void setUp() { JaxbConverterFactory factory = JaxbConverterFactory.create(); Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factory).build(); service = retrofit.create(Service.class); } @Test public void 
xmlRequestBody() throws Exception { server.enqueue(new MockResponse()); Call<Void> call = service.postXml(SAMPLE_CONTACT); call.execute(); RecordedRequest request = server.takeRequest(); assertThat(request.getHeader("Content-Type")).isEqualTo("application/xml; charset=utf-8"); assertThat(request.getBody().readUtf8()).isEqualTo(SAMPLE_CONTACT_XML); } @Test public void xmlResponseBody() throws Exception { server.enqueue(new MockResponse().setBody(SAMPLE_CONTACT_XML)); Call<Contact> call = service.getXml(); Response<Contact> response = call.execute(); assertThat(response.body()).isEqualTo(SAMPLE_CONTACT); } @Test public void characterEncoding() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<contact>" + "<name>Бронтозавр \uD83E\uDD95 ティラノサウルス・レックス &#129430;</name>" + "</contact>")); Call<Contact> call = service.getXml(); Response<Contact> response = call.execute(); assertThat(response.body().name) .isEqualTo("Бронтозавр \uD83E\uDD95 ティラノサウルス・レックス \uD83E\uDD96"); } @Test public void userSuppliedJaxbContext() throws Exception { JAXBContext context = JAXBContext.newInstance(Contact.class); JaxbConverterFactory factory = JaxbConverterFactory.create(context); Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factory).build(); service = retrofit.create(Service.class); server.enqueue(new MockResponse()); Call<Void> call = service.postXml(SAMPLE_CONTACT); call.execute(); RecordedRequest request = server.takeRequest(); assertThat(request.getHeader("Content-Type")).isEqualTo("application/xml; charset=utf-8"); assertThat(request.getBody().readUtf8()).isEqualTo(SAMPLE_CONTACT_XML); } @Test public void malformedXml() throws Exception { server.enqueue(new MockResponse().setBody("This is not XML")); Call<Contact> call = service.getXml(); try { call.execute(); fail(); } catch (RuntimeException expected) { assertThat(expected).hasMessageContaining("ParseError"); } } @Test public void 
unrecognizedField() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<contact>" + "<name>Jenny</name>" + "<age>21</age>" + "<phone_number type=\"FAX\">" + "<number>867-5309</number>" + "</phone_number>" + "</contact>")); Call<Contact> call = service.getXml(); Response<Contact> response = call.execute(); assertThat(response.body().name).isEqualTo("Jenny"); } @Test public void externalEntity() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<!DOCTYPE contact[" + " <!ENTITY secret SYSTEM \"" + server.url("/secret.txt") + "\">" + "]>" + "<contact>" + "<name>&secret;</name>" + "</contact>")); server.enqueue(new MockResponse().setBody("hello")); Call<Contact> call = service.getXml(); try { Response<Contact> response = call.execute(); response.body(); fail(); } catch (RuntimeException expected) { assertThat(expected).hasMessageContaining("ParseError"); } assertThat(server.getRequestCount()).isEqualTo(1); } @Test public void externalDtd() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<!DOCTYPE contact SYSTEM \"" + server.url("/contact.dtd") + "\">" + "<contact>" + "<name>&secret;</name>" + "</contact>")); server.enqueue( new MockResponse() .setBody( "" + "<!ELEMENT contact (name)>\n" + "<!ELEMENT name (#PCDATA)>\n" + "<!ENTITY secret \"hello\">")); Call<Contact> call = service.getXml(); try { Response<Contact> response = call.execute(); response.body(); fail(); } catch (RuntimeException expected) { assertThat(expected).hasMessageContaining("ParseError"); } assertThat(server.getRequestCount()).isEqualTo(1); } }
3,913
0
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/test/java/retrofit2/converter/jaxb/PhoneNumber.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import java.util.Arrays;
import java.util.Objects;
import javax.annotation.Nullable;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;

/** Test fixture: a {@code <phone_number>} element with a required number and optional type. */
final class PhoneNumber {
  @XmlElement(required = true)
  public final String number;

  // Nullable: the public constructor explicitly accepts a @Nullable Type.
  @XmlAttribute public final Type type;

  @SuppressWarnings("unused") // Used by JAXB.
  private PhoneNumber() {
    this("", Type.OTHER);
  }

  PhoneNumber(String number, @Nullable Type type) {
    this.number = number;
    this.type = type;
  }

  @Override
  public boolean equals(Object o) {
    // Fix: 'type' may be null (see constructor), so compare with Objects.equals
    // instead of type.equals(...), which threw NPE for a null type on either side.
    return o instanceof PhoneNumber
        && ((PhoneNumber) o).number.equals(number)
        && Objects.equals(((PhoneNumber) o).type, type);
  }

  @Override
  public int hashCode() {
    // Arrays.asList(...).hashCode() is null-tolerant, matching the null-safe equals.
    return Arrays.asList(number, type).hashCode();
  }
}
3,914
0
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter/jaxb/JaxbConverterFactory.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.annotation.Nullable;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.annotation.XmlRootElement;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;

/**
 * A {@linkplain Converter.Factory converter} which uses JAXB for XML. All validation events are
 * ignored.
 */
public final class JaxbConverterFactory extends Converter.Factory {
  static final MediaType XML = MediaType.get("application/xml; charset=utf-8");

  /** Create an instance using a default {@link JAXBContext} instance for conversion. */
  public static JaxbConverterFactory create() {
    return new JaxbConverterFactory(null);
  }

  /** Create an instance using {@code context} for conversion. */
  @SuppressWarnings("ConstantConditions") // Guarding public API nullability.
  public static JaxbConverterFactory create(JAXBContext context) {
    if (context == null) throw new NullPointerException("context == null");
    return new JaxbConverterFactory(context);
  }

  /** If null, a new JAXB context will be created for each type to be converted. */
  private final @Nullable JAXBContext context;

  private JaxbConverterFactory(@Nullable JAXBContext context) {
    this.context = context;
  }

  @Override
  public @Nullable Converter<?, RequestBody> requestBodyConverter(
      Type type,
      Annotation[] parameterAnnotations,
      Annotation[] methodAnnotations,
      Retrofit retrofit) {
    Class<?> rawType = xmlRootClassOrNull(type);
    if (rawType == null) return null;
    return new JaxbRequestConverter<>(contextForType(rawType), rawType);
  }

  @Override
  public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    Class<?> rawType = xmlRootClassOrNull(type);
    if (rawType == null) return null;
    return new JaxbResponseConverter<>(contextForType(rawType), rawType);
  }

  /** Returns {@code type} as a class if it is annotated {@code @XmlRootElement}, else null. */
  private static @Nullable Class<?> xmlRootClassOrNull(Type type) {
    if (type instanceof Class && ((Class<?>) type).isAnnotationPresent(XmlRootElement.class)) {
      return (Class<?>) type;
    }
    return null;
  }

  private JAXBContext contextForType(Class<?> type) {
    try {
      // Fall back to a fresh per-type context when none was supplied at create().
      return context != null ? context : JAXBContext.newInstance(type);
    } catch (JAXBException e) {
      throw new IllegalArgumentException(e);
    }
  }
}
3,915
0
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter/jaxb/JaxbRequestConverter.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import java.io.IOException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import okhttp3.RequestBody;
import okio.Buffer;
import retrofit2.Converter;

/** Marshals a JAXB-annotated value of type {@code T} into an XML request body. */
final class JaxbRequestConverter<T> implements Converter<T, RequestBody> {
  final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newInstance();
  final JAXBContext context;
  final Class<T> type;

  JaxbRequestConverter(JAXBContext context, Class<T> type) {
    this.context = context;
    this.type = type;
  }

  @Override
  public RequestBody convert(final T value) throws IOException {
    Buffer sink = new Buffer();
    try {
      // Write through a stream writer using the charset declared by the XML media type.
      String charsetName = JaxbConverterFactory.XML.charset().name();
      XMLStreamWriter writer =
          xmlOutputFactory.createXMLStreamWriter(sink.outputStream(), charsetName);
      Marshaller marshaller = context.createMarshaller();
      marshaller.marshal(value, writer);
    } catch (JAXBException | XMLStreamException e) {
      throw new RuntimeException(e);
    }
    return RequestBody.create(JaxbConverterFactory.XML, sink.readByteString());
  }
}
3,916
0
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter/jaxb/JaxbResponseConverter.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import java.io.IOException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import okhttp3.ResponseBody;
import retrofit2.Converter;

/** Unmarshals an XML response body into a JAXB-annotated value of type {@code T}. */
final class JaxbResponseConverter<T> implements Converter<ResponseBody, T> {
  final XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance();
  final JAXBContext context;
  final Class<T> type;

  JaxbResponseConverter(JAXBContext context, Class<T> type) {
    this.context = context;
    this.type = type;

    // Prevent XML External Entity attacks (XXE).
    xmlInputFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
    xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
  }

  @Override
  public T convert(ResponseBody value) throws IOException {
    try {
      XMLStreamReader reader = xmlInputFactory.createXMLStreamReader(value.charStream());
      Unmarshaller unmarshaller = context.createUnmarshaller();
      return unmarshaller.unmarshal(reader, type).getValue();
    } catch (JAXBException | XMLStreamException e) {
      throw new RuntimeException(e);
    } finally {
      // Always release the response body, even when parsing fails.
      value.close();
    }
  }
}
3,917
0
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb/src/main/java/retrofit2/converter/jaxb/package-info.java
// Applies Retrofit's internal EverythingIsNonNull annotation at package scope —
// presumably marking parameters/returns in this package non-null by default; see
// retrofit2.internal.EverythingIsNonNull for the exact contract.
@retrofit2.internal.EverythingIsNonNull
package retrofit2.converter.jaxb;
3,918
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter/jaxb/Type.java
/* * Copyright (C) 2018 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.jaxb; enum Type { OTHER, MOBILE }
3,919
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter/jaxb/Contact.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/** Test fixture: a {@code <contact>} element with a required name and phone numbers. */
@XmlRootElement(name = "contact")
final class Contact {
  @XmlElement(required = true)
  public final String name;

  @XmlElement(name = "phone_number")
  public final List<PhoneNumber> phone_numbers;

  @SuppressWarnings("unused") // Used by JAXB.
  private Contact() {
    this("", new ArrayList<PhoneNumber>());
  }

  public Contact(String name, List<PhoneNumber> phoneNumbers) {
    this.name = name;
    this.phone_numbers = phoneNumbers;
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof Contact)) {
      return false;
    }
    Contact that = (Contact) o;
    return that.name.equals(name) && that.phone_numbers.equals(phone_numbers);
  }

  @Override
  public int hashCode() {
    // List.hashCode over the two fields keeps equals/hashCode consistent.
    return Arrays.asList(name, phone_numbers).hashCode();
  }
}
3,920
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter/jaxb/JaxbConverterFactoryTest.java
/* * Copyright (C) 2018 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.jaxb; import static junit.framework.TestCase.fail; import static org.assertj.core.api.Assertions.assertThat; import jakarta.xml.bind.JAXBContext; import java.util.Collections; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.POST; public final class JaxbConverterFactoryTest { static final Contact SAMPLE_CONTACT = new Contact("Jenny", Collections.singletonList(new PhoneNumber("867-5309", Type.MOBILE))); static final String SAMPLE_CONTACT_XML = "" + "<?xml version=\"1.0\" ?>" + "<contact>" + "<name>Jenny</name>" + "<phone_number type=\"MOBILE\">" + "<number>867-5309</number>" + "</phone_number>" + "</contact>"; interface Service { @POST("/") Call<Void> postXml(@Body Contact contact); @GET("/") Call<Contact> getXml(); } @Rule public final MockWebServer server = new MockWebServer(); private Service service; @Before public void setUp() { JaxbConverterFactory factory = JaxbConverterFactory.create(); Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factory).build(); service = retrofit.create(Service.class); } @Test public void 
xmlRequestBody() throws Exception { server.enqueue(new MockResponse()); Call<Void> call = service.postXml(SAMPLE_CONTACT); call.execute(); RecordedRequest request = server.takeRequest(); assertThat(request.getHeader("Content-Type")).isEqualTo("application/xml; charset=utf-8"); assertThat(request.getBody().readUtf8()).isEqualTo(SAMPLE_CONTACT_XML); } @Test public void xmlResponseBody() throws Exception { server.enqueue(new MockResponse().setBody(SAMPLE_CONTACT_XML)); Call<Contact> call = service.getXml(); Response<Contact> response = call.execute(); assertThat(response.body()).isEqualTo(SAMPLE_CONTACT); } @Test public void characterEncoding() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<contact>" + "<name>Бронтозавр \uD83E\uDD95 ティラノサウルス・レックス &#129430;</name>" + "</contact>")); Call<Contact> call = service.getXml(); Response<Contact> response = call.execute(); assertThat(response.body().name) .isEqualTo("Бронтозавр \uD83E\uDD95 ティラノサウルス・レックス \uD83E\uDD96"); } @Test public void userSuppliedJaxbContext() throws Exception { JAXBContext context = JAXBContext.newInstance(Contact.class); JaxbConverterFactory factory = JaxbConverterFactory.create(context); Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factory).build(); service = retrofit.create(Service.class); server.enqueue(new MockResponse()); Call<Void> call = service.postXml(SAMPLE_CONTACT); call.execute(); RecordedRequest request = server.takeRequest(); assertThat(request.getHeader("Content-Type")).isEqualTo("application/xml; charset=utf-8"); assertThat(request.getBody().readUtf8()).isEqualTo(SAMPLE_CONTACT_XML); } @Test public void malformedXml() throws Exception { server.enqueue(new MockResponse().setBody("This is not XML")); Call<Contact> call = service.getXml(); try { call.execute(); fail(); } catch (RuntimeException expected) { assertThat(expected).hasMessageContaining("ParseError"); } } @Test public void 
unrecognizedField() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<contact>" + "<name>Jenny</name>" + "<age>21</age>" + "<phone_number type=\"FAX\">" + "<number>867-5309</number>" + "</phone_number>" + "</contact>")); Call<Contact> call = service.getXml(); Response<Contact> response = call.execute(); assertThat(response.body().name).isEqualTo("Jenny"); } @Test public void externalEntity() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<!DOCTYPE contact[" + " <!ENTITY secret SYSTEM \"" + server.url("/secret.txt") + "\">" + "]>" + "<contact>" + "<name>&secret;</name>" + "</contact>")); server.enqueue(new MockResponse().setBody("hello")); Call<Contact> call = service.getXml(); try { Response<Contact> response = call.execute(); response.body(); fail(); } catch (RuntimeException expected) { assertThat(expected).hasMessageContaining("ParseError"); } assertThat(server.getRequestCount()).isEqualTo(1); } @Test public void externalDtd() throws Exception { server.enqueue( new MockResponse() .setBody( "" + "<?xml version=\"1.0\" ?>" + "<!DOCTYPE contact SYSTEM \"" + server.url("/contact.dtd") + "\">" + "<contact>" + "<name>&secret;</name>" + "</contact>")); server.enqueue( new MockResponse() .setBody( "" + "<!ELEMENT contact (name)>\n" + "<!ELEMENT name (#PCDATA)>\n" + "<!ENTITY secret \"hello\">")); Call<Contact> call = service.getXml(); try { Response<Contact> response = call.execute(); response.body(); fail(); } catch (RuntimeException expected) { assertThat(expected).hasMessageContaining("ParseError"); } assertThat(server.getRequestCount()).isEqualTo(1); } }
3,921
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/test/java/retrofit2/converter/jaxb/PhoneNumber.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import java.util.Arrays;
import java.util.Objects;
import javax.annotation.Nullable;

/** Test fixture: a {@code <phone_number>} element with a required number and optional type. */
final class PhoneNumber {
  @XmlElement(required = true)
  public final String number;

  // Nullable: the public constructor explicitly accepts a @Nullable Type.
  @XmlAttribute public final Type type;

  @SuppressWarnings("unused") // Used by JAXB.
  private PhoneNumber() {
    this("", Type.OTHER);
  }

  PhoneNumber(String number, @Nullable Type type) {
    this.number = number;
    this.type = type;
  }

  @Override
  public boolean equals(Object o) {
    // Fix: 'type' may be null (see constructor), so compare with Objects.equals
    // instead of type.equals(...), which threw NPE for a null type on either side.
    return o instanceof PhoneNumber
        && ((PhoneNumber) o).number.equals(number)
        && Objects.equals(((PhoneNumber) o).type, type);
  }

  @Override
  public int hashCode() {
    // Arrays.asList(...).hashCode() is null-tolerant, matching the null-safe equals.
    return Arrays.asList(number, type).hashCode();
  }
}
3,922
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter/jaxb/JaxbConverterFactory.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import jakarta.xml.bind.JAXBContext;
import jakarta.xml.bind.JAXBException;
import jakarta.xml.bind.annotation.XmlRootElement;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.annotation.Nullable;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;

/**
 * A {@linkplain Converter.Factory converter} which uses JAXB for XML. All validation events are
 * ignored.
 */
public final class JaxbConverterFactory extends Converter.Factory {
  static final MediaType XML = MediaType.get("application/xml; charset=utf-8");

  /** Create an instance using a default {@link JAXBContext} instance for conversion. */
  public static JaxbConverterFactory create() {
    return new JaxbConverterFactory(null);
  }

  /** Create an instance using {@code context} for conversion. */
  @SuppressWarnings("ConstantConditions") // Guarding public API nullability.
  public static JaxbConverterFactory create(JAXBContext context) {
    if (context == null) throw new NullPointerException("context == null");
    return new JaxbConverterFactory(context);
  }

  /** If null, a new JAXB context will be created for each type to be converted. */
  private final @Nullable JAXBContext context;

  private JaxbConverterFactory(@Nullable JAXBContext context) {
    this.context = context;
  }

  @Override
  public @Nullable Converter<?, RequestBody> requestBodyConverter(
      Type type,
      Annotation[] parameterAnnotations,
      Annotation[] methodAnnotations,
      Retrofit retrofit) {
    Class<?> rawType = xmlRootClassOrNull(type);
    if (rawType == null) return null;
    return new JaxbRequestConverter<>(contextForType(rawType), rawType);
  }

  @Override
  public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    Class<?> rawType = xmlRootClassOrNull(type);
    if (rawType == null) return null;
    return new JaxbResponseConverter<>(contextForType(rawType), rawType);
  }

  /** Returns {@code type} as a class if it is annotated {@code @XmlRootElement}, else null. */
  private static @Nullable Class<?> xmlRootClassOrNull(Type type) {
    if (type instanceof Class && ((Class<?>) type).isAnnotationPresent(XmlRootElement.class)) {
      return (Class<?>) type;
    }
    return null;
  }

  private JAXBContext contextForType(Class<?> type) {
    try {
      // Fall back to a fresh per-type context when none was supplied at create().
      return context != null ? context : JAXBContext.newInstance(type);
    } catch (JAXBException e) {
      throw new IllegalArgumentException(e);
    }
  }
}
3,923
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter/jaxb/JaxbRequestConverter.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import jakarta.xml.bind.JAXBContext;
import jakarta.xml.bind.JAXBException;
import jakarta.xml.bind.Marshaller;
import java.io.IOException;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import okhttp3.RequestBody;
import okio.Buffer;
import retrofit2.Converter;

/** Marshals a JAXB-annotated value of type {@code T} into an XML request body. */
final class JaxbRequestConverter<T> implements Converter<T, RequestBody> {
  final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newInstance();
  final JAXBContext context;
  final Class<T> type;

  JaxbRequestConverter(JAXBContext context, Class<T> type) {
    this.context = context;
    this.type = type;
  }

  @Override
  public RequestBody convert(final T value) throws IOException {
    Buffer sink = new Buffer();
    try {
      // Write through a stream writer using the charset declared by the XML media type.
      String charsetName = JaxbConverterFactory.XML.charset().name();
      XMLStreamWriter writer =
          xmlOutputFactory.createXMLStreamWriter(sink.outputStream(), charsetName);
      Marshaller marshaller = context.createMarshaller();
      marshaller.marshal(value, writer);
    } catch (JAXBException | XMLStreamException e) {
      throw new RuntimeException(e);
    }
    return RequestBody.create(JaxbConverterFactory.XML, sink.readByteString());
  }
}
3,924
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter/jaxb/JaxbResponseConverter.java
/*
 * Copyright (C) 2018 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jaxb;

import jakarta.xml.bind.JAXBContext;
import jakarta.xml.bind.JAXBException;
import jakarta.xml.bind.Unmarshaller;
import java.io.IOException;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import okhttp3.ResponseBody;
import retrofit2.Converter;

/** Unmarshals an XML response body into a JAXB-annotated value of type {@code T}. */
final class JaxbResponseConverter<T> implements Converter<ResponseBody, T> {
  final XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance();
  final JAXBContext context;
  final Class<T> type;

  JaxbResponseConverter(JAXBContext context, Class<T> type) {
    this.context = context;
    this.type = type;

    // Prevent XML External Entity attacks (XXE).
    xmlInputFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
    xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
  }

  @Override
  public T convert(ResponseBody value) throws IOException {
    try {
      XMLStreamReader reader = xmlInputFactory.createXMLStreamReader(value.charStream());
      Unmarshaller unmarshaller = context.createUnmarshaller();
      return unmarshaller.unmarshal(reader, type).getValue();
    } catch (JAXBException | XMLStreamException e) {
      throw new RuntimeException(e);
    } finally {
      // Always release the response body, even when parsing fails.
      value.close();
    }
  }
}
3,925
0
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jaxb3/src/main/java/retrofit2/converter/jaxb/package-info.java
/* Package-wide nullability default: parameters and returns in this package are treated as
   non-null unless explicitly annotated otherwise. */
@retrofit2.internal.EverythingIsNonNull package retrofit2.converter.jaxb;
3,926
0
Create_ds/retrofit/retrofit-converters/moshi/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/moshi/src/test/java/retrofit2/converter/moshi/MoshiConverterFactoryTest.java
/*
 * NOTE(review): this test class was flattened onto a few physical lines by extraction; the
 * code bytes below are preserved exactly as found. Beware that the original "@NonQualifer //"
 * line-break marker comment now swallows the `Call<String> annotations(...)` declaration that
 * follows it on the same physical line — reflow before compiling. TODO confirm against the
 * upstream retrofit repository layout.
 */
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.moshi; import static java.lang.annotation.RetentionPolicy.RUNTIME; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import com.squareup.moshi.FromJson; import com.squareup.moshi.JsonDataException; import com.squareup.moshi.JsonQualifier; import com.squareup.moshi.JsonReader; import com.squareup.moshi.JsonWriter; import com.squareup.moshi.Moshi; import com.squareup.moshi.ToJson; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.annotation.Retention; import java.nio.charset.StandardCharsets; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import okio.Buffer; import okio.ByteString; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.POST; public final class MoshiConverterFactoryTest { @Retention(RUNTIME) @JsonQualifier @interface Qualifier {} @Retention(RUNTIME) @interface NonQualifer {} interface AnInterface { String getName(); } static class AnImplementation implements AnInterface { private final String theName; AnImplementation(String name) { theName = name; } @Override public String 
getName() { return theName; } } static final class Value { final String theName; Value(String theName) { this.theName = theName; } } static class Adapters { @ToJson public void write(JsonWriter jsonWriter, AnInterface anInterface) throws IOException { jsonWriter.beginObject(); jsonWriter.name("name").value(anInterface.getName()); jsonWriter.endObject(); } @FromJson public AnInterface read(JsonReader jsonReader) throws IOException { jsonReader.beginObject(); String name = null; while (jsonReader.hasNext()) { switch (jsonReader.nextName()) { case "name": name = jsonReader.nextString(); break; } } jsonReader.endObject(); return new AnImplementation(name); } @ToJson public void write(JsonWriter writer, @Qualifier String value) throws IOException { writer.value("qualified!"); } @FromJson @Qualifier public String readQualified(JsonReader reader) throws IOException { String string = reader.nextString(); if (string.equals("qualified!")) { return "it worked!"; } throw new AssertionError("Found: " + string); } @FromJson public Value readWithoutEndingObject(JsonReader reader) throws IOException { reader.beginObject(); reader.skipName(); String theName = reader.nextString(); return new Value(theName); } } interface Service { @POST("/") Call<AnImplementation> anImplementation(@Body AnImplementation impl); @POST("/") Call<AnInterface> anInterface(@Body AnInterface impl); @GET("/") Call<Value> value(); @POST("/") @Qualifier @NonQualifer // Call<String> annotations(@Body @Qualifier @NonQualifer String body); } @Rule public final MockWebServer server = new MockWebServer(); private Service service; private Service serviceLenient; private Service serviceNulls; private Service serviceFailOnUnknown; @Before public void setUp() { Moshi moshi = new Moshi.Builder() .add( (type, annotations, moshi1) -> { for (Annotation annotation : annotations) { if (!annotation.annotationType().isAnnotationPresent(JsonQualifier.class)) { throw new AssertionError("Non-@JsonQualifier annotation: " + 
annotation); } } return null; }) .add(new Adapters()) .build(); MoshiConverterFactory factory = MoshiConverterFactory.create(moshi); MoshiConverterFactory factoryLenient = factory.asLenient(); MoshiConverterFactory factoryNulls = factory.withNullSerialization(); MoshiConverterFactory factoryFailOnUnknown = factory.failOnUnknown(); Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factory).build(); Retrofit retrofitLenient = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factoryLenient).build(); Retrofit retrofitNulls = new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factoryNulls).build(); Retrofit retrofitFailOnUnknown = new Retrofit.Builder() .baseUrl(server.url("/")) .addConverterFactory(factoryFailOnUnknown) .build(); service = retrofit.create(Service.class); serviceLenient = retrofitLenient.create(Service.class); serviceNulls = retrofitNulls.create(Service.class); serviceFailOnUnknown = retrofitFailOnUnknown.create(Service.class); } @Test public void anInterface() throws IOException, InterruptedException { server.enqueue(new MockResponse().setBody("{\"name\":\"value\"}")); Call<AnInterface> call = service.anInterface(new AnImplementation("value")); Response<AnInterface> response = call.execute(); AnInterface body = response.body(); assertThat(body.getName()).isEqualTo("value"); RecordedRequest request = server.takeRequest(); assertThat(request.getBody().readUtf8()).isEqualTo("{\"name\":\"value\"}"); assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8"); } @Test public void anImplementation() throws IOException, InterruptedException { server.enqueue(new MockResponse().setBody("{\"theName\":\"value\"}")); Call<AnImplementation> call = service.anImplementation(new AnImplementation("value")); Response<AnImplementation> response = call.execute(); AnImplementation body = response.body(); assertThat(body.theName).isEqualTo("value"); RecordedRequest 
request = server.takeRequest(); assertThat(request.getBody().readUtf8()).isEqualTo("{\"theName\":\"value\"}"); assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8"); } @Test public void annotations() throws IOException, InterruptedException { server.enqueue(new MockResponse().setBody("\"qualified!\"")); Call<String> call = service.annotations("value"); Response<String> response = call.execute(); assertThat(response.body()).isEqualTo("it worked!"); RecordedRequest request = server.takeRequest(); assertThat(request.getBody().readUtf8()).isEqualTo("\"qualified!\""); assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8"); } @Test public void asLenient() throws IOException, InterruptedException { MockResponse malformedResponse = new MockResponse().setBody("{\"theName\":value}"); server.enqueue(malformedResponse); server.enqueue(malformedResponse); Call<AnImplementation> call = service.anImplementation(new AnImplementation("value")); try { call.execute(); fail(); } catch (IOException e) { assertEquals( e.getMessage(), "Use JsonReader.setLenient(true) to accept malformed JSON at path $.theName"); } Call<AnImplementation> call2 = serviceLenient.anImplementation(new AnImplementation("value")); Response<AnImplementation> response = call2.execute(); AnImplementation body = response.body(); assertThat(body.theName).isEqualTo("value"); } @Test public void withNulls() throws IOException, InterruptedException { server.enqueue(new MockResponse().setBody("{}")); Call<AnImplementation> call = serviceNulls.anImplementation(new AnImplementation(null)); call.execute(); assertEquals("{\"theName\":null}", server.takeRequest().getBody().readUtf8()); } @Test public void failOnUnknown() throws IOException, InterruptedException { server.enqueue(new MockResponse().setBody("{\"taco\":\"delicious\"}")); Call<AnImplementation> call = serviceFailOnUnknown.anImplementation(new AnImplementation(null)); try { call.execute(); 
fail(); } catch (JsonDataException e) { assertThat(e).hasMessage("Cannot skip unexpected NAME at $.taco"); } } @Test public void utf8BomSkipped() throws IOException { Buffer responseBody = new Buffer().write(ByteString.decodeHex("EFBBBF")).writeUtf8("{\"theName\":\"value\"}"); MockResponse malformedResponse = new MockResponse().setBody(responseBody); server.enqueue(malformedResponse); Call<AnImplementation> call = service.anImplementation(new AnImplementation("value")); Response<AnImplementation> response = call.execute(); AnImplementation body = response.body(); assertThat(body.theName).isEqualTo("value"); } @Test public void nonUtf8BomIsNotSkipped() throws IOException { Buffer responseBody = new Buffer() .write(ByteString.decodeHex("FEFF")) .writeString("{\"theName\":\"value\"}", StandardCharsets.UTF_16); MockResponse malformedResponse = new MockResponse().setBody(responseBody); server.enqueue(malformedResponse); Call<AnImplementation> call = service.anImplementation(new AnImplementation("value")); try { call.execute(); fail(); } catch (IOException expected) { } } @Test public void requireFullResponseDocumentConsumption() throws Exception { server.enqueue(new MockResponse().setBody("{\"theName\":\"value\"}")); Call<Value> call = service.value(); try { call.execute(); fail(); } catch (JsonDataException e) { assertThat(e).hasMessage("JSON document was not fully consumed."); } } }
3,927
0
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter/moshi/MoshiConverterFactory.java
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.moshi; import static java.util.Collections.unmodifiableSet; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.JsonQualifier; import com.squareup.moshi.Moshi; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import java.util.Collections; import java.util.LinkedHashSet; import java.util.Set; import okhttp3.RequestBody; import okhttp3.ResponseBody; import retrofit2.Converter; import retrofit2.Retrofit; /** * A {@linkplain Converter.Factory converter} which uses Moshi for JSON. * * <p>Because Moshi is so flexible in the types it supports, this converter assumes that it can * handle all types. If you are mixing JSON serialization with something else (such as protocol * buffers), you must {@linkplain Retrofit.Builder#addConverterFactory(Converter.Factory) add this * instance} last to allow the other converters a chance to see their types. * * <p>Any {@link JsonQualifier @JsonQualifier}-annotated annotations on the parameter will be used * when looking up a request body converter and those on the method will be used when looking up a * response body converter. */ public final class MoshiConverterFactory extends Converter.Factory { /** Create an instance using a default {@link Moshi} instance for conversion. 
*/ public static MoshiConverterFactory create() { return create(new Moshi.Builder().build()); } /** Create an instance using {@code moshi} for conversion. */ @SuppressWarnings("ConstantConditions") // Guarding public API nullability. public static MoshiConverterFactory create(Moshi moshi) { if (moshi == null) throw new NullPointerException("moshi == null"); return new MoshiConverterFactory(moshi, false, false, false); } private final Moshi moshi; private final boolean lenient; private final boolean failOnUnknown; private final boolean serializeNulls; private MoshiConverterFactory( Moshi moshi, boolean lenient, boolean failOnUnknown, boolean serializeNulls) { this.moshi = moshi; this.lenient = lenient; this.failOnUnknown = failOnUnknown; this.serializeNulls = serializeNulls; } /** Return a new factory which uses {@linkplain JsonAdapter#lenient() lenient} adapters. */ public MoshiConverterFactory asLenient() { return new MoshiConverterFactory(moshi, true, failOnUnknown, serializeNulls); } /** Return a new factory which uses {@link JsonAdapter#failOnUnknown()} adapters. */ public MoshiConverterFactory failOnUnknown() { return new MoshiConverterFactory(moshi, lenient, true, serializeNulls); } /** Return a new factory which includes null values into the serialized JSON. 
*/ public MoshiConverterFactory withNullSerialization() { return new MoshiConverterFactory(moshi, lenient, failOnUnknown, true); } @Override public Converter<ResponseBody, ?> responseBodyConverter( Type type, Annotation[] annotations, Retrofit retrofit) { JsonAdapter<?> adapter = moshi.adapter(type, jsonAnnotations(annotations)); if (lenient) { adapter = adapter.lenient(); } if (failOnUnknown) { adapter = adapter.failOnUnknown(); } if (serializeNulls) { adapter = adapter.serializeNulls(); } return new MoshiResponseBodyConverter<>(adapter); } @Override public Converter<?, RequestBody> requestBodyConverter( Type type, Annotation[] parameterAnnotations, Annotation[] methodAnnotations, Retrofit retrofit) { JsonAdapter<?> adapter = moshi.adapter(type, jsonAnnotations(parameterAnnotations)); if (lenient) { adapter = adapter.lenient(); } if (failOnUnknown) { adapter = adapter.failOnUnknown(); } if (serializeNulls) { adapter = adapter.serializeNulls(); } return new MoshiRequestBodyConverter<>(adapter); } private static Set<? extends Annotation> jsonAnnotations(Annotation[] annotations) { Set<Annotation> result = null; for (Annotation annotation : annotations) { if (annotation.annotationType().isAnnotationPresent(JsonQualifier.class)) { if (result == null) result = new LinkedHashSet<>(); result.add(annotation); } } return result != null ? unmodifiableSet(result) : Collections.emptySet(); } }
3,928
0
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter/moshi/MoshiRequestBodyConverter.java
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.moshi; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.JsonWriter; import java.io.IOException; import okhttp3.MediaType; import okhttp3.RequestBody; import okio.Buffer; import retrofit2.Converter; final class MoshiRequestBodyConverter<T> implements Converter<T, RequestBody> { private static final MediaType MEDIA_TYPE = MediaType.get("application/json; charset=UTF-8"); private final JsonAdapter<T> adapter; MoshiRequestBodyConverter(JsonAdapter<T> adapter) { this.adapter = adapter; } @Override public RequestBody convert(T value) throws IOException { Buffer buffer = new Buffer(); JsonWriter writer = JsonWriter.of(buffer); adapter.toJson(writer, value); return RequestBody.create(MEDIA_TYPE, buffer.readByteString()); } }
3,929
0
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter/moshi/MoshiResponseBodyConverter.java
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.moshi; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.JsonDataException; import com.squareup.moshi.JsonReader; import java.io.IOException; import okhttp3.ResponseBody; import okio.BufferedSource; import okio.ByteString; import retrofit2.Converter; final class MoshiResponseBodyConverter<T> implements Converter<ResponseBody, T> { private static final ByteString UTF8_BOM = ByteString.decodeHex("EFBBBF"); private final JsonAdapter<T> adapter; MoshiResponseBodyConverter(JsonAdapter<T> adapter) { this.adapter = adapter; } @Override public T convert(ResponseBody value) throws IOException { BufferedSource source = value.source(); try { // Moshi has no document-level API so the responsibility of BOM skipping falls to whatever // is delegating to it. Since it's a UTF-8-only library as well we only honor the UTF-8 BOM. if (source.rangeEquals(0, UTF8_BOM)) { source.skip(UTF8_BOM.size()); } JsonReader reader = JsonReader.of(source); T result = adapter.fromJson(reader); if (reader.peek() != JsonReader.Token.END_DOCUMENT) { throw new JsonDataException("JSON document was not fully consumed."); } return result; } finally { value.close(); } } }
3,930
0
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/moshi/src/main/java/retrofit2/converter/moshi/package-info.java
/* Package-wide nullability default: parameters and returns in this package are treated as
   non-null unless explicitly annotated otherwise. */
@retrofit2.internal.EverythingIsNonNull package retrofit2.converter.moshi;
3,931
0
Create_ds/retrofit/retrofit-converters/simplexml/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/simplexml/src/test/java/retrofit2/converter/simplexml/SimpleXmlConverterFactoryTest.java
/*
 * NOTE(review): this test class was flattened onto three physical lines by extraction; the
 * code bytes below are preserved exactly as found. Reflow before compiling. The non-ASCII
 * literals ("你好,世界") are intentional — they exercise GBK charset handling.
 */
/* * Copyright (C) 2013 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.simplexml; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.fail; import java.io.IOException; import java.nio.charset.Charset; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import okio.Buffer; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.simpleframework.xml.core.ElementException; import org.simpleframework.xml.core.Persister; import org.simpleframework.xml.stream.Format; import org.simpleframework.xml.stream.HyphenStyle; import org.simpleframework.xml.stream.Verbosity; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.POST; public class SimpleXmlConverterFactoryTest { interface Service { @GET("/") Call<MyObject> get(); @POST("/") Call<MyObject> post(@Body MyObject impl); @GET("/") Call<String> wrongClass(); } @Rule public final MockWebServer server = new MockWebServer(); private Service service; @Before public void setUp() { Format format = new Format(0, null, new HyphenStyle(), Verbosity.HIGH); Persister persister = new Persister(format); Retrofit retrofit = new Retrofit.Builder() .baseUrl(server.url("/")) .addConverterFactory(SimpleXmlConverterFactory.create(persister)) .build(); service = 
retrofit.create(Service.class); } @Test public void bodyWays() throws IOException, InterruptedException { server.enqueue( new MockResponse() .setBody("<my-object><message>hello world</message><count>10</count></my-object>")); Call<MyObject> call = service.post(new MyObject("hello world", 10)); Response<MyObject> response = call.execute(); MyObject body = response.body(); assertThat(body.getMessage()).isEqualTo("hello world"); assertThat(body.getCount()).isEqualTo(10); RecordedRequest request = server.takeRequest(); assertThat(request.getBody().readUtf8()) .isIn( "<my-object><message>hello world</message><count>10</count></my-object>", "<my-object><count>10</count><message>hello world</message></my-object>"); assertThat(request.getHeader("Content-Type")).isEqualTo("application/xml; charset=UTF-8"); } @Test public void honorsCharacterEncoding() throws IOException { Buffer buffer = new Buffer() .writeString( "<my-object><message>你好,世界</message><count>10</count></my-object>", Charset.forName("GBK")); server.enqueue( new MockResponse().setBody(buffer).addHeader("Content-Type", "text/xml;charset=GBK")); Call<MyObject> call = service.get(); Response<MyObject> response = call.execute(); MyObject body = response.body(); assertThat(body.getMessage()).isEqualTo("你好,世界"); } @Test public void deserializeWrongValue() throws IOException { server.enqueue(new MockResponse().setBody("<myObject><foo/><bar/></myObject>")); Call<?> call = service.get(); try { call.execute(); fail(); } catch (RuntimeException e) { assertThat(e.getCause()) .isInstanceOf(ElementException.class) .hasMessageStartingWith( "Element 'foo' does not have a match in class retrofit2.converter.simplexml.MyObject"); } } @Test public void deserializeWrongClass() throws IOException { server.enqueue( new MockResponse() .setBody("<my-object><message>hello world</message><count>10</count></my-object>")); Call<?> call = service.wrongClass(); try { call.execute(); fail(); } catch (RuntimeException e) { 
assertThat(e).hasMessage("Could not deserialize body as class java.lang.String"); } } }
3,932
0
Create_ds/retrofit/retrofit-converters/simplexml/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/simplexml/src/test/java/retrofit2/converter/simplexml/MyObject.java
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.simplexml;

import org.simpleframework.xml.Default;
import org.simpleframework.xml.DefaultType;
import org.simpleframework.xml.Element;

/** Simple XML-mapped test fixture with a message and a count. */
@Default(value = DefaultType.FIELD)
final class MyObject {
  @Element private String message;
  @Element private int count;

  // No-arg constructor required by the Simple XML deserializer.
  public MyObject() {}

  public MyObject(String message, int count) {
    this.message = message;
    this.count = count;
  }

  public void setMessage(String message) {
    this.message = message;
  }

  public String getMessage() {
    return message;
  }

  public void setCount(int count) {
    this.count = count;
  }

  public int getCount() {
    return count;
  }

  @Override
  public int hashCode() {
    // Same 31-based combination (and same values) as the conventional two-field hash.
    int h = 31 + count;
    h = h * 31 + (message == null ? 0 : message.hashCode());
    return h;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) return true;
    if (!(obj instanceof MyObject)) return false;
    MyObject that = (MyObject) obj;
    if (count != that.count) return false;
    return message == null ? that.message == null : message.equals(that.message);
  }
}
3,933
0
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter/simplexml/SimpleXmlResponseBodyConverter.java
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.simplexml; import java.io.IOException; import okhttp3.ResponseBody; import org.simpleframework.xml.Serializer; import retrofit2.Converter; final class SimpleXmlResponseBodyConverter<T> implements Converter<ResponseBody, T> { private final Class<T> cls; private final Serializer serializer; private final boolean strict; SimpleXmlResponseBodyConverter(Class<T> cls, Serializer serializer, boolean strict) { this.cls = cls; this.serializer = serializer; this.strict = strict; } @Override public T convert(ResponseBody value) throws IOException { try { T read = serializer.read(cls, value.charStream(), strict); if (read == null) { throw new IllegalStateException("Could not deserialize body as " + cls); } return read; } catch (RuntimeException | IOException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } finally { value.close(); } } }
3,934
0
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter/simplexml/SimpleXmlRequestBodyConverter.java
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.simplexml; import java.io.IOException; import java.io.OutputStreamWriter; import okhttp3.MediaType; import okhttp3.RequestBody; import okio.Buffer; import org.simpleframework.xml.Serializer; import retrofit2.Converter; final class SimpleXmlRequestBodyConverter<T> implements Converter<T, RequestBody> { private static final MediaType MEDIA_TYPE = MediaType.get("application/xml; charset=UTF-8"); private static final String CHARSET = "UTF-8"; private final Serializer serializer; SimpleXmlRequestBodyConverter(Serializer serializer) { this.serializer = serializer; } @Override public RequestBody convert(T value) throws IOException { Buffer buffer = new Buffer(); try { OutputStreamWriter osw = new OutputStreamWriter(buffer.outputStream(), CHARSET); serializer.write(value, osw); osw.flush(); } catch (RuntimeException | IOException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } return RequestBody.create(MEDIA_TYPE, buffer.readByteString()); } }
3,935
0
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter/simplexml/SimpleXmlConverterFactory.java
/* * Copyright (C) 2013 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofit2.converter.simplexml; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import javax.annotation.Nullable; import okhttp3.RequestBody; import okhttp3.ResponseBody; import org.simpleframework.xml.Serializer; import org.simpleframework.xml.core.Persister; import retrofit2.Converter; import retrofit2.Retrofit; /** * A {@linkplain Converter.Factory converter} which uses Simple Framework for XML. * * <p>This converter only applies for class types. Parameterized types (e.g., {@code List<Foo>}) are * not handled. * * @deprecated we recommend switching to the JAXB converter. */ @Deprecated public final class SimpleXmlConverterFactory extends Converter.Factory { /** Create an instance using a default {@link Persister} instance for conversion. */ public static SimpleXmlConverterFactory create() { return create(new Persister()); } /** Create an instance using {@code serializer} for conversion. */ public static SimpleXmlConverterFactory create(Serializer serializer) { return new SimpleXmlConverterFactory(serializer, true); } /** Create an instance using a default {@link Persister} instance for non-strict conversion. */ public static SimpleXmlConverterFactory createNonStrict() { return createNonStrict(new Persister()); } /** Create an instance using {@code serializer} for non-strict conversion. 
*/ @SuppressWarnings("ConstantConditions") // Guarding public API nullability. public static SimpleXmlConverterFactory createNonStrict(Serializer serializer) { if (serializer == null) throw new NullPointerException("serializer == null"); return new SimpleXmlConverterFactory(serializer, false); } private final Serializer serializer; private final boolean strict; private SimpleXmlConverterFactory(Serializer serializer, boolean strict) { this.serializer = serializer; this.strict = strict; } public boolean isStrict() { return strict; } @Override public Converter<ResponseBody, ?> responseBodyConverter( Type type, Annotation[] annotations, Retrofit retrofit) { if (!(type instanceof Class)) { return null; } Class<?> cls = (Class<?>) type; return new SimpleXmlResponseBodyConverter<>(cls, serializer, strict); } @Override public @Nullable Converter<?, RequestBody> requestBodyConverter( Type type, Annotation[] parameterAnnotations, Annotation[] methodAnnotations, Retrofit retrofit) { if (!(type instanceof Class)) { return null; } return new SimpleXmlRequestBodyConverter<>(serializer); } }
3,936
0
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/simplexml/src/main/java/retrofit2/converter/simplexml/package-info.java
// Package-level nullability contract: every type use in this package is treated
// as non-null unless an element is explicitly annotated @Nullable.
@retrofit2.internal.EverythingIsNonNull
package retrofit2.converter.simplexml;
3,937
0
Create_ds/retrofit/retrofit-converters/protobuf/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/protobuf/src/test/java/retrofit2/converter/protobuf/ProtoConverterFactoryTest.java
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.protobuf;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
import static retrofit2.converter.protobuf.PhoneProtos.Phone;

import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.util.List;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import okio.Buffer;
import okio.ByteString;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import retrofit2.Call;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.POST;

/**
 * Round-trip and failure-mode tests for {@link ProtoConverterFactory} using the generated
 * {@code Phone} message against a local mock HTTP server.
 */
public final class ProtoConverterFactoryTest {
  interface Service {
    @GET("/")
    Call<Phone> get();

    @POST("/")
    Call<Phone> post(@Body Phone impl);

    // String is not a MessageLite, so this endpoint must be rejected at service creation.
    @GET("/")
    Call<String> wrongClass();

    // Parameterized types are never handled by the proto converter.
    @GET("/")
    Call<List<String>> wrongType();
  }

  interface ServiceWithRegistry {
    @GET("/")
    Call<Phone> get();
  }

  @Rule public final MockWebServer server = new MockWebServer();

  private Service service;
  private ServiceWithRegistry serviceWithRegistry;

  @Before
  public void setUp() {
    // Plain factory: no extension registry, unknown extensions are skipped.
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(ProtoConverterFactory.create())
            .build();
    service = retrofit.create(Service.class);

    // Second service wired with an ExtensionRegistry so extension fields decode.
    ExtensionRegistry registry = ExtensionRegistry.newInstance();
    PhoneProtos.registerAllExtensions(registry);
    Retrofit retrofitWithRegistry =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(ProtoConverterFactory.createWithRegistry(registry))
            .build();
    serviceWithRegistry = retrofitWithRegistry.create(ServiceWithRegistry.class);
  }

  @Test
  public void serializeAndDeserialize() throws IOException, InterruptedException {
    // Base64 of a serialized Phone with number "(519) 867-5309".
    ByteString encoded = ByteString.decodeBase64("Cg4oNTE5KSA4NjctNTMwOQ==");
    server.enqueue(new MockResponse().setBody(new Buffer().write(encoded)));

    Call<Phone> call = service.post(Phone.newBuilder().setNumber("(519) 867-5309").build());
    Response<Phone> response = call.execute();
    Phone body = response.body();
    assertThat(body.getNumber()).isEqualTo("(519) 867-5309");

    // The request wire bytes must match the canonical encoding exactly.
    RecordedRequest request = server.takeRequest();
    assertThat(request.getBody().readByteString()).isEqualTo(encoded);
    assertThat(request.getHeader("Content-Type")).isEqualTo("application/x-protobuf");
  }

  @Test
  public void deserializeEmpty() throws IOException {
    // An empty body is a valid proto message with all fields unset.
    server.enqueue(new MockResponse());

    Call<Phone> call = service.get();
    Response<Phone> response = call.execute();
    Phone body = response.body();
    assertThat(body.hasNumber()).isFalse();
  }

  @Test
  public void deserializeUsesRegistry() throws IOException {
    // Payload includes the voicemail extension field; the registry makes it visible.
    ByteString encoded = ByteString.decodeBase64("Cg4oNTE5KSA4NjctNTMwORAB");
    server.enqueue(new MockResponse().setBody(new Buffer().write(encoded)));

    Call<Phone> call = serviceWithRegistry.get();
    Response<Phone> response = call.execute();
    Phone body = response.body();
    assertThat(body.getNumber()).isEqualTo("(519) 867-5309");
    assertThat(body.getExtension(PhoneProtos.voicemail)).isEqualTo(true);
  }

  @Test
  public void deserializeWrongClass() throws IOException {
    ByteString encoded = ByteString.decodeBase64("Cg4oNTE5KSA4NjctNTMwOQ==");
    server.enqueue(new MockResponse().setBody(new Buffer().write(encoded)));

    // Retrofit surfaces the converter lookup failure eagerly, at method creation time.
    try {
      service.wrongClass();
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessage(
              ""
                  + "Unable to create converter for class java.lang.String\n"
                  + " for method Service.wrongClass");
      assertThat(e.getCause())
          .hasMessage(
              ""
                  + "Could not locate ResponseBody converter for class java.lang.String.\n"
                  + " Tried:\n"
                  + " * retrofit2.BuiltInConverters\n"
                  + " * retrofit2.converter.protobuf.ProtoConverterFactory\n"
                  + " * retrofit2.OptionalConverterFactory");
    }
  }

  @Test
  public void deserializeWrongType() throws IOException {
    ByteString encoded = ByteString.decodeBase64("Cg4oNTE5KSA4NjctNTMwOQ==");
    server.enqueue(new MockResponse().setBody(new Buffer().write(encoded)));

    try {
      service.wrongType();
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessage(
              ""
                  + "Unable to create converter for java.util.List<java.lang.String>\n"
                  + " for method Service.wrongType");
      assertThat(e.getCause())
          .hasMessage(
              ""
                  + "Could not locate ResponseBody converter for java.util.List<java.lang.String>.\n"
                  + " Tried:\n"
                  + " * retrofit2.BuiltInConverters\n"
                  + " * retrofit2.converter.protobuf.ProtoConverterFactory\n"
                  + " * retrofit2.OptionalConverterFactory");
    }
  }

  @Test
  public void deserializeWrongValue() throws IOException {
    // "////" decodes to bytes that are not a valid proto stream, so parsing must fail.
    ByteString encoded = ByteString.decodeBase64("////");
    server.enqueue(new MockResponse().setBody(new Buffer().write(encoded)));

    Call<?> call = service.get();
    try {
      call.execute();
      fail();
    } catch (RuntimeException e) {
      // The converter rewraps InvalidProtocolBufferException as an unchecked exception
      // because the failure is a data mismatch, not an I/O problem.
      assertThat(e.getCause())
          .isInstanceOf(InvalidProtocolBufferException.class)
          .hasMessageContaining("input ended unexpectedly");
    }
  }
}
3,938
0
Create_ds/retrofit/retrofit-converters/protobuf/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/protobuf/src/test/java/retrofit2/converter/protobuf/FallbackParserFinderTest.java
/*
 * Copyright (C) 2016 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.protobuf;

import static org.assertj.core.api.Assertions.assertThat;

import com.google.protobuf.MessageLite;
import com.google.protobuf.Parser;
import java.lang.annotation.Annotation;
import okhttp3.ResponseBody;
import org.junit.Test;
import retrofit2.Converter;
import retrofit2.Retrofit;
import retrofit2.converter.protobuf.PhoneProtos.Phone;

/** Verifies the legacy static-PARSER-field lookup path of {@link ProtoConverterFactory}. */
public final class FallbackParserFinderTest {
  @Test
  public void converterFactoryFallsBackToParserField() {
    // FakePhone has no static parser() method, so the factory must fall back to
    // reading the public static PARSER field to produce a converter.
    Retrofit stubRetrofit = new Retrofit.Builder().baseUrl("http://localhost/").build();
    Converter<ResponseBody, ?> created =
        ProtoConverterFactory.create()
            .responseBodyConverter(FakePhone.class, new Annotation[0], stubRetrofit);
    assertThat(created).isNotNull();
  }

  @SuppressWarnings("unused") // Used reflectively.
  public abstract static class FakePhone implements MessageLite {
    public static final Parser<Phone> PARSER = Phone.parser();
  }
}
3,939
0
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter/protobuf/ProtoRequestBodyConverter.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.protobuf;

import com.google.protobuf.MessageLite;
import java.io.IOException;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import retrofit2.Converter;

/** Serializes an outgoing protobuf message as an {@code application/x-protobuf} request body. */
final class ProtoRequestBodyConverter<T extends MessageLite> implements Converter<T, RequestBody> {
  private static final MediaType MEDIA_TYPE = MediaType.get("application/x-protobuf");

  @Override
  public RequestBody convert(T value) throws IOException {
    // A protobuf message renders itself to its compact binary wire format.
    return RequestBody.create(MEDIA_TYPE, value.toByteArray());
  }
}
3,940
0
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter/protobuf/ProtoResponseBodyConverter.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.protobuf;

import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageLite;
import com.google.protobuf.Parser;
import java.io.IOException;
import javax.annotation.Nullable;
import okhttp3.ResponseBody;
import retrofit2.Converter;

/** Deserializes an HTTP response body into a protobuf message via the supplied {@link Parser}. */
final class ProtoResponseBodyConverter<T extends MessageLite>
    implements Converter<ResponseBody, T> {
  private final Parser<T> parser;
  // Optional registry so extension fields can be decoded; null means "skip extensions".
  private final @Nullable ExtensionRegistryLite registry;

  ProtoResponseBodyConverter(Parser<T> parser, @Nullable ExtensionRegistryLite registry) {
    this.parser = parser;
    this.registry = registry;
  }

  @Override
  public T convert(ResponseBody value) throws IOException {
    // try-with-resources releases the body whether parsing succeeds or fails.
    try (ResponseBody body = value) {
      if (registry == null) {
        return parser.parseFrom(body.byteStream());
      }
      return parser.parseFrom(body.byteStream(), registry);
    } catch (InvalidProtocolBufferException e) {
      throw new RuntimeException(e); // Despite extending IOException, this is data mismatch.
    }
  }
}
3,941
0
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter/protobuf/ProtoConverterFactory.java
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.protobuf;

import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.MessageLite;
import com.google.protobuf.Parser;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import javax.annotation.Nullable;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;

/**
 * A {@linkplain Converter.Factory converter} which uses Protocol Buffers.
 *
 * <p>This converter only applies for types which extend from {@link MessageLite} (or one of its
 * subclasses).
 */
public final class ProtoConverterFactory extends Converter.Factory {
  public static ProtoConverterFactory create() {
    return new ProtoConverterFactory(null);
  }

  /** Create an instance which uses {@code registry} when deserializing. */
  public static ProtoConverterFactory createWithRegistry(@Nullable ExtensionRegistryLite registry) {
    return new ProtoConverterFactory(registry);
  }

  private final @Nullable ExtensionRegistryLite registry;

  private ProtoConverterFactory(@Nullable ExtensionRegistryLite registry) {
    this.registry = registry;
  }

  @Override
  public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    // Decline anything that is not a plain protobuf message class.
    if (!(type instanceof Class<?>)) {
      return null;
    }
    Class<?> rawType = (Class<?>) type;
    if (!MessageLite.class.isAssignableFrom(rawType)) {
      return null;
    }
    return new ProtoResponseBodyConverter<>(findParser(rawType), registry);
  }

  /**
   * Locates the message's {@link Parser}, preferring the generated static {@code parser()} method
   * and falling back to the pre-3.0 static {@code PARSER} field.
   */
  @SuppressWarnings("unchecked")
  private static Parser<MessageLite> findParser(Class<?> messageType) {
    try {
      Method parserMethod = messageType.getDeclaredMethod("parser");
      return (Parser<MessageLite>) parserMethod.invoke(null);
    } catch (InvocationTargetException e) {
      // parser() itself threw; surface its real cause.
      throw new RuntimeException(e.getCause());
    } catch (NoSuchMethodException | IllegalAccessException ignored) {
      // If the method is missing, fall back to original static field for pre-3.0 support.
      try {
        Field parserField = messageType.getDeclaredField("PARSER");
        return (Parser<MessageLite>) parserField.get(null);
      } catch (NoSuchFieldException | IllegalAccessException e) {
        throw new IllegalArgumentException(
            "Found a protobuf message but "
                + messageType.getName()
                + " had no parser() method or PARSER field.",
            e);
      }
    }
  }

  @Override
  public @Nullable Converter<?, RequestBody> requestBodyConverter(
      Type type,
      Annotation[] parameterAnnotations,
      Annotation[] methodAnnotations,
      Retrofit retrofit) {
    if (!(type instanceof Class<?>)) {
      return null;
    }
    if (!MessageLite.class.isAssignableFrom((Class<?>) type)) {
      return null;
    }
    return new ProtoRequestBodyConverter<>();
  }
}
3,942
0
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/protobuf/src/main/java/retrofit2/converter/protobuf/package-info.java
// Package-level nullability contract: every type use in this package is treated
// as non-null unless an element is explicitly annotated @Nullable.
@retrofit2.internal.EverythingIsNonNull
package retrofit2.converter.protobuf;
3,943
0
Create_ds/retrofit/retrofit-converters/jackson/src/test/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jackson/src/test/java/retrofit2/converter/jackson/JacksonConverterFactoryTest.java
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jackson;

import static org.assertj.core.api.Assertions.assertThat;

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import retrofit2.Call;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.http.Body;
import retrofit2.http.POST;

/**
 * Round-trip tests for {@link JacksonConverterFactory}, exercising both a custom
 * serializer/deserializer pair registered for an interface type and plain field-based
 * (de)serialization of a concrete class.
 */
public class JacksonConverterFactoryTest {
  interface AnInterface {
    String getName();
  }

  static class AnImplementation implements AnInterface {
    private String theName;

    AnImplementation() {}

    AnImplementation(String name) {
      theName = name;
    }

    @Override
    public String getName() {
      return theName;
    }
  }

  /** Writes an {@link AnInterface} as {@code {"name": ...}} regardless of the concrete type. */
  static class AnInterfaceSerializer extends StdSerializer<AnInterface> {
    AnInterfaceSerializer() {
      super(AnInterface.class);
    }

    @Override
    public void serialize(
        AnInterface anInterface, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
      jsonGenerator.writeStartObject();
      jsonGenerator.writeFieldName("name");
      jsonGenerator.writeString(anInterface.getName());
      jsonGenerator.writeEndObject();
    }
  }

  /** Reads {@code {"name": ...}} back into an {@link AnImplementation}. */
  static class AnInterfaceDeserializer extends StdDeserializer<AnInterface> {
    AnInterfaceDeserializer() {
      super(AnInterface.class);
    }

    @Override
    public AnInterface deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
      if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
        throw new AssertionError("Expected start object.");
      }

      String name = null;

      // Scan object fields, capturing only "name"; unknown fields are ignored.
      while (jp.nextToken() != JsonToken.END_OBJECT) {
        switch (jp.getCurrentName()) {
          case "name":
            name = jp.getValueAsString();
            break;
        }
      }

      return new AnImplementation(name);
    }
  }

  interface Service {
    @POST("/")
    Call<AnImplementation> anImplementation(@Body AnImplementation impl);

    @POST("/")
    Call<AnInterface> anInterface(@Body AnInterface impl);
  }

  @Rule public final MockWebServer server = new MockWebServer();

  private Service service;

  @Before
  public void setUp() {
    // Register the custom interface (de)serializers.
    SimpleModule module = new SimpleModule();
    module.addSerializer(AnInterface.class, new AnInterfaceSerializer());
    module.addDeserializer(AnInterface.class, new AnInterfaceDeserializer());
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(module);
    // Disable getter/setter auto-detection so mapping is driven purely by fields.
    mapper.configure(MapperFeature.AUTO_DETECT_GETTERS, false);
    mapper.configure(MapperFeature.AUTO_DETECT_SETTERS, false);
    mapper.configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false);
    mapper.setVisibilityChecker(
        mapper
            .getSerializationConfig()
            .getDefaultVisibilityChecker()
            .withFieldVisibility(JsonAutoDetect.Visibility.ANY));
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(JacksonConverterFactory.create(mapper))
            .build();
    service = retrofit.create(Service.class);
  }

  @Test
  public void anInterface() throws IOException, InterruptedException {
    server.enqueue(new MockResponse().setBody("{\"name\":\"value\"}"));

    Call<AnInterface> call = service.anInterface(new AnImplementation("value"));
    Response<AnInterface> response = call.execute();
    AnInterface body = response.body();
    assertThat(body.getName()).isEqualTo("value");

    RecordedRequest request = server.takeRequest();
    assertThat(request.getBody().readUtf8()).isEqualTo("{\"name\":\"value\"}");
    assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8");
  }

  @Test
  public void anImplementation() throws IOException, InterruptedException {
    server.enqueue(new MockResponse().setBody("{\"theName\":\"value\"}"));

    Call<AnImplementation> call = service.anImplementation(new AnImplementation("value"));
    Response<AnImplementation> response = call.execute();
    AnImplementation body = response.body();
    assertThat(body.theName).isEqualTo("value");

    RecordedRequest request = server.takeRequest();
    // TODO figure out how to get Jackson to stop using AnInterface's serializer here.
    assertThat(request.getBody().readUtf8()).isEqualTo("{\"name\":\"value\"}");
    assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8");
  }
}
3,944
0
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter/jackson/JacksonResponseBodyConverter.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jackson;

import com.fasterxml.jackson.databind.ObjectReader;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.Converter;

/** Deserializes an HTTP response body with a pre-configured Jackson {@link ObjectReader}. */
final class JacksonResponseBodyConverter<T> implements Converter<ResponseBody, T> {
  private final ObjectReader reader;

  JacksonResponseBodyConverter(ObjectReader reader) {
    this.reader = reader;
  }

  @Override
  public T convert(ResponseBody value) throws IOException {
    // try-with-resources releases the body whether deserialization succeeds or fails.
    try (ResponseBody body = value) {
      return reader.readValue(body.charStream());
    }
  }
}
3,945
0
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter/jackson/JacksonConverterFactory.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jackson;

import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Converter;
import retrofit2.Retrofit;

/**
 * A {@linkplain Converter.Factory converter} which uses Jackson.
 *
 * <p>Because Jackson is so flexible in the types it supports, this converter assumes that it can
 * handle all types. If you are mixing JSON serialization with something else (such as protocol
 * buffers), you must {@linkplain Retrofit.Builder#addConverterFactory(Converter.Factory) add this
 * instance} last to allow the other converters a chance to see their types.
 */
public final class JacksonConverterFactory extends Converter.Factory {
  /** Create an instance using a default {@link ObjectMapper} instance for conversion. */
  public static JacksonConverterFactory create() {
    return create(new ObjectMapper());
  }

  /** Create an instance using {@code mapper} for conversion. */
  @SuppressWarnings("ConstantConditions") // Guarding public API nullability.
  public static JacksonConverterFactory create(ObjectMapper mapper) {
    if (mapper == null) throw new NullPointerException("mapper == null");
    return new JacksonConverterFactory(mapper);
  }

  private final ObjectMapper mapper;

  private JacksonConverterFactory(ObjectMapper mapper) {
    this.mapper = mapper;
  }

  @Override
  public Converter<ResponseBody, ?> responseBodyConverter(
      Type type, Annotation[] annotations, Retrofit retrofit) {
    // Resolve the reflective Type into Jackson's own type model, then bind a reader to it.
    JavaType mappedType = mapper.getTypeFactory().constructType(type);
    return new JacksonResponseBodyConverter<>(mapper.readerFor(mappedType));
  }

  @Override
  public Converter<?, RequestBody> requestBodyConverter(
      Type type,
      Annotation[] parameterAnnotations,
      Annotation[] methodAnnotations,
      Retrofit retrofit) {
    JavaType mappedType = mapper.getTypeFactory().constructType(type);
    return new JacksonRequestBodyConverter<>(mapper.writerFor(mappedType));
  }
}
3,946
0
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter/jackson/JacksonRequestBodyConverter.java
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.converter.jackson;

import com.fasterxml.jackson.databind.ObjectWriter;
import java.io.IOException;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import retrofit2.Converter;

/** Serializes an outgoing value to JSON with a pre-configured Jackson {@link ObjectWriter}. */
final class JacksonRequestBodyConverter<T> implements Converter<T, RequestBody> {
  private static final MediaType MEDIA_TYPE = MediaType.get("application/json; charset=UTF-8");

  private final ObjectWriter writer;

  JacksonRequestBodyConverter(ObjectWriter writer) {
    this.writer = writer;
  }

  @Override
  public RequestBody convert(T value) throws IOException {
    return RequestBody.create(MEDIA_TYPE, writer.writeValueAsBytes(value));
  }
}
3,947
0
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter
Create_ds/retrofit/retrofit-converters/jackson/src/main/java/retrofit2/converter/jackson/package-info.java
// Package-level nullability contract: every type use in this package is treated
// as non-null unless an element is explicitly annotated @Nullable.
@retrofit2.internal.EverythingIsNonNull
package retrofit2.converter.jackson;
3,948
0
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss/server/EdgeHealthCheckHandler.java
package com.netflix.recipes.rss.server;

import com.netflix.karyon.spi.HealthCheckHandler;

/**
 * Karyon health check for the edge server. This service keeps no local state that can
 * degrade, so it unconditionally reports healthy.
 */
public class EdgeHealthCheckHandler implements HealthCheckHandler {

  // HTTP status code signalling a healthy instance to the health check poller.
  private static final int HEALTHY = 200;

  public int getStatus() {
    return HEALTHY;
  }
}
3,949
0
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss/server/EdgeServer.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss.server; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.netflix.config.ConfigurationManager; import com.netflix.karyon.spi.PropertyNames; /** * Edge Server * * @author Chris Fregly (chris@fregly.com) */ public class EdgeServer extends BaseJettyServer { private static final Logger logger = LoggerFactory .getLogger(EdgeServer.class); public EdgeServer() { } public static void main(final String[] args) throws Exception { System.setProperty("archaius.deployment.applicationId", "edge"); System.setProperty(PropertyNames.SERVER_BOOTSTRAP_BASE_PACKAGES_OVERRIDE, "com.netflix"); String appId = ConfigurationManager.getDeploymentContext().getApplicationId(); String env = ConfigurationManager.getDeploymentContext().getDeploymentEnvironment(); // populate the eureka-specific properties System.setProperty("eureka.client.props", appId); if (env != null) { System.setProperty("eureka.environment", env); } EdgeServer edgeServer = new EdgeServer(); edgeServer.start(); } }
3,950
0
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss/hystrix/DeleteRSSCommand.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.hystrix;

import com.google.common.base.Charsets;
import com.netflix.client.ClientFactory;
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandKey;
import com.netflix.hystrix.HystrixThreadPoolKey;
import com.netflix.niws.client.http.HttpClientRequest;
import com.netflix.niws.client.http.HttpClientRequest.Verb;
import com.netflix.niws.client.http.HttpClientResponse;
import com.netflix.niws.client.http.RestClient;
import com.netflix.recipes.rss.RSSConstants;

import org.apache.commons.io.IOUtils;

import java.net.URI;

/**
 * Calls the middle tier Delete RSS entry point
 */
public class DeleteRSSCommand extends HystrixCommand<String> {

    // RSS Feed Url (encoded)
    private final String url;

    public DeleteRSSCommand(String url) {
        super(
            Setter.withGroupKey(
                    HystrixCommandGroupKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_MUTATIONS_GROUP))
                .andCommandKey(HystrixCommandKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_DEL_COMMAND_KEY))
                .andThreadPoolKey(HystrixThreadPoolKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_THREAD_POOL)));
        this.url = url;
    }

    @Override
    protected String run() {
        try {
            // The named client param must match the prefix for the ribbon
            // configuration specified in the edge.properties file
            RestClient middletierClient =
                (RestClient) ClientFactory.getNamedClient(RSSConstants.MIDDLETIER_REST_CLIENT);

            URI deleteUri = new URI(
                "/" + RSSConstants.MIDDLETIER_WEB_RESOURCE_ROOT_PATH
                    + RSSConstants.RSS_ENTRY_POINT
                    + "?url=" + url);

            HttpClientRequest deleteRequest = HttpClientRequest.newBuilder()
                .setVerb(Verb.DELETE)
                .setUri(deleteUri)
                .build();

            HttpClientResponse deleteResponse =
                middletierClient.executeWithLoadBalancer(deleteRequest);

            return IOUtils.toString(deleteResponse.getRawEntity(), Charsets.UTF_8);
        } catch (Exception exc) {
            throw new RuntimeException("Exception", exc);
        }
    }

    @Override
    protected String getFallback() {
        // Empty json
        return "{}";
    }
}
3,951
0
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss/hystrix/GetRSSCommand.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.hystrix;

import com.google.common.base.Charsets;
import com.netflix.client.ClientFactory;
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandKey;
import com.netflix.hystrix.HystrixThreadPoolKey;
import com.netflix.niws.client.http.HttpClientRequest;
import com.netflix.niws.client.http.HttpClientRequest.Verb;
import com.netflix.niws.client.http.HttpClientResponse;
import com.netflix.niws.client.http.RestClient;
import com.netflix.recipes.rss.RSSConstants;

import org.apache.commons.io.IOUtils;

import java.net.URI;

/**
 * Calls the middle tier Get RSS entry point
 */
public class GetRSSCommand extends HystrixCommand<String> {

    public GetRSSCommand() {
        super(
            Setter.withGroupKey(
                    HystrixCommandGroupKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_GET_GROUP))
                .andCommandKey(HystrixCommandKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_GET_COMMAND_KEY))
                .andThreadPoolKey(HystrixThreadPoolKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_THREAD_POOL)));
    }

    @Override
    protected String run() {
        try {
            // The named client param must match the prefix for the ribbon
            // configuration specified in the edge.properties file
            RestClient middletierClient =
                (RestClient) ClientFactory.getNamedClient(RSSConstants.MIDDLETIER_REST_CLIENT);

            URI getUri = new URI(
                "/" + RSSConstants.MIDDLETIER_WEB_RESOURCE_ROOT_PATH
                    + RSSConstants.RSS_ENTRY_POINT);

            HttpClientRequest getRequest = HttpClientRequest.newBuilder()
                .setVerb(Verb.GET)
                .setUri(getUri)
                .build();

            HttpClientResponse getResponse =
                middletierClient.executeWithLoadBalancer(getRequest);

            return IOUtils.toString(getResponse.getRawEntity(), Charsets.UTF_8);
        } catch (Exception exc) {
            throw new RuntimeException("Exception", exc);
        }
    }

    @Override
    protected String getFallback() {
        // Empty json
        return "{}";
    }
}
3,952
0
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-edge/src/main/java/com/netflix/recipes/rss/hystrix/AddRSSCommand.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.hystrix;

import com.google.common.base.Charsets;
import com.netflix.client.ClientFactory;
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandKey;
import com.netflix.hystrix.HystrixThreadPoolKey;
import com.netflix.niws.client.http.HttpClientRequest;
import com.netflix.niws.client.http.HttpClientRequest.Verb;
import com.netflix.niws.client.http.HttpClientResponse;
import com.netflix.niws.client.http.RestClient;
import com.netflix.recipes.rss.RSSConstants;
import org.apache.commons.io.IOUtils;

import java.net.URI;

/**
 * Hystrix command that calls the middle tier "Add RSS" entry point (HTTP POST)
 * via a Ribbon load-balanced client. Falls back to an empty JSON object when
 * the call fails or the circuit is open.
 */
public class AddRSSCommand extends HystrixCommand<String> {

    // RSS Feed Url (encoded) — caller is expected to URL-encode it before
    // constructing this command; it is appended verbatim as a query param.
    private final String url;

    public AddRSSCommand(String url) {
        super(Setter.withGroupKey(
                HystrixCommandGroupKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_MUTATIONS_GROUP))
            .andCommandKey(HystrixCommandKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_ADD_COMMAND_KEY))
            .andThreadPoolKey(HystrixThreadPoolKey.Factory.asKey(RSSConstants.HYSTRIX_RSS_THREAD_POOL)));
        this.url = url;
    }

    /**
     * POSTs the feed url to the middle tier and returns the response body as a
     * UTF-8 string.
     */
    @Override
    protected String run() {
        try {
            /*
             * The named client param must match the prefix for the ribbon
             * configuration specified in the edge.properties file
             */
            RestClient client = (RestClient) ClientFactory.getNamedClient(RSSConstants.MIDDLETIER_REST_CLIENT);

            HttpClientRequest request = HttpClientRequest.newBuilder()
                .setVerb(Verb.POST)
                .setUri(new URI("/" + RSSConstants.MIDDLETIER_WEB_RESOURCE_ROOT_PATH + RSSConstants.RSS_ENTRY_POINT + "?url=" + url))
                .build();

            HttpClientResponse response = client.executeWithLoadBalancer(request);
            return IOUtils.toString(response.getRawEntity(), Charsets.UTF_8);
        } catch (Exception exc) {
            throw new RuntimeException("Exception occurred when adding a RSS feed", exc);
        }
    }

    /** Fallback invoked by Hystrix on failure: an empty JSON object. */
    @Override
    protected String getFallback() {
        // Empty json
        return "{}";
    }
}
3,953
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/Subscriptions.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss;

import java.util.List;

/**
 * Represents a user together with the list of RSS feeds (and their contents)
 * that the user is subscribed to.
 *
 * @author ppadmanabhan
 */
public interface Subscriptions {

    /**
     * UUID of the user owning these subscriptions.
     */
    String getUser();

    /**
     * List of the subscribed RSS feeds along with their contents.
     */
    List<RSS> getSubscriptions();
}
3,954
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/RSS.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss;

import java.util.List;

/**
 * Represents a single RSS feed: its source url, its title, and the items
 * (stories) it contains.
 *
 * @author ppadmanabhan
 */
public interface RSS {

    /**
     * RSS feed url this feed was fetched from.
     */
    String getUrl();

    /**
     * Returns the title of the RSS feed.
     */
    String getTitle();

    /**
     * Returns the list of items (stories) of the RSS feed.
     */
    List<RSSItem> getItems();
}
3,955
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/RSSItem.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss; /* * Represents each item (story) within a RSS feed */ public interface RSSItem { /** * The title of the item */ String getTitle(); /** * Link to the actual story */ String getLink(); /** * Description of the item */ String getDescription(); }
3,956
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/RSSStore.java
package com.netflix.recipes.rss;

import java.util.List;

/**
 * Persistence abstraction for a user's RSS feed subscriptions. Implementations
 * in this project: a Cassandra-backed store and an in-memory store.
 */
public interface RSSStore {

    /**
     * Returns the feed urls the given user is subscribed to.
     */
    List<String> getSubscribedUrls(String userId) throws Exception;

    /**
     * Subscribes the user to the given feed url.
     */
    void subscribeUrl(String userId, String url) throws Exception;

    /**
     * Removes the given feed url from the user's subscriptions.
     */
    void unsubscribeUrl(String userId, String url) throws Exception;
}
3,957
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/impl/RSSItemImpl.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.impl;

import com.netflix.recipes.rss.RSSItem;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * Immutable implementation of {@link RSSItem}, annotated for XML binding so it
 * can be marshalled directly in REST responses.
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class RSSItemImpl implements RSSItem {

    private final String title;
    private final String link;
    private final String description;

    /** No-arg constructor (used by the binding framework); all fields null. */
    public RSSItemImpl() {
        this(null, null, null);
    }

    public RSSItemImpl(String title, String link, String description) {
        this.title = title;
        this.link = link;
        this.description = description;
    }

    @XmlElement(name = "title")
    public String getTitle() {
        return title;
    }

    @XmlElement(name = "link")
    public String getLink() {
        return link;
    }

    @XmlElement(name = "description")
    public String getDescription() {
        return description;
    }
}
3,958
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/impl/CassandraStoreImpl.java
package com.netflix.recipes.rss.impl;

import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.NotFoundException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.config.DynamicPropertyFactory;
import com.netflix.recipes.rss.RSSConstants;
import com.netflix.recipes.rss.RSSStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Cassandra-backed {@link RSSStore}. Feed urls are stored as column names in
 * the "Subscriptions" column family, keyed by user id.
 */
public class CassandraStoreImpl implements RSSStore {

    private static final Logger logger = LoggerFactory.getLogger(CassandraStoreImpl.class);

    // Cassandra keyspace, created lazily by getKeyspace()
    private static Keyspace ks;

    // Data model is documented in the wiki: row key = userId,
    // column name = feed url, column value = "1" marker
    private static final ColumnFamily<String, String> CF_SUBSCRIPTIONS =
        new ColumnFamily<String, String>("Subscriptions", StringSerializer.get(), StringSerializer.get());

    /**
     * Get the feed urls from Cassandra.
     */
    @Override
    public List<String> getSubscribedUrls(String userId) throws Exception {
        OperationResult<ColumnList<String>> response;
        try {
            response = getKeyspace().prepareQuery(CF_SUBSCRIPTIONS).getKey(userId).execute();
        } catch (NotFoundException e) {
            logger.error("No record found for this user: " + userId);
            throw e;
        } catch (Exception t) {
            // Pass the exception as the last argument so the stack trace is
            // logged (string concatenation only logged e.toString()).
            logger.error("Exception occurred when fetching from Cassandra", t);
            throw t;
        }

        final List<String> items = new ArrayList<String>();
        if (response != null) {
            final ColumnList<String> columns = response.getResult();
            for (Column<String> column : columns) {
                items.add(column.getName());
            }
        }
        return items;
    }

    /**
     * Add feed url into Cassandra.
     */
    @Override
    public void subscribeUrl(String userId, String url) throws Exception {
        try {
            OperationResult<Void> opr = getKeyspace().prepareColumnMutation(CF_SUBSCRIPTIONS, userId, url)
                .putValue("1", null).execute();
            logger.info("Time taken to add to Cassandra (in ms): " + opr.getLatency(TimeUnit.MILLISECONDS));
        } catch (Exception e) {
            logger.error("Exception occurred when writing to Cassandra", e);
            throw e;
        }
    }

    /**
     * Delete feed url from Cassandra.
     */
    @Override
    public void unsubscribeUrl(String userId, String url) throws Exception {
        try {
            OperationResult<Void> opr = getKeyspace().prepareColumnMutation(CF_SUBSCRIPTIONS, userId, url)
                .deleteColumn().execute();
            logger.info("Time taken to delete from Cassandra (in ms): " + opr.getLatency(TimeUnit.MILLISECONDS));
        } catch (Exception e) {
            logger.error("Exception occurred when writing to Cassandra", e);
            throw e;
        }
    }

    /**
     * Lazily connects to Cassandra. Synchronized: the original unsynchronized
     * check-then-assign on the static field could build two contexts under
     * concurrent first calls.
     */
    private static synchronized Keyspace getKeyspace() throws Exception {
        if (ks == null) {
            try {
                AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
                    .forKeyspace(DynamicPropertyFactory.getInstance().getStringProperty(RSSConstants.CASSANDRA_KEYSPACE, null).get())
                    .withAstyanaxConfiguration(new AstyanaxConfigurationImpl()
                        .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE))
                    .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl("MyConnectionPool")
                        .setPort(DynamicPropertyFactory.getInstance().getIntProperty(RSSConstants.CASSANDRA_PORT, 0).get())
                        .setMaxConnsPerHost(DynamicPropertyFactory.getInstance().getIntProperty(RSSConstants.CASSANDRA_MAXCONNSPERHOST, 1).get())
                        .setSeeds(DynamicPropertyFactory.getInstance().getStringProperty(RSSConstants.CASSANDRA_HOST, "").get()
                            + ":"
                            + DynamicPropertyFactory.getInstance().getIntProperty(RSSConstants.CASSANDRA_PORT, 0).get()))
                    .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                    .buildKeyspace(ThriftFamilyFactory.getInstance());
                context.start();
                ks = context.getEntity();
            } catch (Exception e) {
                logger.error("Exception occurred when initializing Cassandra keyspace", e);
                throw e;
            }
        }
        return ks;
    }
}
3,959
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/impl/RSSImpl.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.impl;

import com.netflix.recipes.rss.RSS;
import com.netflix.recipes.rss.RSSItem;

import javax.xml.bind.annotation.*;
import java.util.List;

/**
 * Immutable implementation of {@link RSS}, annotated for XML binding so it can
 * be marshalled directly in REST responses.
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class RSSImpl implements RSS {

    private final String url;
    private final String title;
    private final List<RSSItem> items;

    /** No-arg constructor (used by the binding framework); all fields null. */
    public RSSImpl() {
        this(null, null, null);
    }

    public RSSImpl(String url, String title, List<RSSItem> items) {
        this.url = url;
        this.title = title;
        this.items = items;
    }

    @XmlElement(name = "url")
    public String getUrl() {
        return url;
    }

    @XmlElement(name = "title")
    public String getTitle() {
        return title;
    }

    @XmlElements({@XmlElement(name = "items", type = RSSItemImpl.class)})
    public List<RSSItem> getItems() {
        return items;
    }
}
3,960
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/impl/InMemoryStoreImpl.java
package com.netflix.recipes.rss.impl;

import com.netflix.recipes.rss.RSSStore;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * In-memory {@link RSSStore} used when Cassandra is not configured.
 * Mirrors the Cassandra data model: userId -> (feed url -> "1" marker).
 */
public class InMemoryStoreImpl implements RSSStore {

    // ConcurrentHashMap: this store is shared across request-handling threads,
    // and the original plain HashMap was not safe for concurrent mutation.
    private static final Map<String, Map<String, String>> map =
        new ConcurrentHashMap<String, Map<String, String>>();

    /** Returns the feed urls the given user is subscribed to (empty if none). */
    @Override
    public List<String> getSubscribedUrls(String userId) {
        List<String> urls = new ArrayList<String>();
        Map<String, String> feeds = map.get(userId);
        if (feeds != null) {
            urls.addAll(feeds.keySet());
        }
        return urls;
    }

    /** Subscribes the user to the given feed url (idempotent). */
    @Override
    public void subscribeUrl(String userId, String url) {
        Map<String, String> feeds = map.get(userId);
        if (feeds == null) {
            feeds = new ConcurrentHashMap<String, String>(1);
        }
        feeds.put(url, "1");
        map.put(userId, feeds);
    }

    /**
     * Removes the feed url from the user's subscriptions. A no-op for an
     * unknown user (the original threw NullPointerException in that case).
     */
    @Override
    public void unsubscribeUrl(String userId, String url) {
        Map<String, String> feeds = map.get(userId);
        if (feeds != null) {
            feeds.remove(url);
        }
    }
}
3,961
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/impl/SubscriptionsImpl.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.impl;

import com.netflix.recipes.rss.RSS;
import com.netflix.recipes.rss.Subscriptions;

import javax.xml.bind.annotation.*;
import java.util.List;

/**
 * Immutable implementation of {@link Subscriptions}, annotated for XML binding
 * so it can be marshalled directly in REST responses.
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class SubscriptionsImpl implements Subscriptions {

    // final for immutability, consistent with sibling RSSImpl/RSSItemImpl
    // (the class has no setters, so nothing could legitimately mutate them)
    private final String user;
    private final List<RSS> subscriptions;

    /** No-arg constructor (used by the binding framework); all fields null. */
    public SubscriptionsImpl() {
        this(null, null);
    }

    public SubscriptionsImpl(String user, List<RSS> subscriptions) {
        this.user = user;
        this.subscriptions = subscriptions;
    }

    @XmlElement(name = "user")
    public String getUser() {
        return user;
    }

    @XmlElements({@XmlElement(name = "subscriptions", type = RSSImpl.class)})
    public List<RSS> getSubscriptions() {
        return subscriptions;
    }
}
3,962
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/jersey
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/jersey/resources/MiddleTierResource.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.jersey.resources;

import com.netflix.recipes.rss.Subscriptions;
import com.netflix.recipes.rss.manager.RSSManager;
import com.netflix.servo.DefaultMonitorRegistry;
import com.netflix.servo.monitor.*;
import com.netflix.servo.stats.StatsConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.net.URLDecoder;
import java.util.concurrent.TimeUnit;

/**
 * Rest entry points for fetching/adding/deleting RSS feeds.
 * RSS Edge service will be calling these APIs.
 * Each endpoint increments a Servo request counter, records latency in a
 * stats timer, and increments an error counter on failure (HTTP 500).
 */
@Path("/middletier")
public class MiddleTierResource {

    private static final Logger logger = LoggerFactory.getLogger(MiddleTierResource.class);

    // JMX: com.netflix.servo.COUNTER.MiddleTierRSS_* — request counts per endpoint
    private static final Counter getRSSRequestCounter = new BasicCounter(MonitorConfig.builder("MiddleTierRSS_getRequestCounter").build());
    private static final Counter addRSSRequestCounter = new BasicCounter(MonitorConfig.builder("MiddleTierRSS_addRequestCounter").build());
    private static final Counter delRSSRequestCounter = new BasicCounter(MonitorConfig.builder("MiddleTierRSS_delRequestCounter").build());

    // JMX: com.netflix.servo.COUNTER.MiddleTierRSS_* — error counts per endpoint
    private static final Counter getRSSErrorCounter = new BasicCounter(MonitorConfig.builder("MiddleTierRSS_getErrorCounter").build());
    private static final Counter addRSSErrorCounter = new BasicCounter(MonitorConfig.builder("MiddleTierRSS_addErrorCounter").build());
    private static final Counter delRSSErrorCounter = new BasicCounter(MonitorConfig.builder("MiddleTierRSS_delErrorCounter").build());

    // JMX: com.netflix.servo.COUNTER.MiddleTierRSS_*
    // JMX: com.netflix.servo.MiddleTierRSS_* (95th and 99th percentile)
    private static final StatsTimer getRSSStatsTimer = new StatsTimer(MonitorConfig.builder("MiddleTierRSS_getStatsTimer").build(), new StatsConfig.Builder().build());
    private static final StatsTimer addRSSStatsTimer = new StatsTimer(MonitorConfig.builder("MiddleTierRSS_addStatsTimer").build(), new StatsConfig.Builder().build());
    private static final StatsTimer delRSSStatsTimer = new StatsTimer(MonitorConfig.builder("MiddleTierRSS_delStatsTimer").build(), new StatsConfig.Builder().build());

    // Register every monitor exactly once when the class is first loaded
    static {
        DefaultMonitorRegistry.getInstance().register(getRSSRequestCounter);
        DefaultMonitorRegistry.getInstance().register(addRSSRequestCounter);
        DefaultMonitorRegistry.getInstance().register(delRSSRequestCounter);
        DefaultMonitorRegistry.getInstance().register(getRSSErrorCounter);
        DefaultMonitorRegistry.getInstance().register(addRSSErrorCounter);
        DefaultMonitorRegistry.getInstance().register(delRSSErrorCounter);
        DefaultMonitorRegistry.getInstance().register(getRSSStatsTimer);
        DefaultMonitorRegistry.getInstance().register(addRSSStatsTimer);
        DefaultMonitorRegistry.getInstance().register(delRSSStatsTimer);
    }

    public MiddleTierResource() {
    }

    /**
     * GET /middletier/rss/user/{user}: returns the user's subscriptions
     * (feeds plus their contents) as JSON; 500 on any failure.
     */
    @GET
    @Path("/rss/user/{user}")
    @Produces({MediaType.APPLICATION_JSON})
    public Response fetchSubscriptions(final @PathParam("user") String user) {
        // Start timer
        Stopwatch stopwatch = getRSSStatsTimer.start();
        try {
            getRSSRequestCounter.increment();

            Subscriptions subscriptions = RSSManager.getInstance().getSubscriptions(user);
            return Response.ok(subscriptions).build();
        } catch (Exception e) {
            logger.error("Exception occurred when fetching subscriptions", e);
            getRSSErrorCounter.increment();
            return Response.serverError().build();
        } finally {
            stopwatch.stop();
            getRSSStatsTimer.record(stopwatch.getDuration(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
        }
    }

    /**
     * POST /middletier/rss/user/{user}?url=...: subscribes the user to the
     * (URL-encoded) feed url; 500 on any failure.
     */
    @POST
    @Path("/rss/user/{user}")
    @Produces({MediaType.APPLICATION_JSON})
    public Response subscribe(
            final @QueryParam("url") String url,
            final @PathParam("user") String user) {
        // Start timer
        Stopwatch stopwatch = addRSSStatsTimer.start();
        try {
            addRSSRequestCounter.increment();

            // url arrives percent-encoded from the edge tier
            String decodedUrl = URLDecoder.decode(url, "UTF-8");
            RSSManager.getInstance().addSubscription(user, decodedUrl);
            return Response.ok().build();
        } catch (Exception e) {
            logger.error("Exception occurred during subscription", e);
            addRSSErrorCounter.increment();
            return Response.serverError().build();
        } finally {
            stopwatch.stop();
            addRSSStatsTimer.record(stopwatch.getDuration(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
        }
    }

    /**
     * DELETE /middletier/rss/user/{user}?url=...: unsubscribes the user from
     * the (URL-encoded) feed url; 500 on any failure.
     */
    @DELETE
    @Path("/rss/user/{user}")
    @Produces({MediaType.APPLICATION_JSON})
    public Response unsubscribe(
            final @QueryParam("url") String url,
            final @PathParam("user") String user) {
        // Start timer
        Stopwatch stopwatch = delRSSStatsTimer.start();
        try {
            delRSSRequestCounter.increment();

            String decodedUrl = URLDecoder.decode(url, "UTF-8");
            RSSManager.getInstance().deleteSubscription(user, decodedUrl);
            return Response.ok().build();
        } catch (Exception e) {
            logger.error("Exception occurred during un-subscription", e);
            delRSSErrorCounter.increment();
            return Response.serverError().build();
        } finally {
            stopwatch.stop();
            delRSSStatsTimer.record(stopwatch.getDuration(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
        }
    }
}
3,963
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/test
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/test/utils/EmbeddedMiddleTierForTests.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.test.utils;

import com.google.common.io.Closeables;
import com.google.inject.Injector;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.governator.lifecycle.LifecycleManager;
import com.netflix.recipes.rss.server.MiddleTierServer;
import com.netflix.recipes.rss.util.RSSModule;

/**
 * Middle Tier Test helper: boots an embedded MiddleTierServer (Governator
 * lifecycle + Guice injection) against the "middletier"/"ci" Archaius
 * configuration, and tears it down again.
 */
public class EmbeddedMiddleTierForTests {

    // exposed so tests can reach the running server instance
    public MiddleTierServer middleTierServer;

    /** Starts the embedded server; call once before the tests run. */
    public void setUp() throws Exception {
        // select middletier-ci.properties via Archaius
        System.setProperty("archaius.deployment.applicationId", "middletier");
        System.setProperty("archaius.deployment.environment", "ci");

        Injector injector = LifecycleInjector.builder().withModules(new RSSModule()).createInjector();
        LifecycleManager lifecycleManager = injector.getInstance(LifecycleManager.class);
        lifecycleManager.start();

        middleTierServer = injector.getInstance(MiddleTierServer.class);
        middleTierServer.start();
    }

    /** Stops the embedded server, swallowing close failures. */
    public void tearDown() throws Exception {
        Closeables.closeQuietly(middleTierServer);
    }
}
3,964
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/server/MiddleTierServer.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.server;

import com.netflix.karyon.spi.PropertyNames;

/**
 * Netty-based middle tier server; all behavior comes from BaseNettyServer.
 *
 * @author Chris Fregly (chris@fregly.com)
 */
public class MiddleTierServer extends BaseNettyServer {

    public MiddleTierServer() {
    }

    /** Standalone entry point: configures Archaius/Karyon and starts the server. */
    public static void main(String args[]) throws Exception {
        // select the "middletier" Archaius property files
        System.setProperty("archaius.deployment.applicationId", "middletier");
        // restrict Karyon bootstrap scanning to com.netflix packages
        System.setProperty(PropertyNames.SERVER_BOOTSTRAP_BASE_PACKAGES_OVERRIDE, "com.netflix");

        MiddleTierServer middleTierServer = new MiddleTierServer();
        middleTierServer.start();
    }
}
3,965
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/manager/MiddleTierHealthCheckHandler.java
package com.netflix.recipes.rss.manager;

import com.netflix.karyon.spi.HealthCheckHandler;

/**
 * Karyon health check for the middle tier: delegates to RSSManager, which
 * reports an HTTP-style status code (200 healthy, 500 otherwise).
 */
public class MiddleTierHealthCheckHandler implements HealthCheckHandler {

    public int getStatus() {
        return RSSManager.getInstance().getStatus();
    }
}
3,966
0
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-middletier/src/main/java/com/netflix/recipes/rss/manager/RSSManager.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.manager;

import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.commons.io.Charsets;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import com.netflix.client.ClientFactory;
import com.netflix.config.DynamicPropertyFactory;
import com.netflix.karyon.spi.HealthCheckHandler;
import com.netflix.niws.client.http.HttpClientRequest;
import com.netflix.niws.client.http.HttpClientResponse;
import com.netflix.niws.client.http.RestClient;
import com.netflix.recipes.rss.RSS;
import com.netflix.recipes.rss.RSSConstants;
import com.netflix.recipes.rss.RSSItem;
import com.netflix.recipes.rss.RSSStore;
import com.netflix.recipes.rss.Subscriptions;
import com.netflix.recipes.rss.impl.CassandraStoreImpl;
import com.netflix.recipes.rss.impl.InMemoryStoreImpl;
import com.netflix.recipes.rss.impl.RSSImpl;
import com.netflix.recipes.rss.impl.RSSItemImpl;
import com.netflix.recipes.rss.impl.SubscriptionsImpl;

/**
 * RSS Manager that
 * 1) Fetches content from RSS feeds using Ribbon
 * 2) Parses RSS feeds
 * 3) Persists feed urls into
 *    a) Cassandra using Astyanax (or)
 *    b) InMemoryStore
 */
public class RSSManager implements HealthCheckHandler {

    // backing store, chosen once at construction from the rss.store property
    private RSSStore store;

    private static final Logger logger = LoggerFactory.getLogger(RSSManager.class);

    private static final RSSManager instance = new RSSManager();

    private RSSManager() {
        if (RSSConstants.RSS_STORE_CASSANDRA.equals(
                DynamicPropertyFactory.getInstance().getStringProperty(RSSConstants.RSS_STORE,
                    RSSConstants.RSS_STORE_CASSANDRA).get())) {
            store = new CassandraStoreImpl();
        } else {
            store = new InMemoryStoreImpl();
        }
    }

    public static RSSManager getInstance() {
        return instance;
    }

    /**
     * Fetches the user's subscriptions: every subscribed url is fetched and
     * parsed; feeds with no items are silently dropped from the result.
     */
    public Subscriptions getSubscriptions(String userId) throws Exception {
        List<String> feedUrls = store.getSubscribedUrls(userId);

        List<RSS> feeds = new ArrayList<RSS>(feedUrls.size());
        for (String feedUrl : feedUrls) {
            // call the private helper directly; routing through
            // getInstance() from an instance method was redundant
            RSS rss = fetchRSSFeed(feedUrl);
            if (rss.getItems() != null && !rss.getItems().isEmpty()) {
                feeds.add(rss);
            }
        }
        return new SubscriptionsImpl(userId, feeds);
    }

    /**
     * Add subscription.
     * @throws IllegalArgumentException if the url is null
     */
    public void addSubscription(String user, String decodedUrl) throws Exception {
        if (decodedUrl == null) throw new IllegalArgumentException("url cannot be null");
        store.subscribeUrl(user, decodedUrl);
    }

    /**
     * Delete subscription.
     * @throws IllegalArgumentException if the url is null
     */
    public void deleteSubscription(String user, String decodedUrl) throws Exception {
        if (decodedUrl == null) throw new IllegalArgumentException("url cannot be null");
        store.unsubscribeUrl(user, decodedUrl);
    }

    /**
     * Fetch the RSS feed content using Ribbon. Network failures are logged and
     * result in an empty RSSImpl (via parseRSS(url, null)).
     */
    private RSS fetchRSSFeed(String url) {
        RestClient client = (RestClient) ClientFactory.getNamedClient(RSSConstants.MIDDLETIER_REST_CLIENT);
        HttpClientResponse response;
        String rssData = null;
        try {
            HttpClientRequest request = HttpClientRequest.newBuilder().setUri(new URI(url)).build();
            response = client.execute(request);
            if (response != null) {
                rssData = IOUtils.toString(response.getRawEntity(), Charsets.UTF_8);
                logger.info("Status code for " + response.getRequestedURI() + " : " + response.getStatus());
            }
        } catch (URISyntaxException e) {
            logger.error("Exception occurred when setting the URI", e);
        } catch (Exception e) {
            logger.error("Exception occurred when executing the HTTP request", e);
        }
        return parseRSS(url, rssData);
    }

    /**
     * Parses the RSS feed XML and returns a POJO. Returns an empty RSSImpl
     * when the input is null or unparseable. DOCTYPE declarations are
     * disallowed to prevent XXE.
     */
    private RSS parseRSS(String url, String rss) {
        // Error case
        if (rss == null) return new RSSImpl();

        RSS rssItems = null;
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        String FEATURE = "http://apache.org/xml/features/disallow-doctype-decl";
        try {
            dbf.setFeature(FEATURE, true);
            DocumentBuilder db = dbf.newDocumentBuilder();
            try {
                InputSource is = new InputSource(new StringReader(rss));
                Document dom = db.parse(is);
                Element docEle = dom.getDocumentElement();

                List<RSSItem> items = new ArrayList<RSSItem>();
                // null-safe lookup: the original NPE'd on feeds missing a
                // <title> (or items missing <title>/<link>/<description>)
                String title = textOf(docEle, "title");

                NodeList nl = docEle.getElementsByTagName("item");
                if (nl != null && nl.getLength() > 0) {
                    for (int i = 0; i < nl.getLength(); i++) {
                        Element el = (Element) nl.item(i);
                        items.add(new RSSItemImpl(
                            textOf(el, "title"),
                            textOf(el, "link"),
                            textOf(el, "description")));
                    }
                }
                rssItems = new RSSImpl(url, title, items);
            } catch (SAXException e) {
                logger.error("Exception occurred during parsing the RSS feed", e);
            } catch (IOException e) {
                logger.error("Exception occurred during fetching the RSS feed", e);
            }
        } catch (ParserConfigurationException e) {
            logger.error("Exception occurred during parsing the RSS feed", e);
        }

        if (rssItems == null) {
            rssItems = new RSSImpl();
        }
        return rssItems;
    }

    /** Text content of the first descendant with the given tag, or null if absent. */
    private static String textOf(Element parent, String tag) {
        NodeList nodes = parent.getElementsByTagName(tag);
        return (nodes != null && nodes.getLength() > 0) ? nodes.item(0).getTextContent() : null;
    }

    /** Health check: 200 when the store is initialized, 500 otherwise. */
    public int getStatus() {
        return store == null ? 500 : 200;
    }
}
3,967
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/RSSConstants.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss; public class RSSConstants { // Edge constants public static final String EDGE_WEB_RESOURCE_ROOT_PATH = "edge"; public static final String EDGE_WEB_RESOURCE_GET_PATH = "get"; // Middletier constants public static final String MIDDLETIER_EUREKA_SERVICE_NAME = "middletier"; // Hystrix public static final String MIDDLETIER_HYSTRIX_THREAD_POOL = "MiddleTierThreadPool"; public static final String HYSTRIX_RSS_THREAD_POOL = "RSSThreadPool"; public static final String HYSTRIX_RSS_MUTATIONS_GROUP = "RSSMutationsGroup"; public static final String HYSTRIX_RSS_GET_GROUP = "RSSGetGroup"; public static final String HYSTRIX_RSS_ADD_COMMAND_KEY = "RSSAdd"; public static final String HYSTRIX_RSS_DEL_COMMAND_KEY = "RSSDel"; public static final String HYSTRIX_RSS_GET_COMMAND_KEY = "RSSGet"; public static final String HYSTRIX_STREAM_PATH = "/hystrix.stream"; public static final String MIDDLETIER_WEB_RESOURCE_ROOT_PATH = "middletier"; public static final String MIDDLETIER_WEB_RESOURCE_GET_PATH = "get"; // Rest Client public static final String MIDDLETIER_REST_CLIENT = "middletier-client"; // Default user name public static final String DEFUALT_USER = "default"; // REST Entry points public static final String RSS_ENTRY_POINT = "/rss/user/" + DEFUALT_USER; // RSS Store public static final String RSS_STORE = "rss.store"; public static final String RSS_STORE_CASSANDRA = "cassandra"; public 
static final String RSS_STORE_INMEMORY = "inmemory"; // Cassandra meta data public static final String CASSANDRA_HOST = "cassandra.host"; public static final String CASSANDRA_PORT = "cassandra.port"; public static final String CASSANDRA_MAXCONNSPERHOST = "cassandra.maxConnectionsPerHost"; public static final String CASSANDRA_KEYSPACE = "cassandra.keyspace"; public static final String CASSANDRA_COLUMNFAMILY = "cassandra.columnfamily"; // Jetty public static final String JETTY_HTTP_PORT = "jetty.http.port"; public static final String WEBAPPS_DIR = "rss-edge/webapp"; }
3,968
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/RSSConfiguration.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.inject.Singleton; import com.netflix.config.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.PostConstruct; import java.io.IOException; import com.netflix.governator.annotations.AutoBindSingleton; /** * RSSConfiguration follows a hierarchy as follows: <appId>-<env>.properties * (optional: <env>=local|dev|qa|prod) <appId>.properties (default values) * System Properties (-D) * * JMX: All properties can be viewed and updated (on a per instance basis) here: * Config-com.netflix.config.jmx.BaseConfigMBean * * @author Chris Fregly (chris@fregly.com) */ @AutoBindSingleton(AppConfiguration.class) public class RSSConfiguration implements AppConfiguration { private static final Logger logger = LoggerFactory.getLogger(RSSConfiguration.class); private boolean initialized = false; public RSSConfiguration() { } public String getString(String key, String defaultValue) { final DynamicStringProperty property = DynamicPropertyFactory.getInstance().getStringProperty(key, defaultValue); return property.get(); } public int getInt(String key, int defaultValue) { final DynamicIntProperty property = DynamicPropertyFactory.getInstance().getIntProperty(key, defaultValue); return property.get(); } 
public long getLong(String key, int defaultValue) { final DynamicLongProperty property = DynamicPropertyFactory.getInstance().getLongProperty(key, defaultValue); return property.get(); } public boolean getBoolean(String key, boolean defaultValue) { final DynamicBooleanProperty property = DynamicPropertyFactory.getInstance().getBooleanProperty(key, defaultValue); return property.get(); } @VisibleForTesting public void setOverrideProperty(String key, Object value) { Preconditions.checkState(initialized, "Must initialize RSSConfiguration before use."); ((ConcurrentCompositeConfiguration) ConfigurationManager .getConfigInstance()).setOverrideProperty(key, value); } public void close() { } }
3,969
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/AppConfiguration.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss; import java.io.Closeable; /** * @author Chris Fregly (chris@fregly.com) */ public interface AppConfiguration extends Closeable { public String getString(String key, String defaultValue); public int getInt(String key, int defaultValue); public long getLong(String key, int defaultValue); public boolean getBoolean(String key, boolean defaultValue); /** * Sets an instance-level override. This will trump everything including * dynamic properties and system properties. Useful for tests. * * @param key * @param value */ public void setOverrideProperty(String key, Object value); }
3,970
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/util/DescriptiveThreadFactory.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss.util; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; /** * Adds descriptive thread names for debugging purposes. Allows priority and daemon to be set, as well. * * @author Chris Fregly (chris@fregly.com) */ public class DescriptiveThreadFactory implements ThreadFactory { private final String description; private final int priority; private final boolean daemon; private final AtomicInteger n = new AtomicInteger(1); public DescriptiveThreadFactory(String description) { this(description, Thread.NORM_PRIORITY, false); } public DescriptiveThreadFactory(String description, int priority, boolean daemon) { this.description = description; this.priority = priority; this.daemon = daemon; } public Thread newThread(Runnable runnable) { String threadDescription = description + "-" + n.getAndIncrement(); Thread thread = new Thread(runnable, threadDescription); thread.setPriority(priority); thread.setDaemon(daemon); return thread; } }
3,971
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/util/RSSModule.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.util;

import com.netflix.recipes.rss.AppConfiguration;
import com.netflix.recipes.rss.RSSConfiguration;

import com.google.inject.AbstractModule;

/**
 * Guice module binding the {@link AppConfiguration} interface to its
 * Archaius-backed {@link RSSConfiguration} implementation.
 */
public class RSSModule extends AbstractModule {

    @Override
    protected void configure() {
        bind(AppConfiguration.class).to(RSSConfiguration.class);
    }
}
3,972
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/server/BaseJettyServer.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss.server; import java.io.Closeable; import org.apache.jasper.servlet.JspServlet; import org.mortbay.jetty.Server; import org.mortbay.jetty.servlet.Context; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.inject.Injector; import com.netflix.blitz4j.LoggingConfiguration; import com.netflix.config.ConfigurationManager; import com.netflix.config.DynamicPropertyFactory; import com.netflix.hystrix.contrib.metrics.eventstream.HystrixMetricsStreamServlet; import com.netflix.karyon.server.KaryonServer; import com.netflix.recipes.rss.RSSConstants; /** * Base Jetty Server * * @author Chris Fregly (chris@fregly.com) */ public class BaseJettyServer implements Closeable { static { LoggingConfiguration.getInstance().configure(); } private static final Logger logger = LoggerFactory.getLogger(BaseJettyServer.class); private final Server jettyServer; private final KaryonServer karyonServer; protected final Injector injector; public BaseJettyServer() { System.setProperty(DynamicPropertyFactory.ENABLE_JMX, "true"); this.karyonServer = new KaryonServer(); this.injector = karyonServer.initialize(); this.jettyServer = new Server(); } public void start() { final int port = ConfigurationManager.getConfigInstance().getInt(RSSConstants.JETTY_HTTP_PORT, Integer.MIN_VALUE); final Context context = new Context(jettyServer, "/", Context.SESSIONS); 
context.setResourceBase(RSSConstants.WEBAPPS_DIR); context.setClassLoader(Thread.currentThread().getContextClassLoader()); context.addServlet(JspServlet.class, "*.jsp"); // Enable hystrix.stream context.addServlet(HystrixMetricsStreamServlet.class, RSSConstants.HYSTRIX_STREAM_PATH); final Server server = new Server(port); server.setHandler(context); try { karyonServer.start(); server.start(); } catch (Exception exc) { throw new RuntimeException("Cannot start karyon server ...", exc); } } public void close() { try { jettyServer.stop(); karyonServer.close(); } catch (Exception exc) { logger.error("Error stopping jetty ...", exc); } LoggingConfiguration.getInstance().stop(); } }
3,973
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/server/BaseNettyServer.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.server;

import java.io.Closeable;

import com.google.common.io.Closeables;
import com.google.inject.Injector;
import com.netflix.blitz4j.LoggingConfiguration;
import com.netflix.config.ConfigurationManager;
import com.netflix.config.DynamicPropertyFactory;
import com.netflix.karyon.server.KaryonServer;
import com.netflix.recipes.rss.netty.NettyHandlerContainer;
import com.netflix.recipes.rss.netty.NettyServer;
import com.sun.jersey.api.container.ContainerFactory;
import com.sun.jersey.api.core.PackagesResourceConfig;

/**
 * Base Netty server: boots Karyon, then serves Jersey resources (discovered
 * by package scan) over an embedded Netty HTTP server.
 *
 * @author Chris Fregly (chris@fregly.com)
 */
public class BaseNettyServer implements Closeable {

    static {
        LoggingConfiguration.getInstance().configure();
    }

    // Assigned in start(); null until then.
    public NettyServer nettyServer;
    public final KaryonServer karyonServer;

    // Bind address/port resolved from configuration in start().
    public String host;
    public int port;

    protected final Injector injector;
    //protected AppConfiguration config;

    public BaseNettyServer() {
        // This must be set before karyonServer.initialize() otherwise the
        // archaius properties will not be available in JMX/jconsole
        System.setProperty(DynamicPropertyFactory.ENABLE_JMX, "true");

        this.karyonServer = new KaryonServer();
        this.injector = karyonServer.initialize();
    }

    /**
     * Reads host/port and the Jersey resource package from configuration,
     * builds and binds the Netty server, then starts Karyon.
     *
     * @throws RuntimeException wrapping any Karyon startup failure
     */
    public void start() {
        this.host = ConfigurationManager.getConfigInstance().getString("netty.http.host", "not-found-in-configuration");
        this.port = ConfigurationManager.getConfigInstance().getInt("netty.http.port", Integer.MIN_VALUE);

        // Jersey resources are discovered by scanning the configured package.
        final PackagesResourceConfig rcf = new PackagesResourceConfig(ConfigurationManager.getConfigInstance().getString("jersey.resources.package","not-found-in-configuration"));

        // Bridge Jersey into Netty via the NettyHandlerContainer adapter.
        nettyServer = NettyServer
                .builder()
                .host(host)
                .port(port)
                .addHandler(
                        "jerseyHandler",
                        ContainerFactory.createContainer(
                                NettyHandlerContainer.class, rcf))
                .numBossThreads(NettyServer.cpus)
                .numWorkerThreads(NettyServer.cpus * 4).build();

        try {
            karyonServer.start();
        } catch (Exception exc) {
            throw new RuntimeException("Cannot start karyon server.", exc);
        }
    }

    /** Best-effort shutdown: Netty first, then Karyon, then logging. */
    public void close() {
        Closeables.closeQuietly(nettyServer);
        Closeables.closeQuietly(karyonServer);
        LoggingConfiguration.getInstance().stop();
    }
}
3,974
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/netty/JerseyContainerProvider.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.recipes.rss.netty; import com.sun.jersey.api.container.ContainerException; import com.sun.jersey.api.core.ResourceConfig; import com.sun.jersey.spi.container.ContainerProvider; import com.sun.jersey.spi.container.WebApplication; /** * This class is referenced in the following jersey configuration file: * * src/main/resources/META-INF/services/com.sun.jersey.spi.container. * ContainerProvider * * @author Chris Fregly (chris@fregly.com) */ public class JerseyContainerProvider implements ContainerProvider<NettyHandlerContainer> { public NettyHandlerContainer createContainer(Class<NettyHandlerContainer> clazz, ResourceConfig config,WebApplication webApp) throws ContainerException { if (clazz != NettyHandlerContainer.class) { return null; } return new NettyHandlerContainer(webApp, config); } }
3,975
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/netty/NettyHandlerContainer.java
/**
 * The MIT License
 *
 * Copyright (c) 2009 Carl Bystrom
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Snagged from the following RenderShark component:
 * https://code.google.com/p/rendershark
 * /source/browse/trunk/rendershark/src/main
 * /java/com/sun/jersey/server/impl/container/netty/NettyHandlerContainer.java
 *
 */
package com.netflix.recipes.rss.netty;

import com.sun.jersey.api.core.ResourceConfig;
import com.sun.jersey.core.header.InBoundHeaders;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseWriter;
import com.sun.jersey.spi.container.WebApplication;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferInputStream;
import org.jboss.netty.buffer.ChannelBufferOutputStream;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.*;
import org.jboss.netty.channel.ChannelHandler.Sharable;
import org.jboss.netty.handler.codec.http.*;

import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Netty upstream handler that adapts incoming Netty HTTP requests into
 * Jersey {@link ContainerRequest}s and writes Jersey responses back to the
 * channel. {@code @Sharable}: one instance serves all channels, which is safe
 * because the only state is the immutable application/baseUri pair.
 */
@Sharable
public class NettyHandlerContainer extends SimpleChannelUpstreamHandler {

    // Optional ResourceConfig property that pins the base URI; when absent,
    // the base URI is derived per-request from the Host header.
    public static final String PROPERTY_BASE_URI = "com.sun.jersey.server.impl.container.netty.baseUri";

    private final WebApplication application;
    private final String baseUri;

    // Package-private: constructed via JerseyContainerProvider.
    NettyHandlerContainer(WebApplication application, ResourceConfig resourceConfig) {
        this.application = application;
        this.baseUri = (String) resourceConfig.getProperty(PROPERTY_BASE_URI);
    }

    /**
     * Buffers the whole Jersey response in memory, then writes it to the
     * channel in one shot when finish() is called.
     */
    private final static class Writer implements ContainerResponseWriter {

        private final Channel channel;
        private HttpResponse response;

        private Writer(Channel channel) {
            this.channel = channel;
        }

        // Translates Jersey status + headers into a Netty HttpResponse and
        // returns an OutputStream that accumulates the body in a dynamic buffer.
        public OutputStream writeStatusAndHeaders(long contentLength,
                ContainerResponse cResponse) throws IOException {
            // NOTE(review): response is declared HTTP/1.0 — presumably to force
            // connection-close semantics; confirm before changing.
            response = new DefaultHttpResponse(HttpVersion.HTTP_1_0,
                    HttpResponseStatus.valueOf(cResponse.getStatus()));

            for (Map.Entry<String, List<Object>> e : cResponse.getHttpHeaders().entrySet()) {
                List<String> values = new ArrayList<String>();
                for (Object v : e.getValue())
                    values.add(ContainerResponse.getHeaderValue(v));
                response.setHeader(e.getKey(), values);
            }

            ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
            response.setContent(buffer);

            return new ChannelBufferOutputStream(buffer);
        }

        public void finish() throws IOException {
            // Streaming is not supported. Entire response will be written
            // downstream once finish() is called.
            channel.write(response).addListener(ChannelFutureListener.CLOSE);
        }
    }

    /**
     * Entry point per HTTP request: builds absolute base/request URIs and
     * hands the request to the Jersey runtime.
     */
    @Override
    public void messageReceived(ChannelHandlerContext context, MessageEvent e)
            throws Exception {
        HttpRequest request = (HttpRequest) e.getMessage();

        String base = getBaseUri(request);
        URI baseUri = new URI(base);
        // base always ends with "/" (see getBaseUri), so strip it before
        // appending the request URI to avoid a double slash.
        URI requestUri = new URI(base.substring(0, base.length() - 1)
                + request.getUri());

        ContainerRequest cRequest = new ContainerRequest(application, request
                .getMethod().getName(), baseUri, requestUri,
                getHeaders(request), new ChannelBufferInputStream(
                        request.getContent()));

        application.handleRequest(cRequest, new Writer(e.getChannel()));
    }

    // Configured base URI if set; otherwise reconstructed from the Host header.
    private String getBaseUri(HttpRequest request) {
        if (baseUri != null) {
            return baseUri;
        }

        return "http://" + request.getHeader(HttpHeaders.Names.HOST) + "/";
    }

    // Copies all HTTP headers (multi-valued) into Jersey's header map.
    private InBoundHeaders getHeaders(HttpRequest request) {
        InBoundHeaders headers = new InBoundHeaders();

        for (String name : request.getHeaderNames()) {
            headers.put(name, request.getHeaders(name));
        }

        return headers;
    }
}
3,976
0
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss
Create_ds/recipes-rss/rss-core/src/main/java/com/netflix/recipes/rss/netty/NettyServer.java
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.recipes.rss.netty;

import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.netflix.recipes.rss.util.DescriptiveThreadFactory;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.*;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.channel.group.DefaultChannelGroup;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.codec.http.HttpResponseEncoder;
import org.jboss.netty.handler.execution.ExecutionHandler;
import org.jboss.netty.logging.InternalLoggerFactory;
import org.jboss.netty.logging.Slf4JLoggerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Closeable;
import java.lang.Thread.UncaughtExceptionHandler;
import java.net.InetSocketAddress;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * NettyServer and Builder: wraps a Netty 3 {@link ServerBootstrap} behind a
 * fluent builder; construct via {@link #builder()} — the constructor is
 * private and {@code build()} binds the socket immediately.
 *
 * @author Chris Fregly (chris@fregly.com)
 */
public final class NettyServer implements Closeable {
    private static final Logger logger = LoggerFactory.getLogger(NettyServer.class);

    // Number of available processors; used to size boss/worker pools.
    public static final int cpus = Runtime.getRuntime().availableProcessors();

    // Holds the bound server channel(s); closed as a group in close().
    private ChannelGroup channelGroup = new DefaultChannelGroup();

    static {
        // Route Netty's internal logging through SLF4J.
        InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory());

        // JVM-wide handler: log every uncaught exception; escalate OOM so the
        // process dies rather than limping along. NOTE(review): this mutates
        // global JVM state as a side effect of loading this class.
        Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
            public void uncaughtException(Thread thread, Throwable exc) {
                logger.error("Thread Exc {}", thread.getName(), exc);
                for (Throwable exc2 = exc; exc2 != null; exc2 = exc2.getCause()) {
                    if (exc2 instanceof OutOfMemoryError)
                        throw new RuntimeException("OutOfMemoryError");
                }
            }
        });
    }

    // NOTE(review): find(1) assumes the server channel has id 1 — fragile if
    // Netty assigns a different id or multiple channels are added; verify.
    public String getListenHost() {
        return ((InetSocketAddress) channelGroup.find(1).getLocalAddress()).getHostName();
    }

    public int getListenPort() {
        return ((InetSocketAddress) channelGroup.find(1).getLocalAddress()).getPort();
    }

    /** Registers a channel so close() will shut it down with the group. */
    public void addChannel(Channel channel) {
        channelGroup.add(channel);
    }

    /**
     * @return Builder object which will help build the client and server
     */
    public static Builder builder() {
        return new Builder();
    }

    /** Fluent configuration; build() binds the socket and returns the server. */
    public static class Builder {

        private String host;
        private int port = 0; // default is any port

        private Map<String, ChannelHandler> handlers = Maps.newHashMap();

        private ChannelHandler encoder = new HttpResponseEncoder();
        private ChannelHandler decoder = new HttpRequestDecoder();

        private int numBossThreads = cpus; // IO boss threads
        private int numWorkerThreads = cpus * 4; // worker threads

        public Builder host(String host) {
            this.host = host;
            return this;
        }

        public Builder port(int port) {
            this.port = port;
            return this;
        }

        /** Adds a named application handler appended after decoder/encoder. */
        public Builder addHandler(String name, ChannelHandler handler) {
            Preconditions.checkNotNull(handler);
            handlers.put(name, handler);
            return this;
        }

        public Builder encoder(ChannelHandler encoder) {
            this.encoder = encoder;
            return this;
        }

        public Builder decoder(ChannelHandler decoder) {
            this.decoder = decoder;
            return this;
        }

        public Builder numBossThreads(int numBossThreads) {
            this.numBossThreads = numBossThreads;
            return this;
        }

        public Builder numWorkerThreads(int numWorkerThreads) {
            this.numWorkerThreads = numWorkerThreads;
            return this;
        }

        /**
         * Builds and starts netty
         */
        public NettyServer build() {
            PipelineFactory factory = new PipelineFactory(handlers, encoder,
                    decoder, numBossThreads);

            // Fixed-size pools with descriptive thread names for debugging.
            ThreadPoolExecutor bossPool = new ThreadPoolExecutor(
                    numBossThreads, numBossThreads, 60, TimeUnit.SECONDS,
                    new LinkedBlockingQueue<Runnable>(),
                    new DescriptiveThreadFactory("Boss-Thread"));

            ThreadPoolExecutor workerPool = new ThreadPoolExecutor(
                    numWorkerThreads, numWorkerThreads, 60, TimeUnit.SECONDS,
                    new LinkedBlockingQueue<Runnable>(),
                    new DescriptiveThreadFactory("Worker-Thread"));

            ChannelFactory nioServer = new NioServerSocketChannelFactory(
                    bossPool, workerPool, numWorkerThreads);

            ServerBootstrap serverBootstrap = new ServerBootstrap(nioServer);
            serverBootstrap.setOption("reuseAddress", true);
            serverBootstrap.setOption("keepAlive", true);
            serverBootstrap.setPipelineFactory(factory);

            // Binds immediately; the returned channel is tracked for close().
            Channel serverChannel = serverBootstrap.bind(new InetSocketAddress(
                    host, port));

            logger.info("Started netty server {}:{}", host, port);

            NettyServer server = new NettyServer();
            server.addChannel(serverChannel);

            return server;
        }
    }

    /**
     * Builds the per-connection pipeline:
     * executionHandler -> decoder -> encoder -> application handlers.
     */
    public static class PipelineFactory implements ChannelPipelineFactory {

        static final String CHANNEL_HANDLERS = "channelHandlers";
        static final String ENCODER_NAME = "encoder";
        static final String DECODER_NAME = "decoder";

        final ChannelHandler executionHandler;
        final Map<String, ChannelHandler> handlers;
        final ChannelHandler encoder;
        final ChannelHandler decoder;

        // numThreads == 0 disables the execution handler (client-side use).
        public PipelineFactory(Map<String, ChannelHandler> handlers,
                ChannelHandler encoder, ChannelHandler decoder, int numThreads) {
            this.handlers = handlers;
            this.encoder = encoder;
            this.decoder = decoder;

            if (numThreads != 0) {
                // Offloads application handlers from IO threads.
                ThreadPoolExecutor executorThreadPool = new ThreadPoolExecutor(
                        NettyServer.cpus, NettyServer.cpus * 4, 60,
                        TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(),
                        new DescriptiveThreadFactory("Executor-Thread"));

                this.executionHandler = new ExecutionHandler(executorThreadPool);
            } else {
                this.executionHandler = null;
            }
        }

        public ChannelPipeline getPipeline() throws Exception {
            ChannelPipeline pipeline = Channels.pipeline();

            // NOTE(review): if constructed with numThreads == 0,
            // executionHandler is null here — this factory appears to only be
            // used with a non-zero thread count; confirm before reusing.
            pipeline.addLast("executionHandler", executionHandler);
            pipeline.addLast(DECODER_NAME, decoder);
            pipeline.addLast(ENCODER_NAME, encoder);

            for (Entry<String, ChannelHandler> handler : handlers.entrySet()) {
                pipeline.addLast(handler.getKey(), handler.getValue());
            }

            return pipeline;
        }
    }

    /** Client variant: same pipeline minus the execution handler. */
    public static class ClientPipelineFactory extends PipelineFactory {

        public ClientPipelineFactory(Map<String, ChannelHandler> handlers,
                ChannelHandler encoder, ChannelHandler decoder) {
            super(handlers, encoder, decoder, 0);
        }

        public ChannelPipeline getPipeline() throws Exception {
            ChannelPipeline pipeline = Channels.pipeline();

            pipeline.addLast(DECODER_NAME, decoder);
            pipeline.addLast(ENCODER_NAME, encoder);

            for (Entry<String, ChannelHandler> handler : handlers.entrySet()) {
                pipeline.addLast(handler.getKey(), handler.getValue());
            }

            return pipeline;
        }
    }

    /** Closes all tracked channels; does not shut down the thread pools. */
    public void close() {
        channelGroup.close();
    }

    // Instances are created only through Builder.build().
    private NettyServer() {
    }
}
3,977
0
Create_ds/airbnb-spark-thrift/src/test/scala/com/airbnb/spark
Create_ds/airbnb-spark-thrift/src/test/scala/com/airbnb/spark/thrift/TestEnum.java
/**
 * Autogenerated by Thrift Compiler (0.9.2)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package com.airbnb.spark.thrift;


import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;

// NOTE(review): generated code — change the Thrift IDL and regenerate rather
// than editing this file; edits here are lost on the next codegen run.
public enum TestEnum implements org.apache.thrift.TEnum {
  TWEET(0),
  RETWEET(2),
  DM(10),
  REPLY(11);

  private final int value;

  private TestEnum(int value) {
    this.value = value;
  }

  /**
   * Get the integer value of this enum value, as defined in the Thrift IDL.
   */
  public int getValue() {
    return value;
  }

  /**
   * Find a the enum type by its integer value, as defined in the Thrift IDL.
   * @return null if the value is not found.
   */
  public static TestEnum findByValue(int value) { 
    switch (value) {
      case 0:
        return TWEET;
      case 2:
        return RETWEET;
      case 10:
        return DM;
      case 11:
        return REPLY;
      default:
        return null;
    }
  }
}
3,978
0
Create_ds/airbnb-spark-thrift/src/test/scala/com/airbnb/spark
Create_ds/airbnb-spark-thrift/src/test/scala/com/airbnb/spark/thrift/StructSimple.java
/** * Autogenerated by Thrift Compiler (0.9.2) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package com.airbnb.spark.thrift; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2017-4-21") public class StructSimple implements org.apache.thrift.TBase<StructSimple, StructSimple._Fields>, java.io.Serializable, Cloneable, Comparable<StructSimple> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StructSimple"); private static final org.apache.thrift.protocol.TField ID16_FIELD_DESC = new org.apache.thrift.protocol.TField("id16", org.apache.thrift.protocol.TType.I16, (short)1); private static final org.apache.thrift.protocol.TField ID32_FIELD_DESC = new org.apache.thrift.protocol.TField("id32", org.apache.thrift.protocol.TType.I32, (short)2); private static final org.apache.thrift.protocol.TField ID64_FIELD_DESC = new org.apache.thrift.protocol.TField("id64", org.apache.thrift.protocol.TType.I64, (short)3); private static final 
org.apache.thrift.protocol.TField BIN1_FIELD_DESC = new org.apache.thrift.protocol.TField("bin1", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField B1_FIELD_DESC = new org.apache.thrift.protocol.TField("b1", org.apache.thrift.protocol.TType.BOOL, (short)5); private static final org.apache.thrift.protocol.TField D1_FIELD_DESC = new org.apache.thrift.protocol.TField("d1", org.apache.thrift.protocol.TType.DOUBLE, (short)6); private static final org.apache.thrift.protocol.TField STR1_FIELD_DESC = new org.apache.thrift.protocol.TField("str1", org.apache.thrift.protocol.TType.STRING, (short)7); private static final org.apache.thrift.protocol.TField L1_FIELD_DESC = new org.apache.thrift.protocol.TField("l1", org.apache.thrift.protocol.TType.LIST, (short)8); private static final org.apache.thrift.protocol.TField M1_FIELD_DESC = new org.apache.thrift.protocol.TField("m1", org.apache.thrift.protocol.TType.MAP, (short)9); private static final org.apache.thrift.protocol.TField S1_FIELD_DESC = new org.apache.thrift.protocol.TField("s1", org.apache.thrift.protocol.TType.SET, (short)10); private static final org.apache.thrift.protocol.TField F1_FIELD_DESC = new org.apache.thrift.protocol.TField("f1", org.apache.thrift.protocol.TType.STRUCT, (short)11); private static final org.apache.thrift.protocol.TField FOO_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("fooList", org.apache.thrift.protocol.TType.LIST, (short)12); private static final org.apache.thrift.protocol.TField FOO_MAP_FIELD_DESC = new org.apache.thrift.protocol.TField("fooMap", org.apache.thrift.protocol.TType.MAP, (short)13); private static final org.apache.thrift.protocol.TField OPTION_STR_FIELD_DESC = new org.apache.thrift.protocol.TField("option_str", org.apache.thrift.protocol.TType.STRING, (short)14); private static final org.apache.thrift.protocol.TField E_FIELD_DESC = new org.apache.thrift.protocol.TField("e", 
org.apache.thrift.protocol.TType.I32, (short)15); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new StructSimpleStandardSchemeFactory()); schemes.put(TupleScheme.class, new StructSimpleTupleSchemeFactory()); } public short id16; // required public int id32; // required public long id64; // required public ByteBuffer bin1; // required public boolean b1; // required public double d1; // required public String str1; // required public List<Long> l1; // required public Map<String,Boolean> m1; // required public Set<Double> s1; // required public Foo f1; // required public List<Foo> fooList; // required public Map<String,Foo> fooMap; // required public String option_str; // optional public TestEnum e; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { ID16((short)1, "id16"), ID32((short)2, "id32"), ID64((short)3, "id64"), BIN1((short)4, "bin1"), B1((short)5, "b1"), D1((short)6, "d1"), STR1((short)7, "str1"), L1((short)8, "l1"), M1((short)9, "m1"), S1((short)10, "s1"), F1((short)11, "f1"), FOO_LIST((short)12, "fooList"), FOO_MAP((short)13, "fooMap"), OPTION_STR((short)14, "option_str"), E((short)15, "e"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // ID16 return ID16; case 2: // ID32 return ID32; case 3: // ID64 return ID64; case 4: // BIN1 return BIN1; case 5: // B1 return B1; case 6: // D1 return D1; case 7: // STR1 return STR1; case 8: // L1 return L1; case 9: // M1 return M1; case 10: // S1 return S1; case 11: // F1 return F1; case 12: // FOO_LIST return FOO_LIST; case 13: // FOO_MAP return FOO_MAP; case 14: // OPTION_STR return OPTION_STR; case 15: // E return E; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __ID16_ISSET_ID = 0; private static final int __ID32_ISSET_ID = 1; private static final int __ID64_ISSET_ID = 2; private static final int __B1_ISSET_ID = 3; private static final int __D1_ISSET_ID = 4; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.OPTION_STR}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.ID16, new org.apache.thrift.meta_data.FieldMetaData("id16", org.apache.thrift.TFieldRequirementType.REQUIRED, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.ID32, new org.apache.thrift.meta_data.FieldMetaData("id32", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.ID64, new org.apache.thrift.meta_data.FieldMetaData("id64", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.BIN1, new org.apache.thrift.meta_data.FieldMetaData("bin1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); tmpMap.put(_Fields.B1, new org.apache.thrift.meta_data.FieldMetaData("b1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); tmpMap.put(_Fields.D1, new org.apache.thrift.meta_data.FieldMetaData("d1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE))); tmpMap.put(_Fields.STR1, new org.apache.thrift.meta_data.FieldMetaData("str1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.L1, new org.apache.thrift.meta_data.FieldMetaData("l1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)))); tmpMap.put(_Fields.M1, new org.apache.thrift.meta_data.FieldMetaData("m1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)))); tmpMap.put(_Fields.S1, new org.apache.thrift.meta_data.FieldMetaData("s1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)))); tmpMap.put(_Fields.F1, new org.apache.thrift.meta_data.FieldMetaData("f1", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT , "Foo"))); tmpMap.put(_Fields.FOO_LIST, new org.apache.thrift.meta_data.FieldMetaData("fooList", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT , "Foo")))); tmpMap.put(_Fields.FOO_MAP, new org.apache.thrift.meta_data.FieldMetaData("fooMap", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT , "Foo")))); tmpMap.put(_Fields.OPTION_STR, new org.apache.thrift.meta_data.FieldMetaData("option_str", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.E, new org.apache.thrift.meta_data.FieldMetaData("e", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.ENUM , "TestEnum"))); metaDataMap = Collections.unmodifiableMap(tmpMap); 
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StructSimple.class, metaDataMap); } public StructSimple() { } public StructSimple( short id16, int id32, long id64, ByteBuffer bin1, boolean b1, double d1, String str1, List<Long> l1, Map<String,Boolean> m1, Set<Double> s1, Foo f1, List<Foo> fooList, Map<String,Foo> fooMap, TestEnum e) { this(); this.id16 = id16; setId16IsSet(true); this.id32 = id32; setId32IsSet(true); this.id64 = id64; setId64IsSet(true); this.bin1 = org.apache.thrift.TBaseHelper.copyBinary(bin1); this.b1 = b1; setB1IsSet(true); this.d1 = d1; setD1IsSet(true); this.str1 = str1; this.l1 = l1; this.m1 = m1; this.s1 = s1; this.f1 = f1; this.fooList = fooList; this.fooMap = fooMap; this.e = e; } /** * Performs a deep copy on <i>other</i>. */ public StructSimple(StructSimple other) { __isset_bitfield = other.__isset_bitfield; this.id16 = other.id16; this.id32 = other.id32; this.id64 = other.id64; if (other.isSetBin1()) { this.bin1 = org.apache.thrift.TBaseHelper.copyBinary(other.bin1); } this.b1 = other.b1; this.d1 = other.d1; if (other.isSetStr1()) { this.str1 = other.str1; } if (other.isSetL1()) { List<Long> __this__l1 = new ArrayList<Long>(other.l1); this.l1 = __this__l1; } if (other.isSetM1()) { Map<String,Boolean> __this__m1 = new HashMap<String,Boolean>(other.m1); this.m1 = __this__m1; } if (other.isSetS1()) { Set<Double> __this__s1 = new HashSet<Double>(other.s1); this.s1 = __this__s1; } if (other.isSetF1()) { this.f1 = other.f1; } if (other.isSetFooList()) { List<Foo> __this__fooList = new ArrayList<Foo>(other.fooList.size()); for (Foo other_element : other.fooList) { __this__fooList.add(other_element); } this.fooList = __this__fooList; } if (other.isSetFooMap()) { Map<String,Foo> __this__fooMap = new HashMap<String,Foo>(other.fooMap.size()); for (Map.Entry<String, Foo> other_element : other.fooMap.entrySet()) { String other_element_key = other_element.getKey(); Foo other_element_value = other_element.getValue(); String 
__this__fooMap_copy_key = other_element_key; Foo __this__fooMap_copy_value = other_element_value; __this__fooMap.put(__this__fooMap_copy_key, __this__fooMap_copy_value); } this.fooMap = __this__fooMap; } if (other.isSetOption_str()) { this.option_str = other.option_str; } if (other.isSetE()) { this.e = other.e; } } public StructSimple deepCopy() { return new StructSimple(this); } @Override public void clear() { setId16IsSet(false); this.id16 = 0; setId32IsSet(false); this.id32 = 0; setId64IsSet(false); this.id64 = 0; this.bin1 = null; setB1IsSet(false); this.b1 = false; setD1IsSet(false); this.d1 = 0.0; this.str1 = null; this.l1 = null; this.m1 = null; this.s1 = null; this.f1 = null; this.fooList = null; this.fooMap = null; this.option_str = null; this.e = null; } public short getId16() { return this.id16; } public StructSimple setId16(short id16) { this.id16 = id16; setId16IsSet(true); return this; } public void unsetId16() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID16_ISSET_ID); } /** Returns true if field id16 is set (has been assigned a value) and false otherwise */ public boolean isSetId16() { return EncodingUtils.testBit(__isset_bitfield, __ID16_ISSET_ID); } public void setId16IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID16_ISSET_ID, value); } public int getId32() { return this.id32; } public StructSimple setId32(int id32) { this.id32 = id32; setId32IsSet(true); return this; } public void unsetId32() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID32_ISSET_ID); } /** Returns true if field id32 is set (has been assigned a value) and false otherwise */ public boolean isSetId32() { return EncodingUtils.testBit(__isset_bitfield, __ID32_ISSET_ID); } public void setId32IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID32_ISSET_ID, value); } public long getId64() { return this.id64; } public StructSimple setId64(long id64) { this.id64 = id64; 
setId64IsSet(true); return this; } public void unsetId64() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID64_ISSET_ID); } /** Returns true if field id64 is set (has been assigned a value) and false otherwise */ public boolean isSetId64() { return EncodingUtils.testBit(__isset_bitfield, __ID64_ISSET_ID); } public void setId64IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID64_ISSET_ID, value); } public byte[] getBin1() { setBin1(org.apache.thrift.TBaseHelper.rightSize(bin1)); return bin1 == null ? null : bin1.array(); } public ByteBuffer bufferForBin1() { return org.apache.thrift.TBaseHelper.copyBinary(bin1); } public StructSimple setBin1(byte[] bin1) { this.bin1 = bin1 == null ? (ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(bin1, bin1.length)); return this; } public StructSimple setBin1(ByteBuffer bin1) { this.bin1 = org.apache.thrift.TBaseHelper.copyBinary(bin1); return this; } public void unsetBin1() { this.bin1 = null; } /** Returns true if field bin1 is set (has been assigned a value) and false otherwise */ public boolean isSetBin1() { return this.bin1 != null; } public void setBin1IsSet(boolean value) { if (!value) { this.bin1 = null; } } public boolean isB1() { return this.b1; } public StructSimple setB1(boolean b1) { this.b1 = b1; setB1IsSet(true); return this; } public void unsetB1() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __B1_ISSET_ID); } /** Returns true if field b1 is set (has been assigned a value) and false otherwise */ public boolean isSetB1() { return EncodingUtils.testBit(__isset_bitfield, __B1_ISSET_ID); } public void setB1IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __B1_ISSET_ID, value); } public double getD1() { return this.d1; } public StructSimple setD1(double d1) { this.d1 = d1; setD1IsSet(true); return this; } public void unsetD1() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __D1_ISSET_ID); } /** Returns true 
if field d1 is set (has been assigned a value) and false otherwise */ public boolean isSetD1() { return EncodingUtils.testBit(__isset_bitfield, __D1_ISSET_ID); } public void setD1IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __D1_ISSET_ID, value); } public String getStr1() { return this.str1; } public StructSimple setStr1(String str1) { this.str1 = str1; return this; } public void unsetStr1() { this.str1 = null; } /** Returns true if field str1 is set (has been assigned a value) and false otherwise */ public boolean isSetStr1() { return this.str1 != null; } public void setStr1IsSet(boolean value) { if (!value) { this.str1 = null; } } public int getL1Size() { return (this.l1 == null) ? 0 : this.l1.size(); } public java.util.Iterator<Long> getL1Iterator() { return (this.l1 == null) ? null : this.l1.iterator(); } public void addToL1(long elem) { if (this.l1 == null) { this.l1 = new ArrayList<Long>(); } this.l1.add(elem); } public List<Long> getL1() { return this.l1; } public StructSimple setL1(List<Long> l1) { this.l1 = l1; return this; } public void unsetL1() { this.l1 = null; } /** Returns true if field l1 is set (has been assigned a value) and false otherwise */ public boolean isSetL1() { return this.l1 != null; } public void setL1IsSet(boolean value) { if (!value) { this.l1 = null; } } public int getM1Size() { return (this.m1 == null) ? 0 : this.m1.size(); } public void putToM1(String key, boolean val) { if (this.m1 == null) { this.m1 = new HashMap<String,Boolean>(); } this.m1.put(key, val); } public Map<String,Boolean> getM1() { return this.m1; } public StructSimple setM1(Map<String,Boolean> m1) { this.m1 = m1; return this; } public void unsetM1() { this.m1 = null; } /** Returns true if field m1 is set (has been assigned a value) and false otherwise */ public boolean isSetM1() { return this.m1 != null; } public void setM1IsSet(boolean value) { if (!value) { this.m1 = null; } } public int getS1Size() { return (this.s1 == null) ? 
0 : this.s1.size(); } public java.util.Iterator<Double> getS1Iterator() { return (this.s1 == null) ? null : this.s1.iterator(); } public void addToS1(double elem) { if (this.s1 == null) { this.s1 = new HashSet<Double>(); } this.s1.add(elem); } public Set<Double> getS1() { return this.s1; } public StructSimple setS1(Set<Double> s1) { this.s1 = s1; return this; } public void unsetS1() { this.s1 = null; } /** Returns true if field s1 is set (has been assigned a value) and false otherwise */ public boolean isSetS1() { return this.s1 != null; } public void setS1IsSet(boolean value) { if (!value) { this.s1 = null; } } public Foo getF1() { return this.f1; } public StructSimple setF1(Foo f1) { this.f1 = f1; return this; } public void unsetF1() { this.f1 = null; } /** Returns true if field f1 is set (has been assigned a value) and false otherwise */ public boolean isSetF1() { return this.f1 != null; } public void setF1IsSet(boolean value) { if (!value) { this.f1 = null; } } public int getFooListSize() { return (this.fooList == null) ? 0 : this.fooList.size(); } public java.util.Iterator<Foo> getFooListIterator() { return (this.fooList == null) ? null : this.fooList.iterator(); } public void addToFooList(Foo elem) { if (this.fooList == null) { this.fooList = new ArrayList<Foo>(); } this.fooList.add(elem); } public List<Foo> getFooList() { return this.fooList; } public StructSimple setFooList(List<Foo> fooList) { this.fooList = fooList; return this; } public void unsetFooList() { this.fooList = null; } /** Returns true if field fooList is set (has been assigned a value) and false otherwise */ public boolean isSetFooList() { return this.fooList != null; } public void setFooListIsSet(boolean value) { if (!value) { this.fooList = null; } } public int getFooMapSize() { return (this.fooMap == null) ? 
0 : this.fooMap.size(); } public void putToFooMap(String key, Foo val) { if (this.fooMap == null) { this.fooMap = new HashMap<String,Foo>(); } this.fooMap.put(key, val); } public Map<String,Foo> getFooMap() { return this.fooMap; } public StructSimple setFooMap(Map<String,Foo> fooMap) { this.fooMap = fooMap; return this; } public void unsetFooMap() { this.fooMap = null; } /** Returns true if field fooMap is set (has been assigned a value) and false otherwise */ public boolean isSetFooMap() { return this.fooMap != null; } public void setFooMapIsSet(boolean value) { if (!value) { this.fooMap = null; } } public String getOption_str() { return this.option_str; } public StructSimple setOption_str(String option_str) { this.option_str = option_str; return this; } public void unsetOption_str() { this.option_str = null; } /** Returns true if field option_str is set (has been assigned a value) and false otherwise */ public boolean isSetOption_str() { return this.option_str != null; } public void setOption_strIsSet(boolean value) { if (!value) { this.option_str = null; } } public TestEnum getE() { return this.e; } public StructSimple setE(TestEnum e) { this.e = e; return this; } public void unsetE() { this.e = null; } /** Returns true if field e is set (has been assigned a value) and false otherwise */ public boolean isSetE() { return this.e != null; } public void setEIsSet(boolean value) { if (!value) { this.e = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case ID16: if (value == null) { unsetId16(); } else { setId16((Short)value); } break; case ID32: if (value == null) { unsetId32(); } else { setId32((Integer)value); } break; case ID64: if (value == null) { unsetId64(); } else { setId64((Long)value); } break; case BIN1: if (value == null) { unsetBin1(); } else { setBin1((ByteBuffer)value); } break; case B1: if (value == null) { unsetB1(); } else { setB1((Boolean)value); } break; case D1: if (value == null) { unsetD1(); } else { 
setD1((Double)value); } break; case STR1: if (value == null) { unsetStr1(); } else { setStr1((String)value); } break; case L1: if (value == null) { unsetL1(); } else { setL1((List<Long>)value); } break; case M1: if (value == null) { unsetM1(); } else { setM1((Map<String,Boolean>)value); } break; case S1: if (value == null) { unsetS1(); } else { setS1((Set<Double>)value); } break; case F1: if (value == null) { unsetF1(); } else { setF1((Foo)value); } break; case FOO_LIST: if (value == null) { unsetFooList(); } else { setFooList((List<Foo>)value); } break; case FOO_MAP: if (value == null) { unsetFooMap(); } else { setFooMap((Map<String,Foo>)value); } break; case OPTION_STR: if (value == null) { unsetOption_str(); } else { setOption_str((String)value); } break; case E: if (value == null) { unsetE(); } else { setE((TestEnum)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case ID16: return Short.valueOf(getId16()); case ID32: return Integer.valueOf(getId32()); case ID64: return Long.valueOf(getId64()); case BIN1: return getBin1(); case B1: return Boolean.valueOf(isB1()); case D1: return Double.valueOf(getD1()); case STR1: return getStr1(); case L1: return getL1(); case M1: return getM1(); case S1: return getS1(); case F1: return getF1(); case FOO_LIST: return getFooList(); case FOO_MAP: return getFooMap(); case OPTION_STR: return getOption_str(); case E: return getE(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case ID16: return isSetId16(); case ID32: return isSetId32(); case ID64: return isSetId64(); case BIN1: return isSetBin1(); case B1: return isSetB1(); case D1: return isSetD1(); case STR1: return isSetStr1(); case L1: return isSetL1(); case M1: return isSetM1(); case S1: return isSetS1(); case F1: return 
isSetF1(); case FOO_LIST: return isSetFooList(); case FOO_MAP: return isSetFooMap(); case OPTION_STR: return isSetOption_str(); case E: return isSetE(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof StructSimple) return this.equals((StructSimple)that); return false; } public boolean equals(StructSimple that) { if (that == null) return false; boolean this_present_id16 = true; boolean that_present_id16 = true; if (this_present_id16 || that_present_id16) { if (!(this_present_id16 && that_present_id16)) return false; if (this.id16 != that.id16) return false; } boolean this_present_id32 = true; boolean that_present_id32 = true; if (this_present_id32 || that_present_id32) { if (!(this_present_id32 && that_present_id32)) return false; if (this.id32 != that.id32) return false; } boolean this_present_id64 = true; boolean that_present_id64 = true; if (this_present_id64 || that_present_id64) { if (!(this_present_id64 && that_present_id64)) return false; if (this.id64 != that.id64) return false; } boolean this_present_bin1 = true && this.isSetBin1(); boolean that_present_bin1 = true && that.isSetBin1(); if (this_present_bin1 || that_present_bin1) { if (!(this_present_bin1 && that_present_bin1)) return false; if (!this.bin1.equals(that.bin1)) return false; } boolean this_present_b1 = true; boolean that_present_b1 = true; if (this_present_b1 || that_present_b1) { if (!(this_present_b1 && that_present_b1)) return false; if (this.b1 != that.b1) return false; } boolean this_present_d1 = true; boolean that_present_d1 = true; if (this_present_d1 || that_present_d1) { if (!(this_present_d1 && that_present_d1)) return false; if (this.d1 != that.d1) return false; } boolean this_present_str1 = true && this.isSetStr1(); boolean that_present_str1 = true && that.isSetStr1(); if (this_present_str1 || that_present_str1) { if (!(this_present_str1 && that_present_str1)) return false; if 
(!this.str1.equals(that.str1)) return false; } boolean this_present_l1 = true && this.isSetL1(); boolean that_present_l1 = true && that.isSetL1(); if (this_present_l1 || that_present_l1) { if (!(this_present_l1 && that_present_l1)) return false; if (!this.l1.equals(that.l1)) return false; } boolean this_present_m1 = true && this.isSetM1(); boolean that_present_m1 = true && that.isSetM1(); if (this_present_m1 || that_present_m1) { if (!(this_present_m1 && that_present_m1)) return false; if (!this.m1.equals(that.m1)) return false; } boolean this_present_s1 = true && this.isSetS1(); boolean that_present_s1 = true && that.isSetS1(); if (this_present_s1 || that_present_s1) { if (!(this_present_s1 && that_present_s1)) return false; if (!this.s1.equals(that.s1)) return false; } boolean this_present_f1 = true && this.isSetF1(); boolean that_present_f1 = true && that.isSetF1(); if (this_present_f1 || that_present_f1) { if (!(this_present_f1 && that_present_f1)) return false; if (!this.f1.equals(that.f1)) return false; } boolean this_present_fooList = true && this.isSetFooList(); boolean that_present_fooList = true && that.isSetFooList(); if (this_present_fooList || that_present_fooList) { if (!(this_present_fooList && that_present_fooList)) return false; if (!this.fooList.equals(that.fooList)) return false; } boolean this_present_fooMap = true && this.isSetFooMap(); boolean that_present_fooMap = true && that.isSetFooMap(); if (this_present_fooMap || that_present_fooMap) { if (!(this_present_fooMap && that_present_fooMap)) return false; if (!this.fooMap.equals(that.fooMap)) return false; } boolean this_present_option_str = true && this.isSetOption_str(); boolean that_present_option_str = true && that.isSetOption_str(); if (this_present_option_str || that_present_option_str) { if (!(this_present_option_str && that_present_option_str)) return false; if (!this.option_str.equals(that.option_str)) return false; } boolean this_present_e = true && this.isSetE(); boolean 
that_present_e = true && that.isSetE(); if (this_present_e || that_present_e) { if (!(this_present_e && that_present_e)) return false; if (!this.e.equals(that.e)) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_id16 = true; list.add(present_id16); if (present_id16) list.add(id16); boolean present_id32 = true; list.add(present_id32); if (present_id32) list.add(id32); boolean present_id64 = true; list.add(present_id64); if (present_id64) list.add(id64); boolean present_bin1 = true && (isSetBin1()); list.add(present_bin1); if (present_bin1) list.add(bin1); boolean present_b1 = true; list.add(present_b1); if (present_b1) list.add(b1); boolean present_d1 = true; list.add(present_d1); if (present_d1) list.add(d1); boolean present_str1 = true && (isSetStr1()); list.add(present_str1); if (present_str1) list.add(str1); boolean present_l1 = true && (isSetL1()); list.add(present_l1); if (present_l1) list.add(l1); boolean present_m1 = true && (isSetM1()); list.add(present_m1); if (present_m1) list.add(m1); boolean present_s1 = true && (isSetS1()); list.add(present_s1); if (present_s1) list.add(s1); boolean present_f1 = true && (isSetF1()); list.add(present_f1); if (present_f1) list.add(f1); boolean present_fooList = true && (isSetFooList()); list.add(present_fooList); if (present_fooList) list.add(fooList); boolean present_fooMap = true && (isSetFooMap()); list.add(present_fooMap); if (present_fooMap) list.add(fooMap); boolean present_option_str = true && (isSetOption_str()); list.add(present_option_str); if (present_option_str) list.add(option_str); boolean present_e = true && (isSetE()); list.add(present_e); if (present_e) list.add(e.getValue()); return list.hashCode(); } @Override public int compareTo(StructSimple other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = 
Boolean.valueOf(isSetId16()).compareTo(other.isSetId16()); if (lastComparison != 0) { return lastComparison; } if (isSetId16()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id16, other.id16); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetId32()).compareTo(other.isSetId32()); if (lastComparison != 0) { return lastComparison; } if (isSetId32()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id32, other.id32); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetId64()).compareTo(other.isSetId64()); if (lastComparison != 0) { return lastComparison; } if (isSetId64()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id64, other.id64); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetBin1()).compareTo(other.isSetBin1()); if (lastComparison != 0) { return lastComparison; } if (isSetBin1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.bin1, other.bin1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetB1()).compareTo(other.isSetB1()); if (lastComparison != 0) { return lastComparison; } if (isSetB1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.b1, other.b1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetD1()).compareTo(other.isSetD1()); if (lastComparison != 0) { return lastComparison; } if (isSetD1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.d1, other.d1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetStr1()).compareTo(other.isSetStr1()); if (lastComparison != 0) { return lastComparison; } if (isSetStr1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.str1, other.str1); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
Boolean.valueOf(isSetL1()).compareTo(other.isSetL1()); if (lastComparison != 0) { return lastComparison; } if (isSetL1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.l1, other.l1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetM1()).compareTo(other.isSetM1()); if (lastComparison != 0) { return lastComparison; } if (isSetM1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.m1, other.m1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetS1()).compareTo(other.isSetS1()); if (lastComparison != 0) { return lastComparison; } if (isSetS1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.s1, other.s1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetF1()).compareTo(other.isSetF1()); if (lastComparison != 0) { return lastComparison; } if (isSetF1()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.f1, other.f1); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetFooList()).compareTo(other.isSetFooList()); if (lastComparison != 0) { return lastComparison; } if (isSetFooList()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fooList, other.fooList); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetFooMap()).compareTo(other.isSetFooMap()); if (lastComparison != 0) { return lastComparison; } if (isSetFooMap()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fooMap, other.fooMap); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetOption_str()).compareTo(other.isSetOption_str()); if (lastComparison != 0) { return lastComparison; } if (isSetOption_str()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.option_str, other.option_str); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
Boolean.valueOf(isSetE()).compareTo(other.isSetE()); if (lastComparison != 0) { return lastComparison; } if (isSetE()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.e, other.e); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("StructSimple("); boolean first = true; sb.append("id16:"); sb.append(this.id16); first = false; if (!first) sb.append(", "); sb.append("id32:"); sb.append(this.id32); first = false; if (!first) sb.append(", "); sb.append("id64:"); sb.append(this.id64); first = false; if (!first) sb.append(", "); sb.append("bin1:"); if (this.bin1 == null) { sb.append("null"); } else { org.apache.thrift.TBaseHelper.toString(this.bin1, sb); } first = false; if (!first) sb.append(", "); sb.append("b1:"); sb.append(this.b1); first = false; if (!first) sb.append(", "); sb.append("d1:"); sb.append(this.d1); first = false; if (!first) sb.append(", "); sb.append("str1:"); if (this.str1 == null) { sb.append("null"); } else { sb.append(this.str1); } first = false; if (!first) sb.append(", "); sb.append("l1:"); if (this.l1 == null) { sb.append("null"); } else { sb.append(this.l1); } first = false; if (!first) sb.append(", "); sb.append("m1:"); if (this.m1 == null) { sb.append("null"); } else { sb.append(this.m1); } first = false; if (!first) sb.append(", "); sb.append("s1:"); if (this.s1 == null) { sb.append("null"); } else { sb.append(this.s1); } first = false; if (!first) sb.append(", "); sb.append("f1:"); if (this.f1 == null) { sb.append("null"); } else { 
sb.append(this.f1); } first = false; if (!first) sb.append(", "); sb.append("fooList:"); if (this.fooList == null) { sb.append("null"); } else { sb.append(this.fooList); } first = false; if (!first) sb.append(", "); sb.append("fooMap:"); if (this.fooMap == null) { sb.append("null"); } else { sb.append(this.fooMap); } first = false; if (isSetOption_str()) { if (!first) sb.append(", "); sb.append("option_str:"); if (this.option_str == null) { sb.append("null"); } else { sb.append(this.option_str); } first = false; } if (!first) sb.append(", "); sb.append("e:"); if (this.e == null) { sb.append("null"); } else { sb.append(this.e); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // alas, we cannot check 'id16' because it's a primitive and you chose the non-beans generator. // alas, we cannot check 'id32' because it's a primitive and you chose the non-beans generator. // alas, we cannot check 'id64' because it's a primitive and you chose the non-beans generator. if (bin1 == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'bin1' was not present! Struct: " + toString()); } // alas, we cannot check 'b1' because it's a primitive and you chose the non-beans generator. // alas, we cannot check 'd1' because it's a primitive and you chose the non-beans generator. if (str1 == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'str1' was not present! Struct: " + toString()); } if (l1 == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'l1' was not present! Struct: " + toString()); } if (m1 == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'm1' was not present! Struct: " + toString()); } if (s1 == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 's1' was not present! 
Struct: " + toString()); } if (f1 == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'f1' was not present! Struct: " + toString()); } if (fooList == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'fooList' was not present! Struct: " + toString()); } if (fooMap == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'fooMap' was not present! Struct: " + toString()); } if (e == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'e' was not present! Struct: " + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class StructSimpleStandardSchemeFactory implements SchemeFactory { public StructSimpleStandardScheme getScheme() { return new StructSimpleStandardScheme(); } } private static class StructSimpleStandardScheme extends StandardScheme<StructSimple> { public void read(org.apache.thrift.protocol.TProtocol iprot, StructSimple struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // ID16 if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.id16 = iprot.readI16(); struct.setId16IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // ID32 if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.id32 = iprot.readI32(); struct.setId32IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // ID64 if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.id64 = iprot.readI64(); struct.setId64IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // BIN1 if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.bin1 = iprot.readBinary(); struct.setBin1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // B1 if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { struct.b1 = iprot.readBool(); struct.setB1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // D1 if (schemeField.type 
== org.apache.thrift.protocol.TType.DOUBLE) { struct.d1 = iprot.readDouble(); struct.setD1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // STR1 if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.str1 = iprot.readString(); struct.setStr1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 8: // L1 if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list0 = iprot.readListBegin(); struct.l1 = new ArrayList<Long>(_list0.size); long _elem1; for (int _i2 = 0; _i2 < _list0.size; ++_i2) { _elem1 = iprot.readI64(); struct.l1.add(_elem1); } iprot.readListEnd(); } struct.setL1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 9: // M1 if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map3 = iprot.readMapBegin(); struct.m1 = new HashMap<String,Boolean>(2*_map3.size); String _key4; boolean _val5; for (int _i6 = 0; _i6 < _map3.size; ++_i6) { _key4 = iprot.readString(); _val5 = iprot.readBool(); struct.m1.put(_key4, _val5); } iprot.readMapEnd(); } struct.setM1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 10: // S1 if (schemeField.type == org.apache.thrift.protocol.TType.SET) { { org.apache.thrift.protocol.TSet _set7 = iprot.readSetBegin(); struct.s1 = new HashSet<Double>(2*_set7.size); double _elem8; for (int _i9 = 0; _i9 < _set7.size; ++_i9) { _elem8 = iprot.readDouble(); struct.s1.add(_elem8); } iprot.readSetEnd(); } struct.setS1IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 11: // F1 if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.f1 = new Foo(); struct.f1.read(iprot); struct.setF1IsSet(true); } else { 
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 12: // FOO_LIST if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list10 = iprot.readListBegin(); struct.fooList = new ArrayList<Foo>(_list10.size); Foo _elem11; for (int _i12 = 0; _i12 < _list10.size; ++_i12) { _elem11 = new Foo(); _elem11.read(iprot); struct.fooList.add(_elem11); } iprot.readListEnd(); } struct.setFooListIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 13: // FOO_MAP if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map13 = iprot.readMapBegin(); struct.fooMap = new HashMap<String,Foo>(2*_map13.size); String _key14; Foo _val15; for (int _i16 = 0; _i16 < _map13.size; ++_i16) { _key14 = iprot.readString(); _val15 = new Foo(); _val15.read(iprot); struct.fooMap.put(_key14, _val15); } iprot.readMapEnd(); } struct.setFooMapIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 14: // OPTION_STR if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.option_str = iprot.readString(); struct.setOption_strIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 15: // E if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.e = com.airbnb.spark.thrift.TestEnum.findByValue(iprot.readI32()); struct.setEIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method if (!struct.isSetId16()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'id16' was not found in serialized data! 
Struct: " + toString()); } if (!struct.isSetId32()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'id32' was not found in serialized data! Struct: " + toString()); } if (!struct.isSetId64()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'id64' was not found in serialized data! Struct: " + toString()); } if (!struct.isSetB1()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'b1' was not found in serialized data! Struct: " + toString()); } if (!struct.isSetD1()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'd1' was not found in serialized data! Struct: " + toString()); } struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, StructSimple struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); oprot.writeFieldBegin(ID16_FIELD_DESC); oprot.writeI16(struct.id16); oprot.writeFieldEnd(); oprot.writeFieldBegin(ID32_FIELD_DESC); oprot.writeI32(struct.id32); oprot.writeFieldEnd(); oprot.writeFieldBegin(ID64_FIELD_DESC); oprot.writeI64(struct.id64); oprot.writeFieldEnd(); if (struct.bin1 != null) { oprot.writeFieldBegin(BIN1_FIELD_DESC); oprot.writeBinary(struct.bin1); oprot.writeFieldEnd(); } oprot.writeFieldBegin(B1_FIELD_DESC); oprot.writeBool(struct.b1); oprot.writeFieldEnd(); oprot.writeFieldBegin(D1_FIELD_DESC); oprot.writeDouble(struct.d1); oprot.writeFieldEnd(); if (struct.str1 != null) { oprot.writeFieldBegin(STR1_FIELD_DESC); oprot.writeString(struct.str1); oprot.writeFieldEnd(); } if (struct.l1 != null) { oprot.writeFieldBegin(L1_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.l1.size())); for (long _iter17 : struct.l1) { oprot.writeI64(_iter17); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.m1 != null) { oprot.writeFieldBegin(M1_FIELD_DESC); { oprot.writeMapBegin(new 
org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.BOOL, struct.m1.size())); for (Map.Entry<String, Boolean> _iter18 : struct.m1.entrySet()) { oprot.writeString(_iter18.getKey()); oprot.writeBool(_iter18.getValue()); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.s1 != null) { oprot.writeFieldBegin(S1_FIELD_DESC); { oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.DOUBLE, struct.s1.size())); for (double _iter19 : struct.s1) { oprot.writeDouble(_iter19); } oprot.writeSetEnd(); } oprot.writeFieldEnd(); } if (struct.f1 != null) { oprot.writeFieldBegin(F1_FIELD_DESC); struct.f1.write(oprot); oprot.writeFieldEnd(); } if (struct.fooList != null) { oprot.writeFieldBegin(FOO_LIST_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.fooList.size())); for (Foo _iter20 : struct.fooList) { _iter20.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.fooMap != null) { oprot.writeFieldBegin(FOO_MAP_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.fooMap.size())); for (Map.Entry<String, Foo> _iter21 : struct.fooMap.entrySet()) { oprot.writeString(_iter21.getKey()); _iter21.getValue().write(oprot); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.option_str != null) { if (struct.isSetOption_str()) { oprot.writeFieldBegin(OPTION_STR_FIELD_DESC); oprot.writeString(struct.option_str); oprot.writeFieldEnd(); } } if (struct.e != null) { oprot.writeFieldBegin(E_FIELD_DESC); oprot.writeI32(struct.e.getValue()); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class StructSimpleTupleSchemeFactory implements SchemeFactory { public StructSimpleTupleScheme getScheme() { return new StructSimpleTupleScheme(); } } private static class 
StructSimpleTupleScheme extends TupleScheme<StructSimple> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, StructSimple struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; oprot.writeI16(struct.id16); oprot.writeI32(struct.id32); oprot.writeI64(struct.id64); oprot.writeBinary(struct.bin1); oprot.writeBool(struct.b1); oprot.writeDouble(struct.d1); oprot.writeString(struct.str1); { oprot.writeI32(struct.l1.size()); for (long _iter22 : struct.l1) { oprot.writeI64(_iter22); } } { oprot.writeI32(struct.m1.size()); for (Map.Entry<String, Boolean> _iter23 : struct.m1.entrySet()) { oprot.writeString(_iter23.getKey()); oprot.writeBool(_iter23.getValue()); } } { oprot.writeI32(struct.s1.size()); for (double _iter24 : struct.s1) { oprot.writeDouble(_iter24); } } struct.f1.write(oprot); { oprot.writeI32(struct.fooList.size()); for (Foo _iter25 : struct.fooList) { _iter25.write(oprot); } } { oprot.writeI32(struct.fooMap.size()); for (Map.Entry<String, Foo> _iter26 : struct.fooMap.entrySet()) { oprot.writeString(_iter26.getKey()); _iter26.getValue().write(oprot); } } oprot.writeI32(struct.e.getValue()); BitSet optionals = new BitSet(); if (struct.isSetOption_str()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetOption_str()) { oprot.writeString(struct.option_str); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, StructSimple struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.id16 = iprot.readI16(); struct.setId16IsSet(true); struct.id32 = iprot.readI32(); struct.setId32IsSet(true); struct.id64 = iprot.readI64(); struct.setId64IsSet(true); struct.bin1 = iprot.readBinary(); struct.setBin1IsSet(true); struct.b1 = iprot.readBool(); struct.setB1IsSet(true); struct.d1 = iprot.readDouble(); struct.setD1IsSet(true); struct.str1 = iprot.readString(); struct.setStr1IsSet(true); { org.apache.thrift.protocol.TList _list27 = new 
org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32()); struct.l1 = new ArrayList<Long>(_list27.size); long _elem28; for (int _i29 = 0; _i29 < _list27.size; ++_i29) { _elem28 = iprot.readI64(); struct.l1.add(_elem28); } } struct.setL1IsSet(true); { org.apache.thrift.protocol.TMap _map30 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.BOOL, iprot.readI32()); struct.m1 = new HashMap<String,Boolean>(2*_map30.size); String _key31; boolean _val32; for (int _i33 = 0; _i33 < _map30.size; ++_i33) { _key31 = iprot.readString(); _val32 = iprot.readBool(); struct.m1.put(_key31, _val32); } } struct.setM1IsSet(true); { org.apache.thrift.protocol.TSet _set34 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.DOUBLE, iprot.readI32()); struct.s1 = new HashSet<Double>(2*_set34.size); double _elem35; for (int _i36 = 0; _i36 < _set34.size; ++_i36) { _elem35 = iprot.readDouble(); struct.s1.add(_elem35); } } struct.setS1IsSet(true); struct.f1 = new Foo(); struct.f1.read(iprot); struct.setF1IsSet(true); { org.apache.thrift.protocol.TList _list37 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.fooList = new ArrayList<Foo>(_list37.size); Foo _elem38; for (int _i39 = 0; _i39 < _list37.size; ++_i39) { _elem38 = new Foo(); _elem38.read(iprot); struct.fooList.add(_elem38); } } struct.setFooListIsSet(true); { org.apache.thrift.protocol.TMap _map40 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.fooMap = new HashMap<String,Foo>(2*_map40.size); String _key41; Foo _val42; for (int _i43 = 0; _i43 < _map40.size; ++_i43) { _key41 = iprot.readString(); _val42 = new Foo(); _val42.read(iprot); struct.fooMap.put(_key41, _val42); } } struct.setFooMapIsSet(true); struct.e = com.airbnb.spark.thrift.TestEnum.findByValue(iprot.readI32()); 
struct.setEIsSet(true); BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.option_str = iprot.readString(); struct.setOption_strIsSet(true); } } } }
3,979
0
Create_ds/airbnb-spark-thrift/src/test/scala/com/airbnb/spark
Create_ds/airbnb-spark-thrift/src/test/scala/com/airbnb/spark/thrift/Foo.java
/** * Autogenerated by Thrift Compiler (0.9.2) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package com.airbnb.spark.thrift; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2017-4-21") public class Foo implements org.apache.thrift.TBase<Foo, Foo._Fields>, java.io.Serializable, Cloneable, Comparable<Foo> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Foo"); private static final org.apache.thrift.protocol.TField ID16_FIELD_DESC = new org.apache.thrift.protocol.TField("id16", org.apache.thrift.protocol.TType.I16, (short)1); private static final org.apache.thrift.protocol.TField ID32_FIELD_DESC = new org.apache.thrift.protocol.TField("id32", org.apache.thrift.protocol.TType.I32, (short)2); private static final org.apache.thrift.protocol.TField ID64_FIELD_DESC = new org.apache.thrift.protocol.TField("id64", org.apache.thrift.protocol.TType.I64, (short)3); private static final Map<Class<? 
extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new FooStandardSchemeFactory()); schemes.put(TupleScheme.class, new FooTupleSchemeFactory()); } public short id16; // required public int id32; // required public long id64; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { ID16((short)1, "id16"), ID32((short)2, "id32"), ID64((short)3, "id64"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // ID16 return ID16; case 2: // ID32 return ID32; case 3: // ID64 return ID64; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __ID16_ISSET_ID = 0; private static final int __ID32_ISSET_ID = 1; private static final int __ID64_ISSET_ID = 2; private byte __isset_bitfield = 0; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.ID16, new org.apache.thrift.meta_data.FieldMetaData("id16", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.ID32, new org.apache.thrift.meta_data.FieldMetaData("id32", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.ID64, new org.apache.thrift.meta_data.FieldMetaData("id64", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Foo.class, metaDataMap); } public Foo() { } public Foo( short id16, int id32, long id64) { this(); this.id16 = id16; setId16IsSet(true); this.id32 = id32; setId32IsSet(true); this.id64 = id64; setId64IsSet(true); } /** * Performs a deep copy on <i>other</i>. 
*/ public Foo(Foo other) { __isset_bitfield = other.__isset_bitfield; this.id16 = other.id16; this.id32 = other.id32; this.id64 = other.id64; } public Foo deepCopy() { return new Foo(this); } @Override public void clear() { setId16IsSet(false); this.id16 = 0; setId32IsSet(false); this.id32 = 0; setId64IsSet(false); this.id64 = 0; } public short getId16() { return this.id16; } public Foo setId16(short id16) { this.id16 = id16; setId16IsSet(true); return this; } public void unsetId16() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID16_ISSET_ID); } /** Returns true if field id16 is set (has been assigned a value) and false otherwise */ public boolean isSetId16() { return EncodingUtils.testBit(__isset_bitfield, __ID16_ISSET_ID); } public void setId16IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID16_ISSET_ID, value); } public int getId32() { return this.id32; } public Foo setId32(int id32) { this.id32 = id32; setId32IsSet(true); return this; } public void unsetId32() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID32_ISSET_ID); } /** Returns true if field id32 is set (has been assigned a value) and false otherwise */ public boolean isSetId32() { return EncodingUtils.testBit(__isset_bitfield, __ID32_ISSET_ID); } public void setId32IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID32_ISSET_ID, value); } public long getId64() { return this.id64; } public Foo setId64(long id64) { this.id64 = id64; setId64IsSet(true); return this; } public void unsetId64() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID64_ISSET_ID); } /** Returns true if field id64 is set (has been assigned a value) and false otherwise */ public boolean isSetId64() { return EncodingUtils.testBit(__isset_bitfield, __ID64_ISSET_ID); } public void setId64IsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID64_ISSET_ID, value); } public void 
setFieldValue(_Fields field, Object value) { switch (field) { case ID16: if (value == null) { unsetId16(); } else { setId16((Short)value); } break; case ID32: if (value == null) { unsetId32(); } else { setId32((Integer)value); } break; case ID64: if (value == null) { unsetId64(); } else { setId64((Long)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case ID16: return Short.valueOf(getId16()); case ID32: return Integer.valueOf(getId32()); case ID64: return Long.valueOf(getId64()); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case ID16: return isSetId16(); case ID32: return isSetId32(); case ID64: return isSetId64(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof Foo) return this.equals((Foo)that); return false; } public boolean equals(Foo that) { if (that == null) return false; boolean this_present_id16 = true; boolean that_present_id16 = true; if (this_present_id16 || that_present_id16) { if (!(this_present_id16 && that_present_id16)) return false; if (this.id16 != that.id16) return false; } boolean this_present_id32 = true; boolean that_present_id32 = true; if (this_present_id32 || that_present_id32) { if (!(this_present_id32 && that_present_id32)) return false; if (this.id32 != that.id32) return false; } boolean this_present_id64 = true; boolean that_present_id64 = true; if (this_present_id64 || that_present_id64) { if (!(this_present_id64 && that_present_id64)) return false; if (this.id64 != that.id64) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_id16 = true; list.add(present_id16); if (present_id16) list.add(id16); boolean present_id32 = true; 
list.add(present_id32); if (present_id32) list.add(id32); boolean present_id64 = true; list.add(present_id64); if (present_id64) list.add(id64); return list.hashCode(); } @Override public int compareTo(Foo other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(isSetId16()).compareTo(other.isSetId16()); if (lastComparison != 0) { return lastComparison; } if (isSetId16()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id16, other.id16); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetId32()).compareTo(other.isSetId32()); if (lastComparison != 0) { return lastComparison; } if (isSetId32()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id32, other.id32); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetId64()).compareTo(other.isSetId64()); if (lastComparison != 0) { return lastComparison; } if (isSetId64()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id64, other.id64); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("Foo("); boolean first = true; sb.append("id16:"); sb.append(this.id16); first = false; if (!first) sb.append(", "); sb.append("id32:"); sb.append(this.id32); first = false; if (!first) sb.append(", "); sb.append("id64:"); sb.append(this.id64); first = false; sb.append(")"); return sb.toString(); } 
public void validate() throws org.apache.thrift.TException { // check for required fields // alas, we cannot check 'id16' because it's a primitive and you chose the non-beans generator. // alas, we cannot check 'id32' because it's a primitive and you chose the non-beans generator. // alas, we cannot check 'id64' because it's a primitive and you chose the non-beans generator. // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class FooStandardSchemeFactory implements SchemeFactory { public FooStandardScheme getScheme() { return new FooStandardScheme(); } } private static class FooStandardScheme extends StandardScheme<Foo> { public void read(org.apache.thrift.protocol.TProtocol iprot, Foo struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // ID16 if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.id16 = iprot.readI16(); struct.setId16IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // ID32 if (schemeField.type == 
org.apache.thrift.protocol.TType.I32) { struct.id32 = iprot.readI32(); struct.setId32IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // ID64 if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.id64 = iprot.readI64(); struct.setId64IsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method if (!struct.isSetId16()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'id16' was not found in serialized data! Struct: " + toString()); } if (!struct.isSetId32()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'id32' was not found in serialized data! Struct: " + toString()); } if (!struct.isSetId64()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'id64' was not found in serialized data! 
Struct: " + toString()); } struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, Foo struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); oprot.writeFieldBegin(ID16_FIELD_DESC); oprot.writeI16(struct.id16); oprot.writeFieldEnd(); oprot.writeFieldBegin(ID32_FIELD_DESC); oprot.writeI32(struct.id32); oprot.writeFieldEnd(); oprot.writeFieldBegin(ID64_FIELD_DESC); oprot.writeI64(struct.id64); oprot.writeFieldEnd(); oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class FooTupleSchemeFactory implements SchemeFactory { public FooTupleScheme getScheme() { return new FooTupleScheme(); } } private static class FooTupleScheme extends TupleScheme<Foo> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, Foo struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; oprot.writeI16(struct.id16); oprot.writeI32(struct.id32); oprot.writeI64(struct.id64); } @Override public void read(org.apache.thrift.protocol.TProtocol prot, Foo struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.id16 = iprot.readI16(); struct.setId16IsSet(true); struct.id32 = iprot.readI32(); struct.setId32IsSet(true); struct.id64 = iprot.readI64(); struct.setId64IsSet(true); } } }
3,980
0
Create_ds/docker-selion/test/src/test
Create_ds/docker-selion/test/src/test/java/SimpleTest.java
import com.paypal.selion.annotations.WebTest;
import com.paypal.selion.platform.grid.Grid;
import org.testng.annotations.Test;

import static org.testng.Assert.assertTrue;

/**
 * Smoke test that drives a SeLion-managed browser to Wikipedia and checks the page title.
 */
public class SimpleTest {

    @Test
    @WebTest(additionalCapabilities="marionette:false")
    public void openWikipedia() {
        // Navigate with the Grid-provided driver, then verify we landed on Wikipedia.
        Grid.driver().get("http://www.wikipedia.org");
        String pageTitle = Grid.driver().getTitle();
        assertTrue(pageTitle.contains("Wikipedia"));
    }
}
3,981
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/FlinkKinesisFirehoseProducerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.services.kinesisanalytics.flink.connectors.producer;

import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.FlinkKinesisFirehoseException;
import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.RecordCouldNotBeBuffered;
import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.RecordCouldNotBeSentException;
import com.amazonaws.services.kinesisanalytics.flink.connectors.serialization.KinesisFirehoseSerializationSchema;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose;
import com.amazonaws.services.kinesisfirehose.model.Record;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import javax.annotation.Nonnull;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl.FirehoseProducer.UserRecordResult;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.DEFAULT_DELIVERY_STREAM;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.DEFAULT_TEST_ERROR_MSG;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.getContext;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.getKinesisFirehoseSerializationSchema;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.getSerializationSchema;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.getStandardProperties;
import static org.apache.flink.streaming.api.functions.sink.SinkFunction.Context;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.fail;

/**
 * Unit tests for {@code FlinkKinesisFirehoseProducer}, exercising construction,
 * the happy-path open/invoke/close workflow, snapshot-triggered flushing, and the
 * failOnError rethrow semantics for asynchronous record failures. The sink under
 * test is a Mockito spy whose internal Firehose producer and client are replaced
 * with mocks in {@link #init()}.
 */
public class FlinkKinesisFirehoseProducerTest {

    private static final Logger LOGGER = LoggerFactory.getLogger(FlinkKinesisFirehoseProducerTest.class);

    // Spy of the sink under test; its factory methods are stubbed in init().
    private FlinkKinesisFirehoseProducer<String> flinkKinesisFirehoseProducer;
    private Context<String> context;
    private final Configuration properties = new Configuration();

    @Mock
    private AmazonKinesisFirehose kinesisFirehoseClient;

    @Mock
    private IProducer<UserRecordResult, Record> firehoseProducer;

    /**
     * Builds a spied sink before each test and redirects its internal factory
     * methods to return the mocked producer/client instead of real AWS objects.
     */
    @BeforeMethod
    public void init() {
        MockitoAnnotations.initMocks(this);
        flinkKinesisFirehoseProducer = createProducer();
        doReturn(firehoseProducer).when(flinkKinesisFirehoseProducer).createFirehoseProducer();
        doReturn(kinesisFirehoseClient).when(flinkKinesisFirehoseProducer).createKinesisFirehoseClient();
        context = getContext();
    }

    /** Constructor arguments using the Firehose-specific serialization schema, with and without an explicit credential type. */
    @DataProvider(name = "kinesisFirehoseSerializationProvider")
    public Object[][] kinesisFirehoseSerializationProvider() {
        return new Object[][]{
            {DEFAULT_DELIVERY_STREAM, getKinesisFirehoseSerializationSchema(), getStandardProperties(), null},
            {DEFAULT_DELIVERY_STREAM, getKinesisFirehoseSerializationSchema(), getStandardProperties(), CredentialProviderType.BASIC},
        };
    }

    /** Constructor arguments using Flink's generic {@link SerializationSchema}, with and without an explicit credential type. */
    @DataProvider(name = "serializationSchemaProvider")
    public Object[][] serializationSchemaProvider() {
        return new Object[][] {
            {DEFAULT_DELIVERY_STREAM, getSerializationSchema(), getStandardProperties(), null},
            {DEFAULT_DELIVERY_STREAM, getSerializationSchema(), getStandardProperties(), CredentialProviderType.BASIC}
        };
    }

    /** Constructing the producer with a Firehose serialization schema succeeds for both credential variants. */
    @Test(dataProvider = "kinesisFirehoseSerializationProvider")
    public void testFlinkKinesisFirehoseProducerHappyCase(final String deliveryStream,
                                                          final KinesisFirehoseSerializationSchema<String> schema,
                                                          final Properties configProps,
                                                          final CredentialProviderType credentialType) {
        FlinkKinesisFirehoseProducer<String> firehoseProducer = (credentialType != null) ?
            new FlinkKinesisFirehoseProducer<>(deliveryStream, schema, configProps, credentialType) :
            new FlinkKinesisFirehoseProducer<>(deliveryStream, schema, configProps);
        assertNotNull(firehoseProducer);
    }

    /** Constructing the producer with a plain {@link SerializationSchema} succeeds for both credential variants. */
    @Test(dataProvider = "serializationSchemaProvider")
    public void testFlinkKinesisFirehoseProducerWithSerializationSchemaHappyCase(final String deliveryStream
        , final SerializationSchema<String> schema, final Properties configProps, CredentialProviderType credentialType) {
        FlinkKinesisFirehoseProducer<String> firehoseProducer = (credentialType != null) ?
            new FlinkKinesisFirehoseProducer<>(deliveryStream, schema, configProps, credentialType) :
            new FlinkKinesisFirehoseProducer<>(deliveryStream, schema, configProps);
        assertNotNull(firehoseProducer);
    }

    /**
     * This test is responsible for verifying that an async error is rethrown when closing the sink (producer).
     */
    @Test
    public void testAsyncErrorRethrownOnClose() throws Exception {
        try {
            flinkKinesisFirehoseProducer.setFailOnError(true);
            // The buffered record completes exceptionally (RecordCouldNotBeBuffered).
            when(firehoseProducer.addUserRecord(any(Record.class)))
                .thenReturn(getUserRecordResult(true, false));
            flinkKinesisFirehoseProducer.open(properties);
            flinkKinesisFirehoseProducer.invoke("Test", context);
            // Give the async failure time to surface before closing.
            Thread.sleep(1000);
            flinkKinesisFirehoseProducer.close();
            LOGGER.warn("Should not reach this line");
            fail();
        } catch (FlinkKinesisFirehoseException ex) {
            LOGGER.info("Exception has been thrown inside testAsyncErrorRethrownOnClose");
            exceptionAssert(ex);
        } finally {
            verify(flinkKinesisFirehoseProducer, times(1)).open(properties);
            verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test", context);
            verify(flinkKinesisFirehoseProducer, times(1)).close();
        }
    }

    /**
     * This test is responsible for verifying that an async error is rethrown during invoke.
     */
    @Test
    public void testAsyncErrorRethrownOnInvoke() throws Exception {
        try {
            flinkKinesisFirehoseProducer.setFailOnError(true);
            // First record fails asynchronously; the failure is rethrown on the next invoke.
            when(firehoseProducer.addUserRecord(any(Record.class)))
                .thenReturn(getUserRecordResult(true, false))
                .thenReturn(getUserRecordResult(false, true));
            flinkKinesisFirehoseProducer.open(properties);
            flinkKinesisFirehoseProducer.invoke("Test", context);
            // Give the async failure time to surface before the second invoke.
            Thread.sleep(1000);
            flinkKinesisFirehoseProducer.invoke("Test2", context);
            LOGGER.warn("Should not reach this line");
            fail();
        } catch (FlinkKinesisFirehoseException ex) {
            LOGGER.info("Exception has been thrown inside testAsyncErrorRethrownOnInvoke");
            exceptionAssert(ex);
        } finally {
            verify(flinkKinesisFirehoseProducer, times(1)).open(properties);
            verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test", context);
            verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test2", context);
            verify(flinkKinesisFirehoseProducer, never()).close();
        }
    }

    /** A record whose future completes unsuccessfully surfaces as RecordCouldNotBeSentException on close. */
    @Test
    public void testAsyncErrorRethrownWhenRecordFailedToSend() throws Exception {
        flinkKinesisFirehoseProducer.setFailOnError(true);
        // The future completes normally, but the result itself is marked unsuccessful.
        UserRecordResult recordResult = new UserRecordResult();
        recordResult.setSuccessful(false);
        recordResult.setException(new RuntimeException("A bad thing has happened"));
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(CompletableFuture.completedFuture(recordResult));
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        assertThatExceptionOfType(FlinkKinesisFirehoseException.class)
            .isThrownBy(() -> flinkKinesisFirehoseProducer.close())
            .withMessageContaining("An exception has been thrown while trying to process a record")
            .withCauseInstanceOf(RecordCouldNotBeSentException.class)
            .withStackTraceContaining("A bad thing has happened");
    }

    /**
     * This test covers an async error that should NOT be rethrown on invoke when failOnError is disabled.
     * This is the default scenario for FlinkKinesisFirehoseProducer.
     */
    @Test
    public void testAsyncErrorNotRethrowOnInvoke() throws Exception {
        flinkKinesisFirehoseProducer.setFailOnError(false);
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(getUserRecordResult(true, false))
            .thenReturn(getUserRecordResult(true, true));
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        flinkKinesisFirehoseProducer.invoke("Test2", context);
        verify(flinkKinesisFirehoseProducer, times(1)).open(properties);
        verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test", context);
        verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test2", context);
        verify(flinkKinesisFirehoseProducer, never()).close();
    }

    /** Full open/invoke/close lifecycle with a successful record completes without error. */
    @Test
    public void testFlinkKinesisFirehoseProducerHappyWorkflow() throws Exception {
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(getUserRecordResult(false, true));
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        flinkKinesisFirehoseProducer.close();
        verify(flinkKinesisFirehoseProducer, times(1)).open(properties);
        verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test", context);
        verify(flinkKinesisFirehoseProducer, times(1)).close();
    }

    /** Closing the sink flushes outstanding records exactly once. */
    @Test
    public void testFlinkKinesisFirehoseProducerCloseAndFlushHappyWorkflow() throws Exception {
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(getUserRecordResult(false, true));
        doNothing().when(firehoseProducer).flush();
        // One outstanding record on the first poll, drained on the second.
        when(firehoseProducer.getOutstandingRecordsCount()).thenReturn(1).thenReturn(0);
        when(firehoseProducer.isFlushFailed()).thenReturn(false);
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        flinkKinesisFirehoseProducer.close();
        verify(firehoseProducer, times(1)).flush();
    }

    /** Taking a snapshot flushes synchronously until the outstanding count reaches zero. */
    @Test
    public void testFlinkKinesisFirehoseProducerTakeSnapshotHappyWorkflow() throws Exception {
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(getUserRecordResult(false, true));
        doNothing().when(firehoseProducer).flush();
        // Outstanding count stays non-zero for a few polls, then drains.
        when(firehoseProducer.getOutstandingRecordsCount()).thenReturn(1).thenReturn(1).thenReturn(1).thenReturn(0);
        when(firehoseProducer.isFlushFailed()).thenReturn(false);
        FunctionSnapshotContext functionContext = mock(FunctionSnapshotContext.class);
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        flinkKinesisFirehoseProducer.snapshotState(functionContext);
        verify(firehoseProducer, times(1)).flush();
    }

    /** A failed synchronous flush during snapshot is surfaced as IllegalStateException. */
    @Test(expectedExceptions = IllegalStateException.class,
        expectedExceptionsMessageRegExp = "An error has occurred trying to flush the buffer synchronously.*")
    public void testFlinkKinesisFirehoseProducerTakeSnapshotFailedFlush() throws Exception {
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(getUserRecordResult(false, true));
        doNothing().when(firehoseProducer).flush();
        when(firehoseProducer.getOutstandingRecordsCount()).thenReturn(1).thenReturn(1);
        // The second poll reports a failed flush, aborting the snapshot.
        when(firehoseProducer.isFlushFailed()).thenReturn(false).thenReturn(true);
        FunctionSnapshotContext functionContext = mock(FunctionSnapshotContext.class);
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        flinkKinesisFirehoseProducer.snapshotState(functionContext);
        fail("We should not reach here.");
    }

    /**
     * This test covers a scenario where async record failures are NOT rethrown while closing the sink (producer).
     * This is the default scenario for FlinkKinesisFirehoseProducer.
     */
    @Test
    public void testAsyncErrorNotRethrownOnClose() throws Exception {
        flinkKinesisFirehoseProducer.setFailOnError(false);
        when(firehoseProducer.addUserRecord(any(Record.class)))
            .thenReturn(getUserRecordResult(true, false))
            .thenReturn(getUserRecordResult(true, false));
        flinkKinesisFirehoseProducer.open(properties);
        flinkKinesisFirehoseProducer.invoke("Test", context);
        flinkKinesisFirehoseProducer.invoke("Test2", context);
        flinkKinesisFirehoseProducer.close();
        verify(flinkKinesisFirehoseProducer, times(1)).open(properties);
        verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test", context);
        verify(flinkKinesisFirehoseProducer, times(1)).invoke("Test2", context);
        verify(flinkKinesisFirehoseProducer, times(1)).close();
    }

    /** Asserts the standard wrapping: FlinkKinesisFirehoseException caused by RecordCouldNotBeBuffered. */
    private void exceptionAssert(FlinkKinesisFirehoseException ex) {
        final String expectedErrorMsg = "An exception has been thrown while trying to process a record";
        LOGGER.info(ex.getMessage());
        assertThat(ex.getMessage()).isEqualTo(expectedErrorMsg);
        assertThat(ex.getCause()).isInstanceOf(RecordCouldNotBeBuffered.class);
        LOGGER.info(ex.getCause().getMessage());
        assertThat(ex.getCause().getMessage()).isEqualTo(DEFAULT_TEST_ERROR_MSG);
    }

    /**
     * Builds the stubbed future returned by the mocked producer.
     *
     * @param isFailedRecord when true, the future completes exceptionally with RecordCouldNotBeBuffered
     * @param isSuccessful   success flag carried by the result when the future completes normally
     */
    @Nonnull
    private CompletableFuture<UserRecordResult> getUserRecordResult(final boolean isFailedRecord,
                                                                    final boolean isSuccessful) {
        UserRecordResult recordResult = new UserRecordResult().setSuccessful(isSuccessful);
        if (isFailedRecord) {
            CompletableFuture<UserRecordResult> future = new CompletableFuture<>();
            future.completeExceptionally(new RecordCouldNotBeBuffered(DEFAULT_TEST_ERROR_MSG));
            return future;
        } else {
            return CompletableFuture.completedFuture(recordResult);
        }
    }

    /** Creates the Mockito spy of the sink under test, using default test delivery stream and properties. */
    @Nonnull
    private FlinkKinesisFirehoseProducer<String> createProducer() {
        return spy(new FlinkKinesisFirehoseProducer<>(DEFAULT_DELIVERY_STREAM,
            getKinesisFirehoseSerializationSchema(), getStandardProperties()));
    }
}
3,982
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/impl/FirehoseProducerConfigurationTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl;

import org.testng.annotations.Test;

import javax.annotation.Nonnull;
import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_BASE_BACKOFF;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_INTERVAL_BETWEEN_FLUSHES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAXIMUM_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAX_BACKOFF;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAX_BUFFER_SIZE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAX_BUFFER_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAX_OPERATION_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_NUMBER_OF_RETRIES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_WAIT_TIME_FOR_BUFFER_FULL;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_BASE_BACKOFF_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_FLUSH_MAX_NUMBER_OF_RETRIES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_FLUSH_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_FULL_WAIT_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_BACKOFF_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_SIZE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_MAX_OPERATION_TIMEOUT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
 * Unit tests for {@code FirehoseProducerConfiguration}'s builder: defaults,
 * per-setting overrides via {@code with*} methods, range/negativity validation,
 * and overrides supplied through a {@link Properties} object.
 */
public class FirehoseProducerConfigurationTest {

    private static final String REGION = "us-east-1";

    /** Building with no overrides yields the documented defaults for every setting. */
    @Test
    public void testBuilderWithDefaultProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration.builder(REGION).build();
        assertThat(configuration.getMaxBufferSize()).isEqualTo(DEFAULT_MAX_BUFFER_SIZE);
        assertThat(configuration.getMaxPutRecordBatchBytes()).isEqualTo(DEFAULT_MAXIMUM_BATCH_BYTES);
        assertThat(configuration.getNumberOfRetries()).isEqualTo(DEFAULT_NUMBER_OF_RETRIES);
        assertThat(configuration.getBufferFullWaitTimeoutInMillis()).isEqualTo(DEFAULT_WAIT_TIME_FOR_BUFFER_FULL);
        assertThat(configuration.getBufferTimeoutInMillis()).isEqualTo(DEFAULT_MAX_BUFFER_TIMEOUT);
        assertThat(configuration.getBufferTimeoutBetweenFlushes()).isEqualTo(DEFAULT_INTERVAL_BETWEEN_FLUSHES);
        assertThat(configuration.getMaxBackOffInMillis()).isEqualTo(DEFAULT_MAX_BACKOFF);
        assertThat(configuration.getBaseBackOffInMillis()).isEqualTo(DEFAULT_BASE_BACKOFF);
        assertThat(configuration.getMaxOperationTimeoutInMillis()).isEqualTo(DEFAULT_MAX_OPERATION_TIMEOUT);
    }

    // ---- withMaxBufferSize: accepted value and [1, 500] range validation ----

    @Test
    public void testBuilderWithMaxBufferSize() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withMaxBufferSize(250)
            .build();
        assertThat(configuration.getMaxBufferSize()).isEqualTo(250);
    }

    @Test
    public void testBuilderWithMaxBufferSizeRejectsZero() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withMaxBufferSize(0))
            .withMessageContaining("Buffer size must be between 1 and 500");
    }

    @Test
    public void testBuilderWithMaxBufferSizeRejectsUpperLimit() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withMaxBufferSize(501))
            .withMessageContaining("Buffer size must be between 1 and 500");
    }

    // ---- withMaxPutRecordBatchBytes: accepted value and [1, 4194304] range validation ----

    @Test
    public void testBuilderWithMaxPutRecordBatchBytes() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withMaxPutRecordBatchBytes(100)
            .build();
        assertThat(configuration.getMaxPutRecordBatchBytes()).isEqualTo(100);
    }

    @Test
    public void testBuilderWithMaxPutRecordBatchBytesRejectsZero() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withMaxPutRecordBatchBytes(0))
            .withMessageContaining("Maximum batch size in bytes must be between 1 and 4194304");
    }

    @Test
    public void testBuilderWithMaxPutRecordBatchBytesRejectsUpperLimit() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withMaxPutRecordBatchBytes(4194305))
            .withMessageContaining("Maximum batch size in bytes must be between 1 and 4194304");
    }

    // ---- withNumberOfRetries: accepted value and non-negative validation ----

    @Test
    public void testBuilderWithNumberOfRetries() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withNumberOfRetries(100)
            .build();
        assertThat(configuration.getNumberOfRetries()).isEqualTo(100);
    }

    @Test
    public void testBuilderWithNumberOfRetriesRejectsNegative() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withNumberOfRetries(-1))
            .withMessageContaining("Number of retries cannot be negative");
    }

    // ---- Timeout and backoff setters: accepted values and positivity validation ----

    @Test
    public void testBuilderWithBufferTimeoutInMillis() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withBufferTimeoutInMillis(12345L)
            .build();
        assertThat(configuration.getBufferTimeoutInMillis()).isEqualTo(12345L);
    }

    @Test
    public void testBuilderWithBufferTimeoutInMillisRejects() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withBufferTimeoutInMillis(-1))
            .withMessageContaining("Flush timeout should be greater than 0");
    }

    @Test
    public void testBuilderWithMaxOperationTimeoutInMillis() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withMaxOperationTimeoutInMillis(999L)
            .build();
        assertThat(configuration.getMaxOperationTimeoutInMillis()).isEqualTo(999L);
    }

    @Test
    public void testBuilderWithMaxOperationTimeoutInMillisRejectsNegative() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withMaxOperationTimeoutInMillis(-1))
            .withMessageContaining("Max operation timeout should be greater than 0");
    }

    @Test
    public void testBuilderWithBufferFullWaitTimeoutInMillis() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withBufferFullWaitTimeoutInMillis(1L)
            .build();
        assertThat(configuration.getBufferFullWaitTimeoutInMillis()).isEqualTo(1L);
    }

    @Test
    public void testBuilderWithBufferFullWaitTimeoutInMillisRejectsNegative() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withBufferFullWaitTimeoutInMillis(-1))
            .withMessageContaining("Buffer full waiting timeout should be greater than 0");
    }

    @Test
    public void testBuilderWithBufferTimeoutBetweenFlushes() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withBufferTimeoutBetweenFlushes(2L)
            .build();
        assertThat(configuration.getBufferTimeoutBetweenFlushes()).isEqualTo(2L);
    }

    @Test
    public void testBuilderWithBufferTimeoutBetweenFlushesRejectsNegative() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withBufferTimeoutBetweenFlushes(-1))
            .withMessageContaining("Interval between flushes cannot be negative");
    }

    @Test
    public void testBuilderWithMaxBackOffInMillis() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withMaxBackOffInMillis(3L)
            .build();
        assertThat(configuration.getMaxBackOffInMillis()).isEqualTo(3L);
    }

    @Test
    public void testBuilderWithMaxBackOffInMillisRejectsNegative() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withMaxBackOffInMillis(-1))
            .withMessageContaining("Max backoff timeout should be greater than 0");
    }

    @Test
    public void testBuilderWithBaseBackOffInMillis() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withBaseBackOffInMillis(4L)
            .build();
        assertThat(configuration.getBaseBackOffInMillis()).isEqualTo(4L);
    }

    @Test
    public void testBuilderWithBaseBackOffInMillisRejectsNegative() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> FirehoseProducerConfiguration.builder(REGION).withBaseBackOffInMillis(-1))
            .withMessageContaining("Base backoff timeout should be greater than 0");
    }

    // ---- The same settings supplied via Properties keys instead of with* setters ----

    @Test
    public void testBuilderWithMaxBufferSizeFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_MAX_SIZE, "250"))
            .build();
        assertThat(configuration.getMaxBufferSize()).isEqualTo(250);
    }

    @Test
    public void testBuilderWithMaxPutRecordBatchBytesFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES, "100"))
            .build();
        assertThat(configuration.getMaxPutRecordBatchBytes()).isEqualTo(100);
    }

    @Test
    public void testBuilderWithNumberOfRetriesFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_FLUSH_MAX_NUMBER_OF_RETRIES, "100"))
            .build();
        assertThat(configuration.getNumberOfRetries()).isEqualTo(100);
    }

    @Test
    public void testBuilderWithBufferTimeoutInMillisFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT, "12345"))
            .build();
        assertThat(configuration.getBufferTimeoutInMillis()).isEqualTo(12345L);
    }

    @Test
    public void testBuilderWithMaxOperationTimeoutInMillisFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_MAX_OPERATION_TIMEOUT, "999"))
            .build();
        assertThat(configuration.getMaxOperationTimeoutInMillis()).isEqualTo(999L);
    }

    @Test
    public void testBuilderWithBufferFullWaitTimeoutInMillisFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_FULL_WAIT_TIMEOUT, "1"))
            .build();
        assertThat(configuration.getBufferFullWaitTimeoutInMillis()).isEqualTo(1L);
    }

    @Test
    public void testBuilderWithBufferTimeoutBetweenFlushesFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_FLUSH_TIMEOUT, "2"))
            .build();
        assertThat(configuration.getBufferTimeoutBetweenFlushes()).isEqualTo(2L);
    }

    @Test
    public void testBuilderWithMaxBackOffInMillisFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_MAX_BACKOFF_TIMEOUT, "3"))
            .build();
        assertThat(configuration.getMaxBackOffInMillis()).isEqualTo(3L);
    }

    @Test
    public void testBuilderWithBaseBackOffInMillisFromProperties() {
        FirehoseProducerConfiguration configuration = FirehoseProducerConfiguration
            .builder(REGION)
            .withProperties(props(FIREHOSE_PRODUCER_BUFFER_BASE_BACKOFF_TIMEOUT, "4"))
            .build();
        assertThat(configuration.getBaseBackOffInMillis()).isEqualTo(4L);
    }

    /** Builds a single-entry {@link Properties} for the property-driven tests above. */
    @Nonnull
    private Properties props(@Nonnull final String key, @Nonnull final String value) {
        Properties properties = new Properties();
        properties.setProperty(key, value);
        return properties;
    }
}
3,983
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/impl/FirehoseProducerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl;

import com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl.FirehoseProducer.FirehoseThreadFactory;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose;
import com.amazonaws.services.kinesisfirehose.model.PutRecordBatchRequest;
import com.amazonaws.services.kinesisfirehose.model.PutRecordBatchResponseEntry;
import com.amazonaws.services.kinesisfirehose.model.PutRecordBatchResult;
import com.amazonaws.services.kinesisfirehose.model.Record;
import org.apache.commons.lang3.RandomStringUtils;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import javax.annotation.Nonnull;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.IntStream;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_REGION;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAX_BUFFER_SIZE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl.FirehoseProducer.UserRecordResult;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.testutils.TestUtils.DEFAULT_DELIVERY_STREAM;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.fail;

/**
 * Unit tests for {@link FirehoseProducer}.
 *
 * <p>All tests rely on best effort to simulate and wait for how a multi-threaded system should
 * behave, trying to rely on deterministic results. However, results and timing depend on the
 * operating system scheduler and the JVM, so if any of these tests fail you may want to increase
 * the sleep timeout or perhaps comment out the failed ones.
 */
public class FirehoseProducerTest {

    private static final Logger LOGGER = LoggerFactory.getLogger(FirehoseProducerTest.class);

    /** Record payload size used to fill batches close to the Firehose per-request byte limit. */
    private static final int KB_512 = 512 * 1_024;

    @Mock
    private AmazonKinesisFirehose firehoseClient;

    private FirehoseProducer<UserRecordResult, Record> firehoseProducer;

    @Captor
    private ArgumentCaptor<PutRecordBatchRequest> putRecordCaptor;

    @BeforeMethod
    public void init() {
        MockitoAnnotations.initMocks(this);
        this.firehoseProducer = createFirehoseProducer();
    }

    @Test
    public void testFirehoseProducerSingleThreadHappyCase() throws Exception {
        PutRecordBatchResult successResult = new PutRecordBatchResult();
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(successResult);

        for (int i = 0; i < DEFAULT_MAX_BUFFER_SIZE; ++i) {
            addRecord(firehoseProducer);
        }
        // Give the background flusher time to drain the buffer.
        Thread.sleep(2000);
        LOGGER.debug("Number of outstanding records: {}", firehoseProducer.getOutstandingRecordsCount());
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
    }

    @Test
    public void testFirehoseProducerMultiThreadHappyCase() throws Exception {
        PutRecordBatchResult successResult = new PutRecordBatchResult();
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(successResult);

        ExecutorService exec = Executors.newFixedThreadPool(4);
        List<Callable<CompletableFuture<UserRecordResult>>> futures = new ArrayList<>();
        for (int j = 0; j < DEFAULT_MAX_BUFFER_SIZE; ++j) {
            futures.add(() -> addRecord(firehoseProducer));
        }
        exec.invokeAll(futures);
        exec.shutdown();

        // Was Thread.currentThread().join(3000), which is a self-join that merely sleeps;
        // use an explicit sleep to make the intent clear.
        Thread.sleep(3000);
        LOGGER.debug("Number of outstanding items: {}", firehoseProducer.getOutstandingRecordsCount());
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
    }

    @Test
    public void testFirehoseProducerMultiThreadFlushSyncHappyCase() throws Exception {
        PutRecordBatchResult successResult = mock(PutRecordBatchResult.class);
        ArgumentCaptor<PutRecordBatchRequest> captor = ArgumentCaptor.forClass(PutRecordBatchRequest.class);
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(successResult);

        ExecutorService exec = Executors.newFixedThreadPool(4);
        List<Callable<CompletableFuture<UserRecordResult>>> futures = new ArrayList<>();
        for (int j = 0; j < 400; ++j) {
            futures.add(() -> addRecord(firehoseProducer));
        }
        List<Future<CompletableFuture<UserRecordResult>>> results = exec.invokeAll(futures);
        exec.shutdown();

        for (Future<CompletableFuture<UserRecordResult>> f : results) {
            // Future.get() already blocks until completion; no busy-wait needed.
            UserRecordResult r = f.get().get();
            assertThat(r.isSuccessful()).isTrue();
        }
        firehoseProducer.flushSync();
        LOGGER.debug("Number of outstanding items: {}", firehoseProducer.getOutstandingRecordsCount());
        verify(firehoseClient).putRecordBatch(captor.capture());
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
        assertThat(firehoseProducer.isFlushFailed()).isFalse();
    }

    @Test
    public void testFirehoseProducerMultiThreadFlushAndWaitHappyCase() throws Exception {
        PutRecordBatchResult successResult = mock(PutRecordBatchResult.class);
        ArgumentCaptor<PutRecordBatchRequest> captor = ArgumentCaptor.forClass(PutRecordBatchRequest.class);
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(successResult);

        ExecutorService exec = Executors.newFixedThreadPool(4);
        List<Callable<CompletableFuture<UserRecordResult>>> futures = new ArrayList<>();
        for (int j = 0; j < 400; ++j) {
            futures.add(() -> addRecord(firehoseProducer));
        }
        List<Future<CompletableFuture<UserRecordResult>>> results = exec.invokeAll(futures);
        exec.shutdown();

        for (Future<CompletableFuture<UserRecordResult>> f : results) {
            UserRecordResult r = f.get().get();
            assertThat(r.isSuccessful()).isTrue();
        }

        // Keep flushing until the buffer is drained or a flush fails.
        while (firehoseProducer.getOutstandingRecordsCount() > 0 && !firehoseProducer.isFlushFailed()) {
            firehoseProducer.flush();
            try {
                Thread.sleep(500);
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt(); // restore interrupt status before failing
                fail();
            }
        }
        LOGGER.debug("Number of outstanding items: {}", firehoseProducer.getOutstandingRecordsCount());
        verify(firehoseClient).putRecordBatch(captor.capture());
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
        assertThat(firehoseProducer.isFlushFailed()).isFalse();
    }

    @Test
    public void testFirehoseProducerSingleThreadTimeoutExpiredHappyCase() throws Exception {
        PutRecordBatchResult successResult = new PutRecordBatchResult();
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(successResult);

        // Fewer records than the buffer size, so the flush is triggered by the timeout.
        for (int i = 0; i < 100; ++i) {
            addRecord(firehoseProducer);
        }
        Thread.sleep(2000);
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
    }

    @Test
    public void testFirehoseProducerSingleThreadBufferIsFullHappyCase() throws Exception {
        PutRecordBatchResult successResult = new PutRecordBatchResult();
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(successResult);

        // Twice the buffer capacity forces at least one "buffer full" flush.
        for (int i = 0; i < 2 * DEFAULT_MAX_BUFFER_SIZE; ++i) {
            addRecord(firehoseProducer);
        }
        Thread.sleep(2000);
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
    }

    /**
     * This test is responsible for checking if the consumer thread has performed the work or not,
     * so there is no way to throw an exception to be caught here; the assertion relies on whether
     * the buffer was flushed or not.
     */
    @Test
    public void testFirehoseProducerSingleThreadFailedToSendRecords() throws Exception {
        PutRecordBatchResult failedResult = new PutRecordBatchResult()
            .withFailedPutCount(1)
            .withRequestResponses(new PutRecordBatchResponseEntry()
                .withErrorCode("400")
                .withErrorMessage("Invalid Schema"));
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class))).thenReturn(failedResult);

        for (int i = 0; i < DEFAULT_MAX_BUFFER_SIZE; ++i) {
            addRecord(firehoseProducer);
        }
        Thread.sleep(2000);
        // Failed records remain buffered and the failure flag is raised.
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(DEFAULT_MAX_BUFFER_SIZE);
        assertThat(firehoseProducer.isFlushFailed()).isTrue();
    }

    @Test
    public void testFirehoseProducerBatchesRecords() throws Exception {
        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class)))
            .thenReturn(new PutRecordBatchResult());

        // Fill up the maximum capacity: 8 * 512kB = 4MB
        IntStream.range(0, 8).forEach(i -> addRecord(firehoseProducer, KB_512));
        // Add a single byte to overflow the maximum
        addRecord(firehoseProducer, 1);

        Thread.sleep(3000);
        assertThat(firehoseProducer.getOutstandingRecordsCount()).isEqualTo(0);
        verify(firehoseClient, times(2)).putRecordBatch(putRecordCaptor.capture());

        // The first batch should contain 8 records (up to 4MB), the second the remaining record.
        // (Previous comment claimed 4 records, contradicting the hasSize(8) assertion below.)
        assertThat(putRecordCaptor.getAllValues().get(0).getRecords())
            .hasSize(8).allMatch(e -> e.getData().limit() == KB_512);
        assertThat(putRecordCaptor.getAllValues().get(1).getRecords())
            .hasSize(1).allMatch(e -> e.getData().limit() == 1);
    }

    @Test
    public void testFirehoseProducerBatchesRecordsWithCustomBatchSize() throws Exception {
        Properties config = new Properties();
        config.setProperty(FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES, "100");
        FirehoseProducer<UserRecordResult, Record> producer = createFirehoseProducer(config);

        when(firehoseClient.putRecordBatch(any(PutRecordBatchRequest.class)))
            .thenReturn(new PutRecordBatchResult());

        // Overflow the maximum batch capacity: 2 * 100 bytes = 200 bytes against a 100-byte limit.
        IntStream.range(0, 2).forEach(i -> addRecord(producer, 100));

        Thread.sleep(3000);
        // BUG FIX: assert on the local producer that actually received the records;
        // the previous assertion checked the untouched field and passed vacuously.
        assertThat(producer.getOutstandingRecordsCount()).isEqualTo(0);
        verify(firehoseClient, times(2)).putRecordBatch(putRecordCaptor.capture());

        // Each batch should contain exactly one 100-byte record.
        assertThat(putRecordCaptor.getAllValues().get(0).getRecords())
            .hasSize(1).allMatch(e -> e.getData().limit() == 100);
        assertThat(putRecordCaptor.getAllValues().get(1).getRecords())
            .hasSize(1).allMatch(e -> e.getData().limit() == 100);
    }

    @Test
    public void testThreadFactoryNewThreadName() {
        FirehoseThreadFactory threadFactory = new FirehoseThreadFactory();

        Thread thread1 = threadFactory.newThread(() -> LOGGER.info("Running task 1"));
        Thread thread2 = threadFactory.newThread(() -> LOGGER.info("Running task 2"));
        Thread thread3 = threadFactory.newThread(() -> LOGGER.info("Running task 3"));

        // Thread index is allocated statically, so cannot deterministically guarantee the thread
        // number. Work out thread1's number and then check subsequent thread names.
        int threadNumber = Integer.parseInt(thread1.getName().substring(thread1.getName().lastIndexOf('-') + 1));

        assertThat(thread1.getName()).isEqualTo("kda-writer-thread-" + threadNumber++);
        assertThat(thread1.isDaemon()).isFalse();

        assertThat(thread2.getName()).isEqualTo("kda-writer-thread-" + threadNumber++);
        assertThat(thread2.isDaemon()).isFalse();

        assertThat(thread3.getName()).isEqualTo("kda-writer-thread-" + threadNumber);
        assertThat(thread3.isDaemon()).isFalse();
    }

    /** Adds a single 64-byte random record to the given producer. */
    @Nonnull
    private CompletableFuture<UserRecordResult> addRecord(final FirehoseProducer<UserRecordResult, Record> producer) {
        return addRecord(producer, 64);
    }

    /**
     * Adds a random alphabetic record of {@code length} bytes to the given producer.
     * Checked exceptions are wrapped so this can be used inside lambdas.
     */
    @Nonnull
    private CompletableFuture<UserRecordResult> addRecord(final FirehoseProducer<UserRecordResult, Record> producer,
                                                          final int length) {
        try {
            // Alphabetic chars are ASCII, so UTF-8 yields exactly `length` bytes;
            // the explicit charset avoids platform-default surprises.
            Record record = new Record().withData(ByteBuffer.wrap(
                RandomStringUtils.randomAlphabetic(length).getBytes(StandardCharsets.UTF_8)));
            return producer.addUserRecord(record);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Creates a producer with default test configuration. */
    @Nonnull
    private FirehoseProducer<UserRecordResult, Record> createFirehoseProducer() {
        return createFirehoseProducer(new Properties());
    }

    /** Creates a producer with a short (1s) flush timeout on top of the supplied configuration. */
    @Nonnull
    private FirehoseProducer<UserRecordResult, Record> createFirehoseProducer(@Nonnull final Properties config) {
        config.setProperty(FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT, "1000");
        config.setProperty(AWS_REGION, "us-east-1");
        return new FirehoseProducer<>(DEFAULT_DELIVERY_STREAM, firehoseClient, config);
    }
}
3,984
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/util/AWSUtilTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.util;

import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.BasicCredentialProvider;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import javax.annotation.Nonnull;
import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_ACCESS_KEY_ID;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_PROFILE_NAME;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_REGION;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_SECRET_ACCESS_KEY;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.ASSUME_ROLE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.AUTO;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.BASIC;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAXIMUM_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.REDUCED_QUOTA_MAXIMUM_THROUGHPUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.createKinesisFirehoseClientFromConfiguration;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.getCredentialProviderType;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateAssumeRoleCredentialsProvider;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateBasicProviderConfiguration;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateConfiguration;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateProfileProviderConfiguration;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
 * Unit tests for {@code AWSUtil}: Firehose client creation, configuration validation for the
 * BASIC / PROFILE / ASSUME_ROLE credential providers, credential-provider type resolution, and
 * per-region default batch-byte limits.
 */
public class AWSUtilTest {

    // Baseline configuration (BASIC credentials + region) reused/mutated by most tests.
    private Properties configProps;

    @BeforeMethod
    public void setUp() {
        configProps = new Properties();
        configProps.setProperty(AWS_ACCESS_KEY_ID, "DUMMY");
        configProps.setProperty(AWS_SECRET_ACCESS_KEY, "DUMMY-SECRET");
        configProps.setProperty(AWS_PROFILE_NAME, "Test");
        configProps.setProperty(AWS_REGION, "us-east-1");
    }

    @Test
    public void testCreateKinesisFirehoseClientFromConfigurationWithNullConfiguration() {
        assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> createKinesisFirehoseClientFromConfiguration(null,
                new BasicCredentialProvider(configProps)))
            .withMessageContaining("Configuration properties cannot be null");
    }

    @Test
    public void testCreateKinesisFirehoseClientFromConfigurationWithNullCredentialProvider() {
        assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> createKinesisFirehoseClientFromConfiguration(configProps, null))
            .withMessageContaining("Credential Provider cannot be null");
    }

    @Test
    public void testCreateKinesisFirehoseClientFromConfigurationHappyCase() {
        AmazonKinesisFirehose firehoseClient =
            createKinesisFirehoseClientFromConfiguration(configProps, new BasicCredentialProvider(configProps));
        // No remote call is made here; only client construction is exercised.
        assertThat(firehoseClient).isNotNull();
    }

    @Test
    public void testValidateConfigurationWithNullConfiguration() {
        assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> validateConfiguration(null))
            .withMessageContaining("Configuration properties cannot be null");
    }

    @Test
    public void testValidateConfigurationWithNoRegionOrFirehoseEndpoint() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> validateConfiguration(new Properties()))
            .withMessageContaining("Either AWS region should be specified or AWS Firehose endpoint and endpoint signing region");
    }

    @Test
    public void testValidateConfigurationHappyCase() {
        // Validation passes the same Properties instance through on success.
        Properties config = validateConfiguration(configProps);
        assertThat(configProps).isEqualTo(config);
    }

    @Test
    public void testValidateBasicConfigurationHappyCase() {
        Properties config = validateBasicProviderConfiguration(configProps);
        assertThat(configProps).isEqualTo(config);
    }

    @Test
    public void testValidateBasicConfigurationWithNullConfiguration() {
        assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> validateBasicProviderConfiguration(null))
            .withMessageContaining("Configuration properties cannot be null");
    }

    @Test
    public void testValidateBasicConfigurationWithNoAwsAccessKeyId() {
        configProps.remove(AWS_ACCESS_KEY_ID);
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> validateBasicProviderConfiguration(configProps))
            .withMessageContaining("AWS access key must be specified with credential provider BASIC");
    }

    @Test
    public void testValidateBasicConfigurationWithNoAwsSecretKeyId() {
        configProps.remove(AWS_SECRET_ACCESS_KEY);
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> validateBasicProviderConfiguration(configProps))
            .withMessageContaining("AWS secret key must be specified with credential provider BASIC");
    }

    @Test
    public void testValidateProfileProviderConfigurationWithNullConfiguration() {
        assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> validateProfileProviderConfiguration(null))
            .withMessageContaining("Configuration properties cannot be null");
    }

    @Test
    public void testValidateProfileProviderConfigurationHappyCase() {
        Properties config = validateProfileProviderConfiguration(configProps);
        assertThat(configProps).isEqualTo(config);
    }

    @Test
    public void testValidateProfileProviderConfigurationWithNoProfileName() {
        configProps.remove(AWS_PROFILE_NAME);
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> validateProfileProviderConfiguration(configProps))
            .withMessageContaining("AWS profile name should be specified with credential provider PROFILE");
    }

    @Test
    public void testValidateAssumeRoleProviderConfigurationHappyCase() {
        Properties properties = buildAssumeRoleProperties();
        assertThat(validateAssumeRoleCredentialsProvider(properties)).isEqualTo(properties);
    }

    @Test
    public void testValidateAssumeRoleProviderConfigurationWithNoRoleArn() {
        Properties properties = buildAssumeRoleProperties();
        properties.remove(AWSConfigConstants.roleArn(AWS_CREDENTIALS_PROVIDER));
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> validateAssumeRoleCredentialsProvider(properties))
            .withMessageContaining("AWS role arn to be assumed must be provided with credential provider type ASSUME_ROLE");
    }

    @Test
    public void testValidateAssumeRoleProviderConfigurationWithNoRoleSessionName() {
        Properties properties = buildAssumeRoleProperties();
        properties.remove(AWSConfigConstants.roleSessionName(AWS_CREDENTIALS_PROVIDER));
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> validateAssumeRoleCredentialsProvider(properties))
            .withMessageContaining("AWS role session name must be provided with credential provider type ASSUME_ROLE");
    }

    @Test
    public void testValidateAssumeRoleProviderConfigurationWithNullConfiguration() {
        assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> validateAssumeRoleCredentialsProvider(null))
            .withMessageContaining("Configuration properties cannot be null");
    }

    /** Builds the baseline config plus the role ARN and session name required for ASSUME_ROLE. */
    @Nonnull
    private Properties buildAssumeRoleProperties() {
        Properties properties = new Properties();
        properties.putAll(configProps);
        properties.put(AWSConfigConstants.roleArn(AWS_CREDENTIALS_PROVIDER), "arn-1234567812345678");
        properties.put(AWSConfigConstants.roleSessionName(AWS_CREDENTIALS_PROVIDER), "session-name");
        return properties;
    }

    @Test
    public void testGetCredentialProviderTypeIsAutoNullProviderKey() {
        // No provider key and no credentials configured => falls back to AUTO.
        assertThat(getCredentialProviderType(new Properties(), null)).isEqualTo(AUTO);
    }

    @Test
    public void testGetCredentialProviderTypeIsAutoWithProviderKeyMismatch() {
        assertThat(getCredentialProviderType(configProps, "missing-key")).isEqualTo(AUTO);
    }

    @Test
    public void testGetCredentialProviderTypeIsAutoMissingAccessKey() {
        configProps.remove(AWS_ACCESS_KEY_ID);
        assertThat(getCredentialProviderType(configProps, null)).isEqualTo(AUTO);
    }

    @Test
    public void testGetCredentialProviderTypeIsAutoMissingSecretKey() {
        configProps.remove(AWS_SECRET_ACCESS_KEY);
        assertThat(getCredentialProviderType(configProps, null)).isEqualTo(AUTO);
    }

    @Test
    public void testGetCredentialProviderTypeIsBasic() {
        // Both access and secret key present => BASIC is inferred.
        assertThat(getCredentialProviderType(configProps, null)).isEqualTo(BASIC);
    }

    @Test
    public void testGetCredentialProviderTypeIsAutoWithEmptyProviderKey() {
        configProps.setProperty("key", "");
        assertThat(getCredentialProviderType(configProps, "key")).isEqualTo(AUTO);
    }

    @Test
    public void testGetCredentialProviderTypeIsAutoWithBadConfiguration() {
        // An unparseable provider value degrades to AUTO rather than throwing.
        configProps.setProperty("key", "Bad");
        assertThat(getCredentialProviderType(configProps, "key")).isEqualTo(AUTO);
    }

    @Test
    public void testGetCredentialProviderTypeIsParsedFromProviderKey() {
        configProps.setProperty("key", "ASSUME_ROLE");
        assertThat(getCredentialProviderType(configProps, "key")).isEqualTo(ASSUME_ROLE);
    }

    @Test
    public void testGetDefaultMaxPutRecordBatchBytesForNullRegion() {
        // Unknown/null region gets the conservative reduced-quota limit.
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes(null)).isEqualTo(REDUCED_QUOTA_MAXIMUM_THROUGHPUT);
    }

    @Test
    public void testGetDefaultMaxPutRecordBatchBytesForHighQuotaRegions() {
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes("us-east-1")).isEqualTo(DEFAULT_MAXIMUM_BATCH_BYTES);
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes("us-west-2")).isEqualTo(DEFAULT_MAXIMUM_BATCH_BYTES);
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes("eu-west-1")).isEqualTo(DEFAULT_MAXIMUM_BATCH_BYTES);
    }

    @Test
    public void testGetDefaultMaxPutRecordBatchBytesForReducedQuotaRegions() {
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes("us-east-2")).isEqualTo(REDUCED_QUOTA_MAXIMUM_THROUGHPUT);
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes("us-west-1")).isEqualTo(REDUCED_QUOTA_MAXIMUM_THROUGHPUT);
        assertThat(AWSUtil.getDefaultMaxPutRecordBatchBytes("eu-west-2")).isEqualTo(REDUCED_QUOTA_MAXIMUM_THROUGHPUT);
    }
}
3,985
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/config/AWSConfigConstantsTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.config;

import org.testng.annotations.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
 * Unit tests for the {@code AWSConfigConstants} key-builder methods: each builder is checked for
 * its prefixed key format, and for its behavior with a null/empty/absent prefix.
 *
 * <p>Note the asymmetry exercised below: the access-key/secret-key builders tolerate a null or
 * empty prefix (returning the bare key), while the profile/role builders reject an empty prefix
 * with {@link IllegalArgumentException}.
 */
public class AWSConfigConstantsTest {

    @Test
    public void testAccessKeyId() {
        assertThat(AWSConfigConstants.accessKeyId("prefix")).isEqualTo("prefix.basic.aws_access_key_id");
    }

    @Test
    public void testAccessKeyId_null() {
        // Null prefix falls back to the unqualified key.
        assertThat(AWSConfigConstants.accessKeyId(null)).isEqualTo("aws_access_key_id");
    }

    @Test
    public void testAccessKeyId_empty() {
        assertThat(AWSConfigConstants.accessKeyId("")).isEqualTo("aws_access_key_id");
    }

    @Test
    public void testAccessKeyId_noPrefix() {
        assertThat(AWSConfigConstants.accessKeyId()).isEqualTo("aws_access_key_id");
    }

    @Test
    public void testSecretKey() {
        assertThat(AWSConfigConstants.secretKey("prefix")).isEqualTo("prefix.basic.aws_secret_access_key");
    }

    @Test
    public void testSecretKey_null() {
        assertThat(AWSConfigConstants.secretKey(null)).isEqualTo("aws_secret_access_key");
    }

    @Test
    public void testSecretKey_empty() {
        assertThat(AWSConfigConstants.secretKey("")).isEqualTo("aws_secret_access_key");
    }

    @Test
    public void testSecretKey_noPrefix() {
        assertThat(AWSConfigConstants.secretKey()).isEqualTo("aws_secret_access_key");
    }

    @Test
    public void testProfilePath() {
        assertThat(AWSConfigConstants.profilePath("prefix")).isEqualTo("prefix.profile.path");
    }

    @Test
    public void testProfilePath_empty() {
        // Profile/role builders require a non-empty prefix, unlike the basic-credential builders.
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AWSConfigConstants.profilePath(""));
    }

    @Test
    public void testProfileName() {
        assertThat(AWSConfigConstants.profileName("prefix")).isEqualTo("prefix.profile.name");
    }

    @Test
    public void testProfileName_empty() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AWSConfigConstants.profileName(""));
    }

    @Test
    public void testRoleArn() {
        assertThat(AWSConfigConstants.roleArn("prefix")).isEqualTo("prefix.role.arn");
    }

    @Test
    public void testRoleArn_empty() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AWSConfigConstants.roleArn(""));
    }

    @Test
    public void testRoleSessionName() {
        assertThat(AWSConfigConstants.roleSessionName("prefix")).isEqualTo("prefix.role.sessionName");
    }

    @Test
    public void testRoleSessionName_empty() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AWSConfigConstants.roleSessionName(""));
    }

    @Test
    public void testExternalId() {
        assertThat(AWSConfigConstants.externalId("prefix")).isEqualTo("prefix.role.externalId");
    }

    @Test
    public void testExternalId_empty() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AWSConfigConstants.externalId(""));
    }

    @Test
    public void testRoleCredentialsProvider() {
        assertThat(AWSConfigConstants.roleCredentialsProvider("prefix")).isEqualTo("prefix.role.provider");
    }

    @Test
    public void testRoleCredentialsProvider_empty() {
        assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AWSConfigConstants.roleCredentialsProvider(""));
    }
}
3,986
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/serialization/JsonSerializationSchemaTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amazonaws.services.kinesisanalytics.flink.connectors.serialization; import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.SerializationException; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import static org.testng.Assert.assertNotNull; public class JsonSerializationSchemaTest { private JsonSerializationSchema<TestSerializable> serializationSchema; @BeforeMethod public void init() { serializationSchema = new JsonSerializationSchema<>(); } @Test public void testJsonSerializationSchemaHappyCase() { TestSerializable serializable = new TestSerializable(1, "Test description"); byte[] serialized = serializationSchema.serialize(serializable); assertNotNull(serialized); } @Test(expectedExceptions = NullPointerException.class) public void testJsonSerializationSchemaNullCase() { serializationSchema.serialize(null); } @Test(expectedExceptions = SerializationException.class, expectedExceptionsMessageRegExp = "Failed trying to serialize.*") public void 
testJsonSerializationSchemaInvalidSerializable() { JsonSerializationSchema<TestInvalidSerializable> serializationSchema = new JsonSerializationSchema<>(); TestInvalidSerializable invalidSerializable = new TestInvalidSerializable("Unit", "Test"); serializationSchema.serialize(invalidSerializable); } private static class TestSerializable { @JsonSerialize private final int id; @JsonSerialize private final String description; @JsonCreator public TestSerializable(final int id, final String desc) { this.id = id; this.description = desc; } } private static class TestInvalidSerializable { private final String firstName; private final String lastName; public TestInvalidSerializable(final String firstName, final String lastName) { this.firstName = firstName; this.lastName = lastName; } } }
3,987
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/BasicCredentialProviderTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.Properties;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@link BasicCredentialProvider}: the provider must expose the configured
 * access/secret key pair, both initially and after a {@code refresh()}.
 */
public class BasicCredentialProviderTest {

    // Provider built in setUp() from a fixed ACCESS/SECRET key pair.
    private BasicCredentialProvider providerUnderTest;

    @BeforeMethod
    public void setUp() {
        Properties config = new Properties();
        config.put(AWSConfigConstants.accessKeyId(), "ACCESS");
        config.put(AWSConfigConstants.secretKey(), "SECRET");
        config.put(AWSConfigConstants.AWS_REGION, "eu-west-2");
        providerUnderTest = new BasicCredentialProvider(config);
    }

    @Test
    public void testGetAwsCredentialsProvider() {
        assertHasConfiguredKeys(providerUnderTest.getAwsCredentialsProvider().getCredentials());
    }

    @Test
    public void testGetAwsCredentialsProviderSuppliesCredentialsAfterRefresh() {
        AWSCredentialsProvider awsProvider = providerUnderTest.getAwsCredentialsProvider();
        awsProvider.refresh();
        assertHasConfiguredKeys(awsProvider.getCredentials());
    }

    /** Asserts the credentials carry the access/secret key pair configured in setUp(). */
    private static void assertHasConfiguredKeys(final AWSCredentials credentials) {
        assertThat(credentials.getAWSAccessKeyId()).isEqualTo("ACCESS");
        assertThat(credentials.getAWSSecretKey()).isEqualTo("SECRET");
    }
}
3,988
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/ProfileCredentialProviderTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import org.testng.annotations.Test;

import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_REGION;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link ProfileCredentialProvider}: credentials are read from a
 * named profile in an on-disk credentials file.
 */
public class ProfileCredentialProviderTest {

    // Loads the "default" profile from the checked-in fixture file under
    // src/test/resources/profile and verifies the (well-known AWS example)
    // key pair stored there is returned unchanged.
    @Test
    public void testGetAwsCredentialsProvider() {
        Properties properties = new Properties();
        properties.put(AWS_REGION, "eu-west-2");
        properties.put(AWSConfigConstants.profileName(AWS_CREDENTIALS_PROVIDER), "default");
        properties.put(AWSConfigConstants.profilePath(AWS_CREDENTIALS_PROVIDER), "src/test/resources/profile");

        AWSCredentials credentials = new ProfileCredentialProvider(properties)
                .getAwsCredentialsProvider().getCredentials();

        assertThat(credentials.getAWSAccessKeyId()).isEqualTo("AKIAIOSFODNN7EXAMPLE");
        assertThat(credentials.getAWSSecretKey()).isEqualTo("wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY");
    }
}
3,989
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/CredentialProviderFactoryTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;

import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_ACCESS_KEY_ID;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_PROFILE_NAME;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_REGION;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_SECRET_ACCESS_KEY;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.ASSUME_ROLE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.AUTO;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.BASIC;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.ENV_VARIABLES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.PROFILE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.SYS_PROPERTIES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.factory.CredentialProviderFactory.newCredentialProvider;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
 * Tests for {@code CredentialProviderFactory#newCredentialProvider}: each
 * {@code CredentialProviderType} maps to the matching provider implementation,
 * and missing mandatory configuration is rejected with a descriptive
 * {@link IllegalArgumentException}.
 */
public class CredentialProviderFactoryTest {

    private Properties configProps;

    /** Fresh property set per test; only the (always required) region is preset. */
    @BeforeMethod
    public void setUp() {
        configProps = new Properties();
        configProps.setProperty(AWS_REGION, "us-west-2");
    }

    @Test
    public void testBasicCredentialProviderHappyCase() {
        configProps.setProperty(AWS_ACCESS_KEY_ID, "accessKeyId");
        configProps.setProperty(AWS_SECRET_ACCESS_KEY, "secretAccessKey");

        CredentialProvider credentialProvider = newCredentialProvider(BASIC, configProps);

        assertThat(credentialProvider).isInstanceOf(BasicCredentialProvider.class);
    }

    // A null provider key must be tolerated by the three-argument overload.
    @Test
    public void testBasicCredentialProviderWithNullProviderKey() {
        configProps.setProperty(AWS_ACCESS_KEY_ID, "accessKeyId");
        configProps.setProperty(AWS_SECRET_ACCESS_KEY, "secretAccessKey");

        CredentialProvider credentialProvider = newCredentialProvider(BASIC, configProps, null);

        assertThat(credentialProvider).isInstanceOf(BasicCredentialProvider.class);
    }

    // BASIC without an access key is a configuration error, not a silent fallback.
    @Test
    public void testBasicCredentialProviderInvalidConfigurationProperties() {
        assertThatExceptionOfType(IllegalArgumentException.class)
                .isThrownBy(() -> newCredentialProvider(BASIC, configProps))
                .withMessageContaining("AWS access key must be specified with credential provider BASIC.");
    }

    @Test
    public void testProfileCredentialProviderHappyCase() {
        configProps.setProperty(AWS_PROFILE_NAME, "TEST");

        CredentialProvider credentialProvider = newCredentialProvider(PROFILE, configProps);

        assertThat(credentialProvider).isInstanceOf(ProfileCredentialProvider.class);
    }

    // PROFILE requires a profile name to be configured.
    @Test
    public void testProfileCredentialProviderInvalidConfigurationProperties() {
        assertThatExceptionOfType(IllegalArgumentException.class)
                .isThrownBy(() -> newCredentialProvider(PROFILE, configProps))
                .withMessageContaining("AWS profile name should be specified with credential provider PROFILE.");
    }

    @Test
    public void testEnvironmentCredentialProviderHappyCase() {
        CredentialProvider credentialProvider = newCredentialProvider(ENV_VARIABLES, configProps);

        assertThat(credentialProvider).isInstanceOf(EnvironmentCredentialProvider.class);
    }

    @Test
    public void testSystemCredentialProviderHappyCase() {
        CredentialProvider credentialProvider = newCredentialProvider(SYS_PROPERTIES, configProps);

        assertThat(credentialProvider).isInstanceOf(SystemCredentialProvider.class);
    }

    @Test
    public void testDefaultCredentialProviderHappyCase() {
        CredentialProvider credentialProvider = newCredentialProvider(AUTO, configProps);

        assertThat(credentialProvider).isInstanceOf(DefaultCredentialProvider.class);
    }

    // A null type falls back to the default provider chain.
    @Test
    public void testCredentialProviderWithNullProvider() {
        CredentialProvider credentialProvider = newCredentialProvider(null, configProps);

        assertThat(credentialProvider).isInstanceOf(DefaultCredentialProvider.class);
    }

    @Test
    public void testAssumeRoleCredentialProviderHappyCase() {
        configProps.setProperty(AWSConfigConstants.roleArn(AWS_CREDENTIALS_PROVIDER), "arn-1234567812345678");
        configProps.setProperty(AWSConfigConstants.roleSessionName(AWS_CREDENTIALS_PROVIDER), "role-session");

        CredentialProvider credentialProvider = newCredentialProvider(ASSUME_ROLE, configProps);

        assertThat(credentialProvider).isInstanceOf(AssumeRoleCredentialsProvider.class);
    }

    // ASSUME_ROLE requires the role ARN to be configured.
    @Test
    public void testAssumeRoleCredentialProviderInvalidConfigurationProperties() {
        assertThatExceptionOfType(IllegalArgumentException.class)
                .isThrownBy(() -> newCredentialProvider(ASSUME_ROLE, configProps))
                .withMessageContaining("AWS role arn to be assumed must be provided with credential provider type ASSUME_ROLE");
    }
}
3,990
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/CredentialProviderTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential; import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants; import org.testng.annotations.Test; import java.util.Properties; import static org.assertj.core.api.Assertions.assertThat; public class CredentialProviderTest { @Test public void testGetProperties() { String key = "key"; Properties properties = new Properties(); properties.put(AWSConfigConstants.accessKeyId(key), "ACCESS"); properties.put(AWSConfigConstants.secretKey(key), "SECRET"); properties.put(AWSConfigConstants.AWS_REGION, "eu-west-2"); CredentialProvider provider = new BasicCredentialProvider(properties, key); assertThat(provider.getProperties()).isEqualTo(properties); assertThat(provider.getProviderKey()).isEqualTo(key); } }
3,991
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/AssumeRoleCredentialsProviderTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;

import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import org.testng.annotations.Test;

import javax.annotation.Nonnull;
import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

/**
 * Tests for {@link AssumeRoleCredentialsProvider}: the role ARN, session name
 * and external id configured under a given property prefix must be forwarded
 * to {@code createAwsCredentialsProvider}, whose result is returned as-is.
 */
public class AssumeRoleCredentialsProviderTest {

    // Properties keyed under the default prefix (AWS_CREDENTIALS_PROVIDER).
    @Test
    public void testGetAwsCredentialsProviderWithDefaultPrefix() {
        Properties properties = createAssumeRoleProperties(AWS_CREDENTIALS_PROVIDER);

        AssumeRoleCredentialsProvider credentialsProvider = new AssumeRoleCredentialsProvider(properties);

        assertGetAwsCredentialsProvider(credentialsProvider);
    }

    // Same behavior when the caller supplies a custom property prefix.
    @Test
    public void testGetAwsCredentialsProviderWithCustomPrefix() {
        Properties properties = createAssumeRoleProperties("prefix");

        AssumeRoleCredentialsProvider credentialsProvider = new AssumeRoleCredentialsProvider(properties, "prefix");

        assertGetAwsCredentialsProvider(credentialsProvider);
    }

    // Spies on the provider so the real STS client is never built: the
    // factory method is stubbed to return a mock, then verified to have been
    // called with the exact values from createAssumeRoleProperties.
    private void assertGetAwsCredentialsProvider(@Nonnull final AssumeRoleCredentialsProvider credentialsProvider) {
        STSAssumeRoleSessionCredentialsProvider expected = mock(STSAssumeRoleSessionCredentialsProvider.class);
        AssumeRoleCredentialsProvider provider = spy(credentialsProvider);
        doReturn(expected).when(provider).createAwsCredentialsProvider(any(), anyString(), anyString(), any());

        assertThat(provider.getAwsCredentialsProvider()).isEqualTo(expected);

        verify(provider).createAwsCredentialsProvider(eq("arn-1234567812345678"),
                eq("session-name"),
                eq("external-id"),
                any());
    }

    /** Builds role/session/external-id properties under the given prefix. */
    @Nonnull
    private Properties createAssumeRoleProperties(@Nonnull final String prefix) {
        Properties properties = new Properties();
        properties.put(AWSConfigConstants.AWS_REGION, "eu-west-2");
        properties.put(AWSConfigConstants.roleArn(prefix), "arn-1234567812345678");
        properties.put(AWSConfigConstants.roleSessionName(prefix), "session-name");
        properties.put(AWSConfigConstants.externalId(prefix), "external-id");
        return properties;
    }
}
3,992
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose/examples/SimpleStreamString.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amazonaws.services.kinesisanalytics.flink.connectors.firehose.examples; import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants; import com.amazonaws.services.kinesisanalytics.flink.connectors.producer.FlinkKinesisFirehoseProducer; import org.apache.commons.lang3.RandomStringUtils; import org.apache.flink.api.common.serialization.SimpleStringSchema; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.source.SourceFunction; import java.util.Properties; public class SimpleStreamString { private static final String SINK_NAME = "Flink Kinesis Firehose Sink"; public static void main(String[] args) throws Exception { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); DataStream<String> simpleStringStream = env.addSource(new EventsGenerator()); Properties configProps = new Properties(); configProps.setProperty(AWSConfigConstants.AWS_ACCESS_KEY_ID, "aws_access_key_id"); configProps.setProperty(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, "aws_secret_access_key"); 
configProps.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); FlinkKinesisFirehoseProducer<String> producer = new FlinkKinesisFirehoseProducer<>("firehose-delivery-stream-name", new SimpleStringSchema(), configProps); simpleStringStream.addSink(producer).name(SINK_NAME); env.execute(); } /** * Data generator that creates strings starting with a sequence number followed by a dash and 12 random characters. */ public static class EventsGenerator implements SourceFunction<String> { private boolean running = true; @Override public void run(SourceContext<String> ctx) throws Exception { long seq = 0; while (running) { Thread.sleep(10); ctx.collect((seq++) + "-" + RandomStringUtils.randomAlphabetic(12)); } } @Override public void cancel() { running = false; } } }
3,993
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose/examples/WordCountData.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.firehose.examples;

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;

/**
 * Fixed input data for the word-count examples: the lines of Hamlet's
 * "To be, or not to be" soliloquy.
 */
public class WordCountData {

    // One soliloquy line per element; consumed verbatim by the example jobs.
    public static final String[] WORDS = new String[] {
        "To be, or not to be,--that is the question:--",
        "Whether 'tis nobler in the mind to suffer",
        "The slings and arrows of outrageous fortune",
        "Or to take arms against a sea of troubles,",
        "And by opposing end them?--To die,--to sleep,--",
        "No more; and by a sleep to say we end",
        "The heartache, and the thousand natural shocks",
        "That flesh is heir to,--'tis a consummation",
        "Devoutly to be wish'd. To die,--to sleep;--",
        "To sleep! perchance to dream:--ay, there's the rub;",
        "For in that sleep of death what dreams may come,",
        "When we have shuffled off this mortal coil,",
        "Must give us pause: there's the respect",
        "That makes calamity of so long life;",
        "For who would bear the whips and scorns of time,",
        "The oppressor's wrong, the proud man's contumely,",
        "The pangs of despis'd love, the law's delay,",
        "The insolence of office, and the spurns",
        "That patient merit of the unworthy takes,",
        "When he himself might his quietus make",
        "With a bare bodkin? who would these fardels bear,",
        "To grunt and sweat under a weary life,",
        "But that the dread of something after death,--",
        "The undiscover'd country, from whose bourn",
        "No traveller returns,--puzzles the will,",
        "And makes us rather bear those ills we have",
        "Than fly to others that we know not of?",
        "Thus conscience does make cowards of us all;",
        "And thus the native hue of resolution",
        "Is sicklied o'er with the pale cast of thought;",
        "And enterprises of great pith and moment,",
        "With this regard, their currents turn awry,",
        "And lose the name of action.--Soft you now!",
        "The fair Ophelia!--Nymph, in thy orisons",
        "Be all my sins remember'd."
    };

    /** Wraps {@link #WORDS} as a batch {@code DataSet} for the given environment. */
    public static DataSet<String> getDefaultTextLineDataSet(ExecutionEnvironment env) {
        return env.fromElements(WORDS);
    }
}
3,994
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose/examples/SimpleWordCount.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.firehose.examples;

import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.producer.FlinkKinesisFirehoseProducer;
import com.amazonaws.services.kinesisanalytics.flink.connectors.serialization.JsonSerializationSchema;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;
import java.util.Collector;

import java.util.Properties;

/**
 * Example streaming word count over {@link WordCountData#WORDS} whose running
 * (word, count) totals are written to a Kinesis Firehose delivery stream as
 * JSON. Credentials below are placeholders — replace before running.
 */
public class SimpleWordCount {

    private static final String SINK_NAME = "Flink Kinesis Firehose Sink";

    public static void main(String[] args) throws Exception {
        // set up the execution environment
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // get input data
        DataStream<String> text = env.fromElements(WordCountData.WORDS);

        DataStream<Tuple2<String, Integer>> counts =
            // normalize and split each line
            text.map(line -> line.toLowerCase().split("\\W+"))
                // convert split line in pairs (2-tuples) containing: (word,1)
                .flatMap(new FlatMapFunction<String[], Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String[] value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // drop the empty tokens produced by splitting on \W+
                        Arrays.stream(value)
                            .filter(t -> t.length() > 0)
                            .forEach(t -> out.collect(new Tuple2<>(t, 1)));
                    }
                })
                // group by the tuple field "0" and sum up tuple field "1"
                .keyBy(0)
                .sum(1);

        // Placeholder credentials/region: substitute real values before running.
        Properties configProps = new Properties();
        configProps.setProperty(AWSConfigConstants.AWS_ACCESS_KEY_ID, "aws_access_key_id");
        configProps.setProperty(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, "aws_secret_access_key");
        configProps.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1");

        FlinkKinesisFirehoseProducer<Tuple2<String, Integer>> producer =
                new FlinkKinesisFirehoseProducer<>("firehose-delivery-stream", new JsonSerializationSchema<>(), configProps);

        counts.addSink(producer).name(SINK_NAME);

        env.execute();
    }
}
3,995
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/firehose/examples/AssumeRoleSimpleStreamString.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.firehose.examples;

import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.producer.FlinkKinesisFirehoseProducer;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType.ASSUME_ROLE;

/**
 * This example application streams dummy data to the specified Firehose using Assume Role authentication mechanism.
 * See https://docs.aws.amazon.com/kinesisanalytics/latest/java/examples-cross.html for more information.
 */
public class AssumeRoleSimpleStreamString {

    private static final String SINK_NAME = "Flink Kinesis Firehose Sink";

    // Placeholders — fill in your own stream/role before running.
    private static final String STREAM_NAME = "<replace-with-your-stream>";
    private static final String ROLE_ARN = "<replace-with-your-role-arn>";
    private static final String ROLE_SESSION_NAME = "<replace-with-your-role-session-name>";
    private static final String REGION = "us-east-1";

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Reuses the dummy data generator from the basic example.
        DataStream<String> simpleStringStream = env.addSource(new SimpleStreamString.EventsGenerator());

        // Selects the ASSUME_ROLE credential provider and supplies the role to assume.
        Properties configProps = new Properties();
        configProps.setProperty(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER, ASSUME_ROLE.name());
        configProps.setProperty(AWSConfigConstants.AWS_ROLE_ARN, ROLE_ARN);
        configProps.setProperty(AWSConfigConstants.AWS_ROLE_SESSION_NAME, ROLE_SESSION_NAME);
        configProps.setProperty(AWSConfigConstants.AWS_REGION, REGION);

        FlinkKinesisFirehoseProducer<String> producer =
                new FlinkKinesisFirehoseProducer<>(STREAM_NAME, new SimpleStringSchema(), configProps);

        simpleStringStream.addSink(producer).name(SINK_NAME);

        env.execute();
    }
}
3,996
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/test/java/com/amazonaws/services/kinesisanalytics/flink/connectors/testutils/TestUtils.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.amazonaws.services.kinesisanalytics.flink.connectors.testutils;

import com.amazonaws.services.kinesisanalytics.flink.connectors.serialization.KinesisFirehoseSerializationSchema;
import org.apache.flink.api.common.serialization.SerializationSchema;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_ACCESS_KEY_ID;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_REGION;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_SECRET_ACCESS_KEY;
import static org.apache.flink.streaming.api.functions.sink.SinkFunction.Context;

/** Shared fixtures (properties, schemas, sink context) for the connector tests. */
public final class TestUtils {

    // Utility class — never instantiated.
    private TestUtils() {
    }

    public static final String DEFAULT_DELIVERY_STREAM = "test-stream";
    public static final String DEFAULT_TEST_ERROR_MSG = "Test exception";

    /** Returns dummy region/key/secret properties accepted by the producer config validation. */
    public static Properties getStandardProperties() {
        Properties config = new Properties();
        config.setProperty(AWS_REGION, "us-east-1");
        config.setProperty(AWS_ACCESS_KEY_ID, "accessKeyId");
        config.setProperty(AWS_SECRET_ACCESS_KEY, "awsSecretAccessKey");
        return config;
    }

    /** Firehose schema that serializes a String as its UTF-8 bytes wrapped in a ByteBuffer. */
    public static KinesisFirehoseSerializationSchema<String> getKinesisFirehoseSerializationSchema() {
        return (KinesisFirehoseSerializationSchema<String>) element ->
                ByteBuffer.wrap(element.getBytes(StandardCharsets.UTF_8));
    }

    /** Flink schema that serializes a String to a plain UTF-8 byte array. */
    public static SerializationSchema<String> getSerializationSchema() {
        return (SerializationSchema<String>) element ->
                ByteBuffer.wrap(element.getBytes(StandardCharsets.UTF_8)).array();
    }

    /**
     * Returns a stub sink Context: processing time / timestamp track the wall
     * clock, the watermark is fixed at 10.
     */
    public static Context<String> getContext() {
        return new Context<String>() {
            @Override
            public long currentProcessingTime() {
                return System.currentTimeMillis();
            }

            @Override
            public long currentWatermark() {
                return 10L;
            }

            @Override
            public Long timestamp() {
                return System.currentTimeMillis();
            }
        };
    }
}
3,997
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/FlinkKinesisFirehoseProducer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amazonaws.services.kinesisanalytics.flink.connectors.producer; import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.FlinkKinesisFirehoseException; import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.RecordCouldNotBeSentException; import com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl.FirehoseProducer; import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.CredentialProvider; import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.factory.CredentialProviderFactory; import com.amazonaws.services.kinesisanalytics.flink.connectors.serialization.KinesisFirehoseSerializationSchema; import com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil; import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose; import com.amazonaws.services.kinesisfirehose.model.Record; import org.apache.commons.lang3.Validate; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.state.FunctionInitializationContext; import 
org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import java.nio.ByteBuffer; import java.util.Properties; import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER; import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType; import static com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl.FirehoseProducer.UserRecordResult; import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.getCredentialProviderType; public class FlinkKinesisFirehoseProducer<OUT> extends RichSinkFunction<OUT> implements CheckpointedFunction { private static final Logger LOGGER = LoggerFactory.getLogger(FlinkKinesisFirehoseProducer.class); private final KinesisFirehoseSerializationSchema<OUT> schema; private final Properties config; private final CredentialProviderType credentialProviderType; /** Name of the default delivery stream to produce to. Can be overwritten by the serialization schema */ private final String defaultDeliveryStream; /** Specify whether stop and fail in case of an error */ private boolean failOnError; /** Remembers the last Async thrown exception */ private transient volatile Throwable lastThrownException; /** The Crendential provider should be not serialized */ private transient CredentialProvider credentialsProvider; /** AWS client cannot be serialized when building the Flink Job graph */ private transient AmazonKinesisFirehose firehoseClient; /** AWS Kinesis Firehose producer */ private transient IProducer<UserRecordResult, Record> firehoseProducer; /** * Creates a new Flink Kinesis Firehose Producer. * @param deliveryStream The AWS Kinesis Firehose delivery stream. 
* @param schema The Serialization schema for the given data type. * @param configProps The properties used to configure Kinesis Firehose client. * @param credentialProviderType The specified Credential Provider type. */ public FlinkKinesisFirehoseProducer(final String deliveryStream, final KinesisFirehoseSerializationSchema<OUT> schema, final Properties configProps, final CredentialProviderType credentialProviderType) { this.defaultDeliveryStream = Validate.notBlank(deliveryStream, "Delivery stream cannot be null or empty"); this.schema = Validate.notNull(schema, "Kinesis serialization schema cannot be null"); this.config = Validate.notNull(configProps, "Configuration properties cannot be null"); this.credentialProviderType = Validate.notNull(credentialProviderType, "Credential Provider type cannot be null"); } public FlinkKinesisFirehoseProducer(final String deliveryStream , final SerializationSchema<OUT> schema, final Properties configProps, final CredentialProviderType credentialProviderType) { this(deliveryStream, new KinesisFirehoseSerializationSchema<OUT>() { @Override public ByteBuffer serialize(OUT element) { return ByteBuffer.wrap(schema.serialize(element)); } }, configProps, credentialProviderType); } public FlinkKinesisFirehoseProducer(final String deliveryStream, final KinesisFirehoseSerializationSchema<OUT> schema, final Properties configProps) { this(deliveryStream, schema, configProps, getCredentialProviderType(configProps, AWS_CREDENTIALS_PROVIDER)); } public FlinkKinesisFirehoseProducer(final String deliveryStream, final SerializationSchema<OUT> schema, final Properties configProps) { this(deliveryStream, schema, configProps, getCredentialProviderType(configProps, AWS_CREDENTIALS_PROVIDER)); } public void setFailOnError(final boolean failOnError) { this.failOnError = failOnError; } @Override public void open(Configuration parameters) throws Exception { super.open(parameters); this.credentialsProvider = 
CredentialProviderFactory.newCredentialProvider(credentialProviderType, config); LOGGER.info("Credential provider: {}", credentialsProvider.getAwsCredentialsProvider().getClass().getName() ); this.firehoseClient = createKinesisFirehoseClient(); this.firehoseProducer = createFirehoseProducer(); LOGGER.info("Started Kinesis Firehose client. Delivering to stream: {}", defaultDeliveryStream); } @Nonnull AmazonKinesisFirehose createKinesisFirehoseClient() { return AWSUtil.createKinesisFirehoseClientFromConfiguration(config, credentialsProvider); } @Nonnull IProducer<UserRecordResult, Record> createFirehoseProducer() { return new FirehoseProducer<>(defaultDeliveryStream, firehoseClient, config); } @Override public void invoke(final OUT value, final Context context) throws Exception { Validate.notNull(value); ByteBuffer serializedValue = schema.serialize(value); Validate.validState((firehoseProducer != null && !firehoseProducer.isDestroyed()), "Firehose producer has been destroyed"); Validate.validState(firehoseClient != null, "Kinesis Firehose client has been closed"); propagateAsyncExceptions(); firehoseProducer .addUserRecord(new Record().withData(serializedValue)) .handleAsync((record, throwable) -> { if (throwable != null) { final String msg = "An error has occurred trying to write a record."; if (failOnError) { lastThrownException = throwable; } else { LOGGER.warn(msg, throwable); } } if (record != null && !record.isSuccessful()) { final String msg = "Record could not be successfully sent."; if (failOnError && lastThrownException == null) { lastThrownException = new RecordCouldNotBeSentException(msg, record.getException()); } else { LOGGER.warn(msg, record.getException()); } } return null; }); } @Override public void snapshotState(final FunctionSnapshotContext functionSnapshotContext) throws Exception { //Propagates asynchronously wherever exception that might happened previously. propagateAsyncExceptions(); //Forces the Firehose producer to flush the buffer. 
LOGGER.debug("Outstanding records before snapshot: {}", firehoseProducer.getOutstandingRecordsCount()); flushSync(); LOGGER.debug("Outstanding records after snapshot: {}", firehoseProducer.getOutstandingRecordsCount()); if (firehoseProducer.getOutstandingRecordsCount() > 0) { throw new IllegalStateException("An error has occurred trying to flush the buffer synchronously."); } // If the flush produced any exceptions, we should propagates it also and fail the checkpoint. propagateAsyncExceptions(); } @Override public void initializeState(final FunctionInitializationContext functionInitializationContext) throws Exception { //No Op } @Override public void close() throws Exception { try { super.close(); propagateAsyncExceptions(); } catch (Exception ex) { LOGGER.error(ex.getMessage(), ex); throw ex; } finally { flushSync(); firehoseProducer.destroy(); if (firehoseClient != null) { LOGGER.debug("Shutting down Kinesis Firehose client..."); firehoseClient.shutdown(); } } } private void propagateAsyncExceptions() throws Exception { if (lastThrownException == null) { return; } final String msg = "An exception has been thrown while trying to process a record"; if (failOnError) { throw new FlinkKinesisFirehoseException(msg, lastThrownException); } else { LOGGER.warn(msg, lastThrownException); lastThrownException = null; } } /** * This method waits until the buffer is flushed, an error has occurred or the thread was interrupted. */ private void flushSync() { while (firehoseProducer.getOutstandingRecordsCount() > 0 && !firehoseProducer.isFlushFailed()) { firehoseProducer.flush(); try { LOGGER.debug("Number of outstanding records before going to sleep: {}", firehoseProducer.getOutstandingRecordsCount()); Thread.sleep(500); } catch (InterruptedException ex) { LOGGER.warn("Flushing has been interrupted."); break; } } } }
3,998
0
Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors
Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/IProducer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amazonaws.services.kinesisanalytics.flink.connectors.producer; import java.util.concurrent.CompletableFuture; /** * Interface responsible for sending data a specific sink */ public interface IProducer<O, R> { /** * This method should send data to an specific destination. * @param record the type of data to be sent * @return a {@code ListenableFuture} with the result for the operation. * @throws Exception */ CompletableFuture<O> addUserRecord(final R record) throws Exception; /** * This method should send data to an specific destination * @param record the type of data to be sent * @param operationTimeoutInMillis the expected operation timeout * @return a {@code ListenableFuture} with the result for the operation. * @throws Exception */ CompletableFuture<O> addUserRecord(final R record, final long operationTimeoutInMillis) throws Exception; /** * Destroy and release any used resource. * @throws Exception */ void destroy() throws Exception; /** * Returns whether the producer has been destroyed or not * @return */ boolean isDestroyed(); /** * Should return the number of outstanding records if the producer implements buffering. * @return an integer with the number of outstanding records. 
*/ int getOutstandingRecordsCount(); /** * This method flushes the buffer immediately. */ void flush(); /** * Performs a synchronous flush on the buffer waiting until the whole buffer is drained. */ void flushSync(); /** * A flag representing whether the flush has failed or not. * @return {@code boolean} representing whether the success of failure of flush buffer operation. */ boolean isFlushFailed(); }
3,999