repo_name
stringlengths 7
104
| file_path
stringlengths 13
198
| context
stringlengths 67
7.15k
| import_statement
stringlengths 16
4.43k
| code
stringlengths 40
6.98k
| prompt
stringlengths 227
8.27k
| next_line
stringlengths 8
795
|
|---|---|---|---|---|---|---|
DroidKaigi/conference-app-2017
|
app/src/main/java/io/github/droidkaigi/confsched2017/api/DroidKaigiClient.java
|
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/api/service/DroidKaigiService.java
// public interface DroidKaigiService {
//
// @GET("sessions.json")
// Single<List<Session>> getSessionsJa();
//
// @GET("en/sessions.json")
// Single<List<Session>> getSessionsEn();
//
// }
//
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/api/service/GoogleFormService.java
// public interface GoogleFormService {
//
// @POST("e/1FAIpQLSf5NydpYm48GXqlKqbG3e0dna3bw5HJ4GUg8W1Yfe4znTWH_g/formResponse")
// @FormUrlEncoded
// Single<Response<Void>> submitSessionFeedback(
// @Field("entry.1298546024") int sessionId,
// @Field("entry.413792998") String sessionTitle,
// @Field("entry.335146475") int relevancy,
// @Field("entry.1916895481") int asExpected,
// @Field("entry.1501292277") int difficulty,
// @Field("entry.2121897737") int knowledgeable,
// @Field("entry.645604473") String comment);
//
// }
//
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/model/SessionFeedback.java
// @Table
// public class SessionFeedback {
//
// @PrimaryKey(auto = false)
// @Column(indexed = true)
// @SerializedName("session_id")
// public int sessionId;
//
// @Column
// @SerializedName("session_title")
// public String sessionTitle;
//
// @Column
// @SerializedName("relevancy")
// public int relevancy;
//
// @Column
// @SerializedName("as_expected")
// public int asExpected;
//
// @Column
// @SerializedName("difficulty")
// public int difficulty;
//
// @Column
// @SerializedName("knowledgeable")
// public int knowledgeable;
//
// @Column
// @Nullable
// @SerializedName("comment")
// public String comment;
//
// @Column
// @SerializedName("is_submitted")
// public boolean isSubmitted;
//
// public SessionFeedback() {
//
// }
//
// public SessionFeedback(@NonNull Session session, int relevancy, int asExpected,
// int difficulty, int knowledgeable, @Nullable String comment) {
// this.sessionId = session.id;
// this.sessionTitle = session.title;
// this.relevancy = relevancy;
// this.asExpected = asExpected;
// this.difficulty = difficulty;
// this.knowledgeable = knowledgeable;
// this.comment = comment;
// }
//
// public boolean isAllFilled() {
// return sessionId > 0
// && sessionTitle != null
// && relevancy > 0
// && asExpected > 0
// && difficulty > 0
// && knowledgeable > 0;
// }
// }
|
import android.support.annotation.NonNull;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.github.droidkaigi.confsched2017.api.service.DroidKaigiService;
import io.github.droidkaigi.confsched2017.api.service.GithubService;
import io.github.droidkaigi.confsched2017.api.service.GoogleFormService;
import io.github.droidkaigi.confsched2017.model.Contributor;
import io.github.droidkaigi.confsched2017.model.Session;
import io.github.droidkaigi.confsched2017.model.SessionFeedback;
import io.reactivex.Single;
import retrofit2.Response;
|
package io.github.droidkaigi.confsched2017.api;
@Singleton
public class DroidKaigiClient {
private final DroidKaigiService droidKaigiService;
private final GithubService githubService;
private final GoogleFormService googleFormService;
private static final int INCLUDE_ANONYMOUS = 1;
private static final int MAX_PER_PAGE = 100;
@Inject
public DroidKaigiClient(DroidKaigiService droidKaigiService, GithubService githubService,
GoogleFormService googleFormService) {
this.droidKaigiService = droidKaigiService;
this.githubService = githubService;
this.googleFormService = googleFormService;
}
public Single<List<Session>> getSessions(@NonNull Locale locale) {
if (locale == Locale.JAPANESE) {
return droidKaigiService.getSessionsJa();
} else {
return droidKaigiService.getSessionsEn();
}
}
public Single<List<Contributor>> getContributors() {
return githubService.getContributors("DroidKaigi", "conference-app-2017", INCLUDE_ANONYMOUS, MAX_PER_PAGE);
}
|
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/api/service/DroidKaigiService.java
// public interface DroidKaigiService {
//
// @GET("sessions.json")
// Single<List<Session>> getSessionsJa();
//
// @GET("en/sessions.json")
// Single<List<Session>> getSessionsEn();
//
// }
//
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/api/service/GoogleFormService.java
// public interface GoogleFormService {
//
// @POST("e/1FAIpQLSf5NydpYm48GXqlKqbG3e0dna3bw5HJ4GUg8W1Yfe4znTWH_g/formResponse")
// @FormUrlEncoded
// Single<Response<Void>> submitSessionFeedback(
// @Field("entry.1298546024") int sessionId,
// @Field("entry.413792998") String sessionTitle,
// @Field("entry.335146475") int relevancy,
// @Field("entry.1916895481") int asExpected,
// @Field("entry.1501292277") int difficulty,
// @Field("entry.2121897737") int knowledgeable,
// @Field("entry.645604473") String comment);
//
// }
//
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/model/SessionFeedback.java
// @Table
// public class SessionFeedback {
//
// @PrimaryKey(auto = false)
// @Column(indexed = true)
// @SerializedName("session_id")
// public int sessionId;
//
// @Column
// @SerializedName("session_title")
// public String sessionTitle;
//
// @Column
// @SerializedName("relevancy")
// public int relevancy;
//
// @Column
// @SerializedName("as_expected")
// public int asExpected;
//
// @Column
// @SerializedName("difficulty")
// public int difficulty;
//
// @Column
// @SerializedName("knowledgeable")
// public int knowledgeable;
//
// @Column
// @Nullable
// @SerializedName("comment")
// public String comment;
//
// @Column
// @SerializedName("is_submitted")
// public boolean isSubmitted;
//
// public SessionFeedback() {
//
// }
//
// public SessionFeedback(@NonNull Session session, int relevancy, int asExpected,
// int difficulty, int knowledgeable, @Nullable String comment) {
// this.sessionId = session.id;
// this.sessionTitle = session.title;
// this.relevancy = relevancy;
// this.asExpected = asExpected;
// this.difficulty = difficulty;
// this.knowledgeable = knowledgeable;
// this.comment = comment;
// }
//
// public boolean isAllFilled() {
// return sessionId > 0
// && sessionTitle != null
// && relevancy > 0
// && asExpected > 0
// && difficulty > 0
// && knowledgeable > 0;
// }
// }
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/api/DroidKaigiClient.java
import android.support.annotation.NonNull;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.github.droidkaigi.confsched2017.api.service.DroidKaigiService;
import io.github.droidkaigi.confsched2017.api.service.GithubService;
import io.github.droidkaigi.confsched2017.api.service.GoogleFormService;
import io.github.droidkaigi.confsched2017.model.Contributor;
import io.github.droidkaigi.confsched2017.model.Session;
import io.github.droidkaigi.confsched2017.model.SessionFeedback;
import io.reactivex.Single;
import retrofit2.Response;
package io.github.droidkaigi.confsched2017.api;
@Singleton
public class DroidKaigiClient {
private final DroidKaigiService droidKaigiService;
private final GithubService githubService;
private final GoogleFormService googleFormService;
private static final int INCLUDE_ANONYMOUS = 1;
private static final int MAX_PER_PAGE = 100;
@Inject
public DroidKaigiClient(DroidKaigiService droidKaigiService, GithubService githubService,
GoogleFormService googleFormService) {
this.droidKaigiService = droidKaigiService;
this.githubService = githubService;
this.googleFormService = googleFormService;
}
public Single<List<Session>> getSessions(@NonNull Locale locale) {
if (locale == Locale.JAPANESE) {
return droidKaigiService.getSessionsJa();
} else {
return droidKaigiService.getSessionsEn();
}
}
public Single<List<Contributor>> getContributors() {
return githubService.getContributors("DroidKaigi", "conference-app-2017", INCLUDE_ANONYMOUS, MAX_PER_PAGE);
}
|
public Single<Response<Void>> submitSessionFeedback(@NonNull SessionFeedback sessionFeedback) {
|
DroidKaigi/conference-app-2017
|
app/src/main/java/io/github/droidkaigi/confsched2017/repository/sessions/MySessionsRepository.java
|
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/model/MySession.java
// @Table
// public class MySession {
//
// @PrimaryKey
// @Column(indexed = true)
// @SerializedName("id")
// public int id;
//
// @Column(indexed = true, unique = true)
// @SerializedName("session")
// public Session session;
//
// public MySession() {
// }
//
// public MySession(@NonNull Session session) {
// this.session = session;
// }
//
// @Override
// public boolean equals(Object o) {
// return o instanceof MySession && ((MySession) o).id == id || super.equals(o);
// }
//
// @Override
// public int hashCode() {
// return id;
// }
// }
|
import android.support.annotation.NonNull;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.github.droidkaigi.confsched2017.model.MySession;
import io.github.droidkaigi.confsched2017.model.Session;
import io.reactivex.Completable;
import io.reactivex.Single;
|
package io.github.droidkaigi.confsched2017.repository.sessions;
@Singleton
public class MySessionsRepository implements MySessionsDataSource {
private final MySessionsDataSource localDataSource;
|
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/model/MySession.java
// @Table
// public class MySession {
//
// @PrimaryKey
// @Column(indexed = true)
// @SerializedName("id")
// public int id;
//
// @Column(indexed = true, unique = true)
// @SerializedName("session")
// public Session session;
//
// public MySession() {
// }
//
// public MySession(@NonNull Session session) {
// this.session = session;
// }
//
// @Override
// public boolean equals(Object o) {
// return o instanceof MySession && ((MySession) o).id == id || super.equals(o);
// }
//
// @Override
// public int hashCode() {
// return id;
// }
// }
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/repository/sessions/MySessionsRepository.java
import android.support.annotation.NonNull;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.github.droidkaigi.confsched2017.model.MySession;
import io.github.droidkaigi.confsched2017.model.Session;
import io.reactivex.Completable;
import io.reactivex.Single;
package io.github.droidkaigi.confsched2017.repository.sessions;
@Singleton
public class MySessionsRepository implements MySessionsDataSource {
private final MySessionsDataSource localDataSource;
|
private Map<Integer, MySession> cachedMySessions;
|
DroidKaigi/conference-app-2017
|
app/src/main/java/io/github/droidkaigi/confsched2017/viewmodel/SearchViewModel.java
|
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/repository/sessions/MySessionsRepository.java
// @Singleton
// public class MySessionsRepository implements MySessionsDataSource {
//
// private final MySessionsDataSource localDataSource;
//
// private Map<Integer, MySession> cachedMySessions;
//
// @Inject
// public MySessionsRepository(MySessionsLocalDataSource localDataSource) {
// this.localDataSource = localDataSource;
// this.cachedMySessions = new LinkedHashMap<>();
// }
//
// @Override
// public Single<List<MySession>> findAll() {
// if (cachedMySessions != null && !cachedMySessions.isEmpty()) {
// return Single.create(emitter -> {
// emitter.onSuccess(new ArrayList<>(cachedMySessions.values()));
// });
// }
//
// return localDataSource.findAll().doOnSuccess(this::refreshCache);
// }
//
// @Override
// public Completable save(@NonNull Session session) {
// cachedMySessions.put(session.id, new MySession(session));
// return localDataSource.save(session);
// }
//
// @Override
// public Single<Integer> delete(@NonNull Session session) {
// cachedMySessions.remove(session.id);
// return localDataSource.delete(session);
// }
//
// @Override
// public boolean isExist(int sessionId) {
// return localDataSource.isExist(sessionId);
// }
//
// private void refreshCache(List<MySession> mySessions) {
// if (cachedMySessions == null) {
// cachedMySessions = new LinkedHashMap<>();
// }
// cachedMySessions.clear();
// for (MySession mySession : mySessions) {
// cachedMySessions.put(mySession.session.id, mySession);
// }
// }
// }
//
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/view/helper/Navigator.java
// @ActivityScope
// public class Navigator {
//
// private final Activity activity;
//
// @Inject
// public Navigator(AppCompatActivity activity) {
// this.activity = activity;
// }
//
// public void navigateToSessionDetail(@NonNull Session session, @Nullable Class<? extends Activity> parentClass) {
// activity.startActivity(SessionDetailActivity.createIntent(activity, session.id, parentClass));
// }
//
// public void navigateToFeedbackPage(@NonNull Session session) {
// activity.startActivity(SessionFeedbackActivity.createIntent(activity, session.id));
// }
//
// public void navigateToSponsorsPage() {
// activity.startActivity(SponsorsActivity.createIntent(activity));
// }
//
// public void navigateToContributorsPage() {
// activity.startActivity(ContributorsActivity.createIntent(activity));
// }
//
// public void navigateToLicensePage() {
// activity.startActivity(LicensesActivity.createIntent(activity));
// }
//
// public void navigateToWebPage(@NonNull String url) {
// if (TextUtils.isEmpty(url) || !URLUtil.isNetworkUrl(url)) {
// return;
// }
//
// CustomTabsIntent intent = new CustomTabsIntent.Builder()
// .setShowTitle(true)
// .setToolbarColor(ContextCompat.getColor(activity, R.color.theme))
// .build();
//
// intent.launchUrl(activity, Uri.parse(url));
// }
//
// public void showConfirmDialog(@StringRes int titleResId, @StringRes int messageResId,
// @NonNull ConfirmDialogListener listener) {
// new AlertDialog.Builder(activity, R.style.DialogTheme)
// .setTitle(titleResId)
// .setMessage(messageResId)
// .setPositiveButton(android.R.string.ok, (dialogInterface, i) -> listener.onClickPositiveButton())
// .setNegativeButton(android.R.string.cancel, (dialogInterface, i) -> listener.onClickNegativeButton())
// .show();
// }
//
// public interface ConfirmDialogListener {
//
// void onClickPositiveButton();
//
// void onClickNegativeButton();
// }
//
// }
|
import com.annimon.stream.Stream;
import android.content.Context;
import android.databinding.BaseObservable;
import android.support.annotation.NonNull;
import android.view.View;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
import io.github.droidkaigi.confsched2017.model.Session;
import io.github.droidkaigi.confsched2017.repository.sessions.MySessionsRepository;
import io.github.droidkaigi.confsched2017.repository.sessions.SessionsRepository;
import io.github.droidkaigi.confsched2017.view.helper.Navigator;
import io.reactivex.Single;
|
package io.github.droidkaigi.confsched2017.viewmodel;
public final class SearchViewModel extends BaseObservable implements ViewModel {
private final Navigator navigator;
private SessionsRepository sessionsRepository;
|
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/repository/sessions/MySessionsRepository.java
// @Singleton
// public class MySessionsRepository implements MySessionsDataSource {
//
// private final MySessionsDataSource localDataSource;
//
// private Map<Integer, MySession> cachedMySessions;
//
// @Inject
// public MySessionsRepository(MySessionsLocalDataSource localDataSource) {
// this.localDataSource = localDataSource;
// this.cachedMySessions = new LinkedHashMap<>();
// }
//
// @Override
// public Single<List<MySession>> findAll() {
// if (cachedMySessions != null && !cachedMySessions.isEmpty()) {
// return Single.create(emitter -> {
// emitter.onSuccess(new ArrayList<>(cachedMySessions.values()));
// });
// }
//
// return localDataSource.findAll().doOnSuccess(this::refreshCache);
// }
//
// @Override
// public Completable save(@NonNull Session session) {
// cachedMySessions.put(session.id, new MySession(session));
// return localDataSource.save(session);
// }
//
// @Override
// public Single<Integer> delete(@NonNull Session session) {
// cachedMySessions.remove(session.id);
// return localDataSource.delete(session);
// }
//
// @Override
// public boolean isExist(int sessionId) {
// return localDataSource.isExist(sessionId);
// }
//
// private void refreshCache(List<MySession> mySessions) {
// if (cachedMySessions == null) {
// cachedMySessions = new LinkedHashMap<>();
// }
// cachedMySessions.clear();
// for (MySession mySession : mySessions) {
// cachedMySessions.put(mySession.session.id, mySession);
// }
// }
// }
//
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/view/helper/Navigator.java
// @ActivityScope
// public class Navigator {
//
// private final Activity activity;
//
// @Inject
// public Navigator(AppCompatActivity activity) {
// this.activity = activity;
// }
//
// public void navigateToSessionDetail(@NonNull Session session, @Nullable Class<? extends Activity> parentClass) {
// activity.startActivity(SessionDetailActivity.createIntent(activity, session.id, parentClass));
// }
//
// public void navigateToFeedbackPage(@NonNull Session session) {
// activity.startActivity(SessionFeedbackActivity.createIntent(activity, session.id));
// }
//
// public void navigateToSponsorsPage() {
// activity.startActivity(SponsorsActivity.createIntent(activity));
// }
//
// public void navigateToContributorsPage() {
// activity.startActivity(ContributorsActivity.createIntent(activity));
// }
//
// public void navigateToLicensePage() {
// activity.startActivity(LicensesActivity.createIntent(activity));
// }
//
// public void navigateToWebPage(@NonNull String url) {
// if (TextUtils.isEmpty(url) || !URLUtil.isNetworkUrl(url)) {
// return;
// }
//
// CustomTabsIntent intent = new CustomTabsIntent.Builder()
// .setShowTitle(true)
// .setToolbarColor(ContextCompat.getColor(activity, R.color.theme))
// .build();
//
// intent.launchUrl(activity, Uri.parse(url));
// }
//
// public void showConfirmDialog(@StringRes int titleResId, @StringRes int messageResId,
// @NonNull ConfirmDialogListener listener) {
// new AlertDialog.Builder(activity, R.style.DialogTheme)
// .setTitle(titleResId)
// .setMessage(messageResId)
// .setPositiveButton(android.R.string.ok, (dialogInterface, i) -> listener.onClickPositiveButton())
// .setNegativeButton(android.R.string.cancel, (dialogInterface, i) -> listener.onClickNegativeButton())
// .show();
// }
//
// public interface ConfirmDialogListener {
//
// void onClickPositiveButton();
//
// void onClickNegativeButton();
// }
//
// }
// Path: app/src/main/java/io/github/droidkaigi/confsched2017/viewmodel/SearchViewModel.java
import com.annimon.stream.Stream;
import android.content.Context;
import android.databinding.BaseObservable;
import android.support.annotation.NonNull;
import android.view.View;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
import io.github.droidkaigi.confsched2017.model.Session;
import io.github.droidkaigi.confsched2017.repository.sessions.MySessionsRepository;
import io.github.droidkaigi.confsched2017.repository.sessions.SessionsRepository;
import io.github.droidkaigi.confsched2017.view.helper.Navigator;
import io.reactivex.Single;
package io.github.droidkaigi.confsched2017.viewmodel;
public final class SearchViewModel extends BaseObservable implements ViewModel {
private final Navigator navigator;
private SessionsRepository sessionsRepository;
|
private MySessionsRepository mySessionsRepository;
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializer.java
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/util/ByteArrayBuffer.java
// public class ByteArrayBuffer {
//
// private final SegmentedByteArray data;
//
// private long pointer;
//
// public ByteArrayBuffer() {
// this.data = new SegmentedByteArray(new ByteSegmentPool(14));
// this.pointer = 0;
// }
//
// /**
// * @deprecated Use zero-argument constructor instead.
// */
// @Deprecated
// public ByteArrayBuffer(int initialSize) {
// this();
// }
//
// /**
// * Copies the contents of the specified buffer into this buffer at the current position.
// */
// public void write(ByteArrayBuffer buf) {
// data.copy(buf.data, 0, pointer, buf.length());
// pointer += buf.length();
// }
//
// /**
// * Writes a variable-byte encoded integer to the byte array.
// */
// public void writeVInt(int value) {
// if(value == -1) {
// writeByte((byte)0x80);
// return;
// } else if(value < 0) {
// writeByte((byte)(0x80 | ((value >>> 28))));
// writeByte((byte)(0x80 | ((value >>> 21) & 0x7F)));
// writeByte((byte)(0x80 | ((value >>> 14) & 0x7F)));
// writeByte((byte)(0x80 | ((value >>> 7) & 0x7F)));
// writeByte((byte)(value & 0x7F));
// } else {
// if(value > 0x0FFFFFFF) writeByte((byte)(0x80 | ((value >>> 28))));
// if(value > 0x1FFFFF) writeByte((byte)(0x80 | ((value >>> 21) & 0x7F)));
// if(value > 0x3FFF) writeByte((byte)(0x80 | ((value >>> 14) & 0x7F)));
// if(value > 0x7F) writeByte((byte)(0x80 | ((value >>> 7) & 0x7F)));
//
// writeByte((byte)(value & 0x7F));
// }
// }
//
// /**
// * The current length of the written data, in bytes.
// */
// public long length() {
// return pointer;
// }
//
// /**
// * Sets the length of the written data to 0.
// */
// public void reset() {
// pointer = 0;
// }
//
// /**
// * @return The underlying SegmentedByteArray containing the written data.
// */
// public SegmentedByteArray getData() {
// return data;
// }
//
// /**
// * Writes a byte of data.
// */
// public void writeByte(byte b) {
// data.set(pointer++, b);
// }
//
// /**
// * Writes each byte of data, in order.
// */
// public void write(byte[] data) {
// for(int i=0;i<data.length;i++) {
// writeByte(data[i]);
// }
// }
//
// /**
// * Copies the written data to the given <code>OutputStream</code>
// */
// public void copyTo(OutputStream os) throws IOException {
// data.writeTo(os, 0, pointer);
// }
//
// }
|
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;
|
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializer {
private final NFCompressedGraphPointers pointers;
private final long dataLength;
NFCompressedGraphPointersSerializer(NFCompressedGraphPointers pointers, long dataLength) {
this.pointers = pointers;
this.dataLength = dataLength;
}
void serializePointers(DataOutputStream dos) throws IOException {
int numNodeTypes = pointers.asMap().size();
if(dataLength > 0xFFFFFFFFL)
numNodeTypes |= Integer.MIN_VALUE;
/// In order to maintain backwards compatibility of produced artifacts,
/// if more than 32 bits is required to represent the pointers, then flag
/// the sign bit in the serialized number of node types.
dos.writeInt(numNodeTypes);
for(Map.Entry<String, long[]>entry : pointers.asMap().entrySet()) {
dos.writeUTF(entry.getKey());
serializePointerArray(dos, entry.getValue());
}
}
private void serializePointerArray(DataOutputStream dos, long pointers[]) throws IOException {
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/util/ByteArrayBuffer.java
// public class ByteArrayBuffer {
//
// private final SegmentedByteArray data;
//
// private long pointer;
//
// public ByteArrayBuffer() {
// this.data = new SegmentedByteArray(new ByteSegmentPool(14));
// this.pointer = 0;
// }
//
// /**
// * @deprecated Use zero-argument constructor instead.
// */
// @Deprecated
// public ByteArrayBuffer(int initialSize) {
// this();
// }
//
// /**
// * Copies the contents of the specified buffer into this buffer at the current position.
// */
// public void write(ByteArrayBuffer buf) {
// data.copy(buf.data, 0, pointer, buf.length());
// pointer += buf.length();
// }
//
// /**
// * Writes a variable-byte encoded integer to the byte array.
// */
// public void writeVInt(int value) {
// if(value == -1) {
// writeByte((byte)0x80);
// return;
// } else if(value < 0) {
// writeByte((byte)(0x80 | ((value >>> 28))));
// writeByte((byte)(0x80 | ((value >>> 21) & 0x7F)));
// writeByte((byte)(0x80 | ((value >>> 14) & 0x7F)));
// writeByte((byte)(0x80 | ((value >>> 7) & 0x7F)));
// writeByte((byte)(value & 0x7F));
// } else {
// if(value > 0x0FFFFFFF) writeByte((byte)(0x80 | ((value >>> 28))));
// if(value > 0x1FFFFF) writeByte((byte)(0x80 | ((value >>> 21) & 0x7F)));
// if(value > 0x3FFF) writeByte((byte)(0x80 | ((value >>> 14) & 0x7F)));
// if(value > 0x7F) writeByte((byte)(0x80 | ((value >>> 7) & 0x7F)));
//
// writeByte((byte)(value & 0x7F));
// }
// }
//
// /**
// * The current length of the written data, in bytes.
// */
// public long length() {
// return pointer;
// }
//
// /**
// * Sets the length of the written data to 0.
// */
// public void reset() {
// pointer = 0;
// }
//
// /**
// * @return The underlying SegmentedByteArray containing the written data.
// */
// public SegmentedByteArray getData() {
// return data;
// }
//
// /**
// * Writes a byte of data.
// */
// public void writeByte(byte b) {
// data.set(pointer++, b);
// }
//
// /**
// * Writes each byte of data, in order.
// */
// public void write(byte[] data) {
// for(int i=0;i<data.length;i++) {
// writeByte(data[i]);
// }
// }
//
// /**
// * Copies the written data to the given <code>OutputStream</code>
// */
// public void copyTo(OutputStream os) throws IOException {
// data.writeTo(os, 0, pointer);
// }
//
// }
// Path: src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializer.java
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializer {
private final NFCompressedGraphPointers pointers;
private final long dataLength;
NFCompressedGraphPointersSerializer(NFCompressedGraphPointers pointers, long dataLength) {
this.pointers = pointers;
this.dataLength = dataLength;
}
void serializePointers(DataOutputStream dos) throws IOException {
int numNodeTypes = pointers.asMap().size();
if(dataLength > 0xFFFFFFFFL)
numNodeTypes |= Integer.MIN_VALUE;
/// In order to maintain backwards compatibility of produced artifacts,
/// if more than 32 bits is required to represent the pointers, then flag
/// the sign bit in the serialized number of node types.
dos.writeInt(numNodeTypes);
for(Map.Entry<String, long[]>entry : pointers.asMap().entrySet()) {
dos.writeUTF(entry.getKey());
serializePointerArray(dos, entry.getValue());
}
}
private void serializePointerArray(DataOutputStream dos, long pointers[]) throws IOException {
|
ByteArrayBuffer buf = new ByteArrayBuffer();
|
Netflix/netflix-graph
|
src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
|
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int HASH = 0x04;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int MODEL_SPECIFIC = 0x01;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int SINGLE = 0x02;
|
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
public class NFPropertySpecTest {
@Test
public void testInstantiateWithFlagsDefault() {
NFPropertySpec propertySpec = new NFPropertySpec(null, null, 0);
assertTrue(propertySpec.isGlobal());
assertTrue(propertySpec.isMultiple());
assertFalse(propertySpec.isHashed());
}
@Test
public void testInstantiateWithFlags() {
|
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int HASH = 0x04;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int MODEL_SPECIFIC = 0x01;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int SINGLE = 0x02;
// Path: src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
public class NFPropertySpecTest {
@Test
public void testInstantiateWithFlagsDefault() {
NFPropertySpec propertySpec = new NFPropertySpec(null, null, 0);
assertTrue(propertySpec.isGlobal());
assertTrue(propertySpec.isMultiple());
assertFalse(propertySpec.isHashed());
}
@Test
public void testInstantiateWithFlags() {
|
NFPropertySpec propertySpec = new NFPropertySpec(null, null, MODEL_SPECIFIC | HASH | SINGLE);
|
Netflix/netflix-graph
|
src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
|
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int HASH = 0x04;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int MODEL_SPECIFIC = 0x01;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int SINGLE = 0x02;
|
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
public class NFPropertySpecTest {
@Test
public void testInstantiateWithFlagsDefault() {
NFPropertySpec propertySpec = new NFPropertySpec(null, null, 0);
assertTrue(propertySpec.isGlobal());
assertTrue(propertySpec.isMultiple());
assertFalse(propertySpec.isHashed());
}
@Test
public void testInstantiateWithFlags() {
|
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int HASH = 0x04;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int MODEL_SPECIFIC = 0x01;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int SINGLE = 0x02;
// Path: src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
public class NFPropertySpecTest {
@Test
public void testInstantiateWithFlagsDefault() {
NFPropertySpec propertySpec = new NFPropertySpec(null, null, 0);
assertTrue(propertySpec.isGlobal());
assertTrue(propertySpec.isMultiple());
assertFalse(propertySpec.isHashed());
}
@Test
public void testInstantiateWithFlags() {
|
NFPropertySpec propertySpec = new NFPropertySpec(null, null, MODEL_SPECIFIC | HASH | SINGLE);
|
Netflix/netflix-graph
|
src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
|
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int HASH = 0x04;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int MODEL_SPECIFIC = 0x01;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int SINGLE = 0x02;
|
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
public class NFPropertySpecTest {
@Test
public void testInstantiateWithFlagsDefault() {
NFPropertySpec propertySpec = new NFPropertySpec(null, null, 0);
assertTrue(propertySpec.isGlobal());
assertTrue(propertySpec.isMultiple());
assertFalse(propertySpec.isHashed());
}
@Test
public void testInstantiateWithFlags() {
|
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int HASH = 0x04;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int MODEL_SPECIFIC = 0x01;
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java
// public static final int SINGLE = 0x02;
// Path: src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
public class NFPropertySpecTest {
@Test
public void testInstantiateWithFlagsDefault() {
NFPropertySpec propertySpec = new NFPropertySpec(null, null, 0);
assertTrue(propertySpec.isGlobal());
assertTrue(propertySpec.isMultiple());
assertFalse(propertySpec.isHashed());
}
@Test
public void testInstantiateWithFlags() {
|
NFPropertySpec propertySpec = new NFPropertySpec(null, null, MODEL_SPECIFIC | HASH | SINGLE);
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java
|
// Path: src/main/java/com/netflix/nfgraph/util/ArrayIterator.java
// public class ArrayIterator<T> implements Iterator<T> {
//
// private T arr[];
// private int size;
// private int counter = 0;
//
// public ArrayIterator(T arr[]) {
// this(arr, arr.length);
// }
//
// public ArrayIterator(T arr[], int size) {
// this.arr = arr;
// this.size = size;
// }
//
// @Override
// public boolean hasNext() {
// return counter < size;
// }
//
// @Override
// public T next() {
// return arr[counter++];
// }
//
// @Override
// public void remove() {
// throw new UnsupportedOperationException("Cannot remove elements from this array.");
// }
//
// }
|
import java.util.Iterator;
import com.netflix.nfgraph.exception.NFGraphException;
import com.netflix.nfgraph.util.ArrayIterator;
|
this.numSingleProperties = numSingleProperties;
this.numMultipleProperties = numMultipleProperties;
}
public String getNodeTypeName() {
return nodeTypeName;
}
public NFPropertySpec[] getPropertySpecs() {
return propertySpecs;
}
public NFPropertySpec getPropertySpec(String propertyName) {
for(NFPropertySpec spec : propertySpecs) {
if(spec.getName().equals(propertyName))
return spec;
}
throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
}
public int getNumSingleProperties() {
return numSingleProperties;
}
public int getNumMultipleProperties() {
return numMultipleProperties;
}
@Override
public Iterator<NFPropertySpec> iterator() {
|
// Path: src/main/java/com/netflix/nfgraph/util/ArrayIterator.java
// public class ArrayIterator<T> implements Iterator<T> {
//
// private T arr[];
// private int size;
// private int counter = 0;
//
// public ArrayIterator(T arr[]) {
// this(arr, arr.length);
// }
//
// public ArrayIterator(T arr[], int size) {
// this.arr = arr;
// this.size = size;
// }
//
// @Override
// public boolean hasNext() {
// return counter < size;
// }
//
// @Override
// public T next() {
// return arr[counter++];
// }
//
// @Override
// public void remove() {
// throw new UnsupportedOperationException("Cannot remove elements from this array.");
// }
//
// }
// Path: src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java
import java.util.Iterator;
import com.netflix.nfgraph.exception.NFGraphException;
import com.netflix.nfgraph.util.ArrayIterator;
this.numSingleProperties = numSingleProperties;
this.numMultipleProperties = numMultipleProperties;
}
public String getNodeTypeName() {
return nodeTypeName;
}
public NFPropertySpec[] getPropertySpecs() {
return propertySpecs;
}
public NFPropertySpec getPropertySpec(String propertyName) {
for(NFPropertySpec spec : propertySpecs) {
if(spec.getName().equals(propertyName))
return spec;
}
throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
}
public int getNumSingleProperties() {
return numSingleProperties;
}
public int getNumMultipleProperties() {
return numMultipleProperties;
}
@Override
public Iterator<NFPropertySpec> iterator() {
|
return new ArrayIterator<NFPropertySpec>(propertySpecs);
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
|
// Path: src/main/java/com/netflix/nfgraph/util/OrdinalMap.java
// public class OrdinalMap<T> implements Iterable<T> {
//
// private int hashedOrdinalArray[];
// private T objectsByOrdinal[];
//
// private int size;
//
// public OrdinalMap() {
// this(10);
// }
//
// @SuppressWarnings("unchecked")
// public OrdinalMap(int expectedSize) {
// int mapArraySize = 1 << (32 - Integer.numberOfLeadingZeros(expectedSize * 4 / 3));
// int ordinalArraySize = mapArraySize * 3 / 4;
//
// hashedOrdinalArray = newHashedOrdinalArray(mapArraySize);
// objectsByOrdinal = (T[]) new Object[ordinalArraySize];
// }
//
// /**
// * Add an object into this <code>OrdinalMap</code>. If the same object (or an {@link Object#equals(Object)} object) is
// * already in the map, then no changes will be made.
// *
// * @return the ordinal of <code>obj</code>
// */
// public int add(T obj) {
// int ordinal = get(obj);
// if(ordinal != -1)
// return ordinal;
//
// if(size == objectsByOrdinal.length)
// growCapacity();
//
// objectsByOrdinal[size] = obj;
// hashOrdinalIntoArray(size, hashedOrdinalArray);
//
// return size++;
// }
//
// /**
// * @return the ordinal of an object previously added to the map. If the object has not been added to the map, returns -1 instead.
// */
// public int get(T obj) {
// int hash = Mixer.hashInt(obj.hashCode());
//
// int bucket = hash % hashedOrdinalArray.length;
// int ordinal = hashedOrdinalArray[bucket];
//
// while(ordinal != -1) {
// if(objectsByOrdinal[ordinal].equals(obj))
// return ordinal;
//
// bucket = (bucket + 1) % hashedOrdinalArray.length;
// ordinal = hashedOrdinalArray[bucket];
// }
//
// return -1;
// }
//
// /**
// * @return the object for a given ordinal. If the ordinal does not yet exist, returns null.
// */
// public T get(int ordinal) {
// if(ordinal >= size)
// return null;
// return objectsByOrdinal[ordinal];
// }
//
// /**
// * @return the number of objects in this map.
// */
// public int size() {
// return size;
// }
//
// private void growCapacity() {
// int newHashedOrdinalArray[] = newHashedOrdinalArray(hashedOrdinalArray.length * 2);
//
// for(int i=0;i<objectsByOrdinal.length;i++) {
// hashOrdinalIntoArray(i, newHashedOrdinalArray);
// }
//
// objectsByOrdinal = Arrays.copyOf(objectsByOrdinal, objectsByOrdinal.length * 2);
// hashedOrdinalArray = newHashedOrdinalArray;
// }
//
// private void hashOrdinalIntoArray(int ordinal, int hashedOrdinalArray[]) {
// int hash = Mixer.hashInt(objectsByOrdinal[ordinal].hashCode());
//
// int bucket = hash % hashedOrdinalArray.length;
//
// while(hashedOrdinalArray[bucket] != -1) {
// bucket = (bucket + 1) % hashedOrdinalArray.length;
// }
//
// hashedOrdinalArray[bucket] = ordinal;
// }
//
// private int[] newHashedOrdinalArray(int length) {
// int arr[] = new int[length];
// Arrays.fill(arr, -1);
// return arr;
// }
//
// /**
// * @return an {@link Iterator} over the objects in this mapping.
// */
// @Override
// public Iterator<T> iterator() {
// return new ArrayIterator<T>(objectsByOrdinal, size);
// }
//
// }
|
import java.util.Iterator;
import com.netflix.nfgraph.util.OrdinalMap;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
/**
* <code>NFGraphModelHolder</code> maintains an ordering over the models in a given NFGraph.<p>
*
* An {@link NFGraph} may contain one or more "connection models". A "connection model" is a grouping of the set of connections
* between nodes in the graph.<p>
*
* Connections added for a connection model will be visible only for that model. Use of multiple connection models will
* add a minimum of one byte per model-specific connection set per node. As a result, this feature should be used only
* when the number of connection models is and will remain low.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFGraphModelHolder implements Iterable<String> {
public static final String CONNECTION_MODEL_GLOBAL = "global";
|
// Path: src/main/java/com/netflix/nfgraph/util/OrdinalMap.java
// public class OrdinalMap<T> implements Iterable<T> {
//
// private int hashedOrdinalArray[];
// private T objectsByOrdinal[];
//
// private int size;
//
// public OrdinalMap() {
// this(10);
// }
//
// @SuppressWarnings("unchecked")
// public OrdinalMap(int expectedSize) {
// int mapArraySize = 1 << (32 - Integer.numberOfLeadingZeros(expectedSize * 4 / 3));
// int ordinalArraySize = mapArraySize * 3 / 4;
//
// hashedOrdinalArray = newHashedOrdinalArray(mapArraySize);
// objectsByOrdinal = (T[]) new Object[ordinalArraySize];
// }
//
// /**
// * Add an object into this <code>OrdinalMap</code>. If the same object (or an {@link Object#equals(Object)} object) is
// * already in the map, then no changes will be made.
// *
// * @return the ordinal of <code>obj</code>
// */
// public int add(T obj) {
// int ordinal = get(obj);
// if(ordinal != -1)
// return ordinal;
//
// if(size == objectsByOrdinal.length)
// growCapacity();
//
// objectsByOrdinal[size] = obj;
// hashOrdinalIntoArray(size, hashedOrdinalArray);
//
// return size++;
// }
//
// /**
// * @return the ordinal of an object previously added to the map. If the object has not been added to the map, returns -1 instead.
// */
// public int get(T obj) {
// int hash = Mixer.hashInt(obj.hashCode());
//
// int bucket = hash % hashedOrdinalArray.length;
// int ordinal = hashedOrdinalArray[bucket];
//
// while(ordinal != -1) {
// if(objectsByOrdinal[ordinal].equals(obj))
// return ordinal;
//
// bucket = (bucket + 1) % hashedOrdinalArray.length;
// ordinal = hashedOrdinalArray[bucket];
// }
//
// return -1;
// }
//
// /**
// * @return the object for a given ordinal. If the ordinal does not yet exist, returns null.
// */
// public T get(int ordinal) {
// if(ordinal >= size)
// return null;
// return objectsByOrdinal[ordinal];
// }
//
// /**
// * @return the number of objects in this map.
// */
// public int size() {
// return size;
// }
//
// private void growCapacity() {
// int newHashedOrdinalArray[] = newHashedOrdinalArray(hashedOrdinalArray.length * 2);
//
// for(int i=0;i<objectsByOrdinal.length;i++) {
// hashOrdinalIntoArray(i, newHashedOrdinalArray);
// }
//
// objectsByOrdinal = Arrays.copyOf(objectsByOrdinal, objectsByOrdinal.length * 2);
// hashedOrdinalArray = newHashedOrdinalArray;
// }
//
// private void hashOrdinalIntoArray(int ordinal, int hashedOrdinalArray[]) {
// int hash = Mixer.hashInt(objectsByOrdinal[ordinal].hashCode());
//
// int bucket = hash % hashedOrdinalArray.length;
//
// while(hashedOrdinalArray[bucket] != -1) {
// bucket = (bucket + 1) % hashedOrdinalArray.length;
// }
//
// hashedOrdinalArray[bucket] = ordinal;
// }
//
// private int[] newHashedOrdinalArray(int length) {
// int arr[] = new int[length];
// Arrays.fill(arr, -1);
// return arr;
// }
//
// /**
// * @return an {@link Iterator} over the objects in this mapping.
// */
// @Override
// public Iterator<T> iterator() {
// return new ArrayIterator<T>(objectsByOrdinal, size);
// }
//
// }
// Path: src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
import java.util.Iterator;
import com.netflix.nfgraph.util.OrdinalMap;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
/**
* <code>NFGraphModelHolder</code> maintains an ordering over the models in a given NFGraph.<p>
*
* An {@link NFGraph} may contain one or more "connection models". A "connection model" is a grouping of the set of connections
* between nodes in the graph.<p>
*
* Connections added for a connection model will be visible only for that model. Use of multiple connection models will
* add a minimum of one byte per model-specific connection set per node. As a result, this feature should be used only
* when the number of connection models is and will remain low.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFGraphModelHolder implements Iterable<String> {
public static final String CONNECTION_MODEL_GLOBAL = "global";
|
private OrdinalMap<String> modelMap;
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/build/NFBuildGraphOrdinalSet.java
|
// Path: src/main/java/com/netflix/nfgraph/OrdinalIterator.java
// public interface OrdinalIterator {
//
// /**
// * This value will be returned from <code>nextOrdinal()</code> after the iteration is completed.
// */
// public static final int NO_MORE_ORDINALS = Integer.MAX_VALUE;
//
// /**
// * @return the next ordinal in this set.
// */
// public int nextOrdinal();
//
// /**
// * Rewinds this <code>OrdinalIterator</code> to the beginning of the set.
// */
// public void reset();
//
// /**
// * Obtain a copy of this <code>OrdinalIterator</code>. The returned <code>OrdinalIterator</code> will be reset to the beginning of the set.
// */
// public OrdinalIterator copy();
//
// /**
// * @return <code>true</code> if the ordinals returned from this set are guaranteed to be in ascending order. Returns <code>false</code> otherwise.
// */
// public boolean isOrdered();
//
// /**
// * An iterator which always return <code>OrdinalIterator.NO_MORE_ORDINALS</code>
// */
// public static final OrdinalIterator EMPTY_ITERATOR = new OrdinalIterator() {
// @Override public int nextOrdinal() { return NO_MORE_ORDINALS; }
//
// @Override public void reset() { }
//
// @Override public OrdinalIterator copy() { return this; }
//
// @Override public boolean isOrdered() { return true; }
// };
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/OrdinalSet.java
// public abstract class OrdinalSet {
//
// /**
// * Returns <code>true</code> when the specified value is contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(1)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet} and {@link NFBuildGraphOrdinalSet}
// */
// public abstract boolean contains(int value);
//
// /**
// * Returns <code>true</code> when all specified values are contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(m)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}, where <code>m</code> is the number of specified elements.<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet}, where <code>n</code> is the number of elements in the set.<br>
// * <code>O(n * m)</code> for {@link NFBuildGraphOrdinalSet}.
// */
// public boolean containsAll(int... values) {
// for(int value : values) {
// if(!contains(value))
// return false;
// }
// return true;
// }
//
// /**
// * Returns an array containing all elements in the set.
// */
// public int[] asArray() {
// int arr[] = new int[size()];
// OrdinalIterator iter = iterator();
//
// int ordinal = iter.nextOrdinal();
// int i = 0;
//
// while(ordinal != NO_MORE_ORDINALS) {
// arr[i++] = ordinal;
// ordinal = iter.nextOrdinal();
// }
//
// return arr;
// }
//
// /**
// * @return an {@link OrdinalIterator} over this set.
// */
// public abstract OrdinalIterator iterator();
//
// /**
// * @return the number of ordinals in this set.
// */
// public abstract int size();
//
// private static final int EMPTY_ORDINAL_ARRAY[] = new int[0];
//
// /**
// * An empty <code>OrdinalSet</code>.
// */
// public static final OrdinalSet EMPTY_SET = new OrdinalSet() {
// @Override public boolean contains(int value) { return false; }
//
// @Override public int[] asArray() { return EMPTY_ORDINAL_ARRAY; }
//
// @Override public OrdinalIterator iterator() { return EMPTY_ITERATOR; }
//
// @Override public int size() { return 0; }
// };
// }
|
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
/**
* And implementation of {@link OrdinalSet} returned for connections in an {@link NFBuildGraph}.
*/
public class NFBuildGraphOrdinalSet extends OrdinalSet {
private final int ordinals[];
private final int size;
public NFBuildGraphOrdinalSet(int ordinals[], int size) {
this.ordinals = ordinals;
this.size = size;
}
/**
* {@inheritDoc}
*/
@Override
public boolean contains(int value) {
for(int i=0;i<size;i++) {
if(ordinals[i] == value) {
return true;
}
}
return false;
}
/**
* {@inheritDoc}
*/
@Override
public int[] asArray() {
return Arrays.copyOf(ordinals, size);
}
/**
* {@inheritDoc}
*/
@Override
|
// Path: src/main/java/com/netflix/nfgraph/OrdinalIterator.java
// public interface OrdinalIterator {
//
// /**
// * This value will be returned from <code>nextOrdinal()</code> after the iteration is completed.
// */
// public static final int NO_MORE_ORDINALS = Integer.MAX_VALUE;
//
// /**
// * @return the next ordinal in this set.
// */
// public int nextOrdinal();
//
// /**
// * Rewinds this <code>OrdinalIterator</code> to the beginning of the set.
// */
// public void reset();
//
// /**
// * Obtain a copy of this <code>OrdinalIterator</code>. The returned <code>OrdinalIterator</code> will be reset to the beginning of the set.
// */
// public OrdinalIterator copy();
//
// /**
// * @return <code>true</code> if the ordinals returned from this set are guaranteed to be in ascending order. Returns <code>false</code> otherwise.
// */
// public boolean isOrdered();
//
// /**
// * An iterator which always return <code>OrdinalIterator.NO_MORE_ORDINALS</code>
// */
// public static final OrdinalIterator EMPTY_ITERATOR = new OrdinalIterator() {
// @Override public int nextOrdinal() { return NO_MORE_ORDINALS; }
//
// @Override public void reset() { }
//
// @Override public OrdinalIterator copy() { return this; }
//
// @Override public boolean isOrdered() { return true; }
// };
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/OrdinalSet.java
// public abstract class OrdinalSet {
//
// /**
// * Returns <code>true</code> when the specified value is contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(1)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet} and {@link NFBuildGraphOrdinalSet}
// */
// public abstract boolean contains(int value);
//
// /**
// * Returns <code>true</code> when all specified values are contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(m)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}, where <code>m</code> is the number of specified elements.<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet}, where <code>n</code> is the number of elements in the set.<br>
// * <code>O(n * m)</code> for {@link NFBuildGraphOrdinalSet}.
// */
// public boolean containsAll(int... values) {
// for(int value : values) {
// if(!contains(value))
// return false;
// }
// return true;
// }
//
// /**
// * Returns an array containing all elements in the set.
// */
// public int[] asArray() {
// int arr[] = new int[size()];
// OrdinalIterator iter = iterator();
//
// int ordinal = iter.nextOrdinal();
// int i = 0;
//
// while(ordinal != NO_MORE_ORDINALS) {
// arr[i++] = ordinal;
// ordinal = iter.nextOrdinal();
// }
//
// return arr;
// }
//
// /**
// * @return an {@link OrdinalIterator} over this set.
// */
// public abstract OrdinalIterator iterator();
//
// /**
// * @return the number of ordinals in this set.
// */
// public abstract int size();
//
// private static final int EMPTY_ORDINAL_ARRAY[] = new int[0];
//
// /**
// * An empty <code>OrdinalSet</code>.
// */
// public static final OrdinalSet EMPTY_SET = new OrdinalSet() {
// @Override public boolean contains(int value) { return false; }
//
// @Override public int[] asArray() { return EMPTY_ORDINAL_ARRAY; }
//
// @Override public OrdinalIterator iterator() { return EMPTY_ITERATOR; }
//
// @Override public int size() { return 0; }
// };
// }
// Path: src/main/java/com/netflix/nfgraph/build/NFBuildGraphOrdinalSet.java
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
/**
* And implementation of {@link OrdinalSet} returned for connections in an {@link NFBuildGraph}.
*/
public class NFBuildGraphOrdinalSet extends OrdinalSet {
private final int ordinals[];
private final int size;
public NFBuildGraphOrdinalSet(int ordinals[], int size) {
this.ordinals = ordinals;
this.size = size;
}
/**
* {@inheritDoc}
*/
@Override
public boolean contains(int value) {
for(int i=0;i<size;i++) {
if(ordinals[i] == value) {
return true;
}
}
return false;
}
/**
* {@inheritDoc}
*/
@Override
public int[] asArray() {
return Arrays.copyOf(ordinals, size);
}
/**
* {@inheritDoc}
*/
@Override
|
public OrdinalIterator iterator() {
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/compressed/SingleOrdinalSet.java
|
// Path: src/main/java/com/netflix/nfgraph/OrdinalIterator.java
// public interface OrdinalIterator {
//
// /**
// * This value will be returned from <code>nextOrdinal()</code> after the iteration is completed.
// */
// public static final int NO_MORE_ORDINALS = Integer.MAX_VALUE;
//
// /**
// * @return the next ordinal in this set.
// */
// public int nextOrdinal();
//
// /**
// * Rewinds this <code>OrdinalIterator</code> to the beginning of the set.
// */
// public void reset();
//
// /**
// * Obtain a copy of this <code>OrdinalIterator</code>. The returned <code>OrdinalIterator</code> will be reset to the beginning of the set.
// */
// public OrdinalIterator copy();
//
// /**
// * @return <code>true</code> if the ordinals returned from this set are guaranteed to be in ascending order. Returns <code>false</code> otherwise.
// */
// public boolean isOrdered();
//
// /**
// * An iterator which always return <code>OrdinalIterator.NO_MORE_ORDINALS</code>
// */
// public static final OrdinalIterator EMPTY_ITERATOR = new OrdinalIterator() {
// @Override public int nextOrdinal() { return NO_MORE_ORDINALS; }
//
// @Override public void reset() { }
//
// @Override public OrdinalIterator copy() { return this; }
//
// @Override public boolean isOrdered() { return true; }
// };
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/OrdinalSet.java
// public abstract class OrdinalSet {
//
// /**
// * Returns <code>true</code> when the specified value is contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(1)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet} and {@link NFBuildGraphOrdinalSet}
// */
// public abstract boolean contains(int value);
//
// /**
// * Returns <code>true</code> when all specified values are contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(m)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}, where <code>m</code> is the number of specified elements.<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet}, where <code>n</code> is the number of elements in the set.<br>
// * <code>O(n * m)</code> for {@link NFBuildGraphOrdinalSet}.
// */
// public boolean containsAll(int... values) {
// for(int value : values) {
// if(!contains(value))
// return false;
// }
// return true;
// }
//
// /**
// * Returns an array containing all elements in the set.
// */
// public int[] asArray() {
// int arr[] = new int[size()];
// OrdinalIterator iter = iterator();
//
// int ordinal = iter.nextOrdinal();
// int i = 0;
//
// while(ordinal != NO_MORE_ORDINALS) {
// arr[i++] = ordinal;
// ordinal = iter.nextOrdinal();
// }
//
// return arr;
// }
//
// /**
// * @return an {@link OrdinalIterator} over this set.
// */
// public abstract OrdinalIterator iterator();
//
// /**
// * @return the number of ordinals in this set.
// */
// public abstract int size();
//
// private static final int EMPTY_ORDINAL_ARRAY[] = new int[0];
//
// /**
// * An empty <code>OrdinalSet</code>.
// */
// public static final OrdinalSet EMPTY_SET = new OrdinalSet() {
// @Override public boolean contains(int value) { return false; }
//
// @Override public int[] asArray() { return EMPTY_ORDINAL_ARRAY; }
//
// @Override public OrdinalIterator iterator() { return EMPTY_ITERATOR; }
//
// @Override public int size() { return 0; }
// };
// }
|
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
/**
* An implementation of {@link OrdinalSet} containing a single ordinal.
*/
public class SingleOrdinalSet extends OrdinalSet {
private final int ordinal;
/**
 * Creates a set containing exactly the given ordinal.
 *
 * @param ordinal the single ordinal held by this set
 */
public SingleOrdinalSet(int ordinal) {
    this.ordinal = ordinal;
}
@Override
public boolean contains(int value) {
    // Membership holds only for the one ordinal this set wraps.
    return value == ordinal;
}
@Override
public int[] asArray() {
    // A fresh one-element array is produced on each call so callers
    // may freely mutate the result.
    int[] result = new int[1];
    result[0] = ordinal;
    return result;
}
@Override
|
// Path: src/main/java/com/netflix/nfgraph/OrdinalIterator.java
// public interface OrdinalIterator {
//
// /**
// * This value will be returned from <code>nextOrdinal()</code> after the iteration is completed.
// */
// public static final int NO_MORE_ORDINALS = Integer.MAX_VALUE;
//
// /**
// * @return the next ordinal in this set.
// */
// public int nextOrdinal();
//
// /**
// * Rewinds this <code>OrdinalIterator</code> to the beginning of the set.
// */
// public void reset();
//
// /**
// * Obtain a copy of this <code>OrdinalIterator</code>. The returned <code>OrdinalIterator</code> will be reset to the beginning of the set.
// */
// public OrdinalIterator copy();
//
// /**
// * @return <code>true</code> if the ordinals returned from this set are guaranteed to be in ascending order. Returns <code>false</code> otherwise.
// */
// public boolean isOrdered();
//
// /**
// * An iterator which always returns <code>OrdinalIterator.NO_MORE_ORDINALS</code>
// */
// public static final OrdinalIterator EMPTY_ITERATOR = new OrdinalIterator() {
// @Override public int nextOrdinal() { return NO_MORE_ORDINALS; }
//
// @Override public void reset() { }
//
// @Override public OrdinalIterator copy() { return this; }
//
// @Override public boolean isOrdered() { return true; }
// };
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/OrdinalSet.java
// public abstract class OrdinalSet {
//
// /**
// * Returns <code>true</code> when the specified value is contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(1)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet} and {@link NFBuildGraphOrdinalSet}
// */
// public abstract boolean contains(int value);
//
// /**
// * Returns <code>true</code> when all specified values are contained in this set. Depending on the implementation,
// * this operation will have one of two performance characteristics:<p>
// *
// * <code>O(m)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}, where <code>m</code> is the number of specified elements.<br>
// * <code>O(n)</code> for {@link CompactOrdinalSet}, where <code>n</code> is the number of elements in the set.<br>
// * <code>O(n * m)</code> for {@link NFBuildGraphOrdinalSet}.
// */
// public boolean containsAll(int... values) {
// for(int value : values) {
// if(!contains(value))
// return false;
// }
// return true;
// }
//
// /**
// * Returns an array containing all elements in the set.
// */
// public int[] asArray() {
// int arr[] = new int[size()];
// OrdinalIterator iter = iterator();
//
// int ordinal = iter.nextOrdinal();
// int i = 0;
//
// while(ordinal != NO_MORE_ORDINALS) {
// arr[i++] = ordinal;
// ordinal = iter.nextOrdinal();
// }
//
// return arr;
// }
//
// /**
// * @return an {@link OrdinalIterator} over this set.
// */
// public abstract OrdinalIterator iterator();
//
// /**
// * @return the number of ordinals in this set.
// */
// public abstract int size();
//
// private static final int EMPTY_ORDINAL_ARRAY[] = new int[0];
//
// /**
// * An empty <code>OrdinalSet</code>.
// */
// public static final OrdinalSet EMPTY_SET = new OrdinalSet() {
// @Override public boolean contains(int value) { return false; }
//
// @Override public int[] asArray() { return EMPTY_ORDINAL_ARRAY; }
//
// @Override public OrdinalIterator iterator() { return EMPTY_ITERATOR; }
//
// @Override public int size() { return 0; }
// };
// }
// Path: src/main/java/com/netflix/nfgraph/compressed/SingleOrdinalSet.java
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
/**
* An implementation of {@link OrdinalSet} containing a single ordinal.
*/
public class SingleOrdinalSet extends OrdinalSet {
private final int ordinal;
/**
 * Creates a set containing exactly the given ordinal.
 *
 * @param ordinal the single ordinal held by this set
 */
public SingleOrdinalSet(int ordinal) {
    this.ordinal = ordinal;
}
@Override
public boolean contains(int value) {
    // True only when the queried value matches the single stored ordinal.
    return value == ordinal;
}
@Override
public int[] asArray() {
    // Returns a new single-element array; the caller owns the copy.
    int[] result = new int[1];
    result[0] = ordinal;
    return result;
}
@Override
|
public OrdinalIterator iterator() {
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/build/NFBuildGraphNodeCache.java
|
// Path: src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
// public class NFGraphModelHolder implements Iterable<String> {
//
// public static final String CONNECTION_MODEL_GLOBAL = "global";
//
// private OrdinalMap<String> modelMap;
//
// public NFGraphModelHolder() {
// modelMap = new OrdinalMap<String>();
// modelMap.add(CONNECTION_MODEL_GLOBAL);
// }
//
// public int size() {
// return modelMap.size();
// }
//
// public int getModelIndex(String connectionModel) {
// return modelMap.add(connectionModel);
// }
//
// public String getModel(int modelIndex) {
// return modelMap.get(modelIndex);
// }
//
// public Iterator<String> iterator() {
// return modelMap.iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFGraphSpec.java
// public class NFGraphSpec implements Iterable<NFNodeSpec> {
//
// private final Map<String, NFNodeSpec> nodeSpecs;
//
// /**
// * Instantiate a graph specification with no {@link NFNodeSpec}s.
// */
// public NFGraphSpec() {
// this.nodeSpecs = new HashMap<String, NFNodeSpec>();
// }
//
// /**
// * Instantiate a graph specification with the given {@link NFNodeSpec}.
// */
// public NFGraphSpec(NFNodeSpec... nodeTypes) {
// this();
//
// for(NFNodeSpec spec : nodeTypes) {
// addNodeSpec(spec);
// }
// }
//
// /**
// * @return the {@link NFNodeSpec} for the specified node type.
// */
// public NFNodeSpec getNodeSpec(String nodeType) {
// NFNodeSpec spec = nodeSpecs.get(nodeType);
// if(spec == null)
// throw new NFGraphException("Node spec " + nodeType + " is undefined");
// return spec;
// }
//
// /**
// * Add a node type to this graph specification.
// */
// public void addNodeSpec(NFNodeSpec nodeSpec) {
// nodeSpecs.put(nodeSpec.getNodeTypeName(), nodeSpec);
// }
//
// /**
// * @return the number of node types defined by this graph specification.
// */
// public int size() {
// return nodeSpecs.size();
// }
//
// /**
// * @return a {@link List} containing the names of each of the node types.
// */
// public List<String> getNodeTypes() {
// return new ArrayList<String>(nodeSpecs.keySet());
// }
//
// /**
// * Returns an {@link Iterator} over the {@link NFNodeSpec}s contained in this graph specification.
// */
// @Override
// public Iterator<NFNodeSpec> iterator() {
// return nodeSpecs.values().iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java
// public class NFNodeSpec implements Iterable<NFPropertySpec> {
//
// private final String nodeTypeName;
// private final NFPropertySpec propertySpecs[];
//
// private final int numSingleProperties;
// private final int numMultipleProperties;
//
// /**
// * The constructor for an <code>NFNodeSpec</code>.
// *
// * @param nodeTypeName the name of the node type
// * @param propertySpecs a complete listing of the properties available for this node type.
// */
// public NFNodeSpec(String nodeTypeName, NFPropertySpec... propertySpecs) {
// this.nodeTypeName = nodeTypeName;
// this.propertySpecs = propertySpecs;
//
// int numSingleProperties = 0;
// int numMultipleProperties = 0;
//
// for(NFPropertySpec propertySpec : propertySpecs) {
// propertySpec.setPropertyIndex(propertySpec.isSingle() ? numSingleProperties++ : numMultipleProperties++);
// }
//
// this.numSingleProperties = numSingleProperties;
// this.numMultipleProperties = numMultipleProperties;
// }
//
// public String getNodeTypeName() {
// return nodeTypeName;
// }
//
// public NFPropertySpec[] getPropertySpecs() {
// return propertySpecs;
// }
//
// public NFPropertySpec getPropertySpec(String propertyName) {
// for(NFPropertySpec spec : propertySpecs) {
// if(spec.getName().equals(propertyName))
// return spec;
// }
// throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
// }
//
// public int getNumSingleProperties() {
// return numSingleProperties;
// }
//
// public int getNumMultipleProperties() {
// return numMultipleProperties;
// }
//
// @Override
// public Iterator<NFPropertySpec> iterator() {
// return new ArrayIterator<NFPropertySpec>(propertySpecs);
// }
//
// }
|
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
public class NFBuildGraphNodeCache {
private final NFGraphSpec graphSpec;
|
// Path: src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
// public class NFGraphModelHolder implements Iterable<String> {
//
// public static final String CONNECTION_MODEL_GLOBAL = "global";
//
// private OrdinalMap<String> modelMap;
//
// public NFGraphModelHolder() {
// modelMap = new OrdinalMap<String>();
// modelMap.add(CONNECTION_MODEL_GLOBAL);
// }
//
// public int size() {
// return modelMap.size();
// }
//
// public int getModelIndex(String connectionModel) {
// return modelMap.add(connectionModel);
// }
//
// public String getModel(int modelIndex) {
// return modelMap.get(modelIndex);
// }
//
// public Iterator<String> iterator() {
// return modelMap.iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFGraphSpec.java
// public class NFGraphSpec implements Iterable<NFNodeSpec> {
//
// private final Map<String, NFNodeSpec> nodeSpecs;
//
// /**
// * Instantiate a graph specification with no {@link NFNodeSpec}s.
// */
// public NFGraphSpec() {
// this.nodeSpecs = new HashMap<String, NFNodeSpec>();
// }
//
// /**
// * Instantiate a graph specification with the given {@link NFNodeSpec}.
// */
// public NFGraphSpec(NFNodeSpec... nodeTypes) {
// this();
//
// for(NFNodeSpec spec : nodeTypes) {
// addNodeSpec(spec);
// }
// }
//
// /**
// * @return the {@link NFNodeSpec} for the specified node type.
// */
// public NFNodeSpec getNodeSpec(String nodeType) {
// NFNodeSpec spec = nodeSpecs.get(nodeType);
// if(spec == null)
// throw new NFGraphException("Node spec " + nodeType + " is undefined");
// return spec;
// }
//
// /**
// * Add a node type to this graph specification.
// */
// public void addNodeSpec(NFNodeSpec nodeSpec) {
// nodeSpecs.put(nodeSpec.getNodeTypeName(), nodeSpec);
// }
//
// /**
// * @return the number of node types defined by this graph specification.
// */
// public int size() {
// return nodeSpecs.size();
// }
//
// /**
// * @return a {@link List} containing the names of each of the node types.
// */
// public List<String> getNodeTypes() {
// return new ArrayList<String>(nodeSpecs.keySet());
// }
//
// /**
// * Returns an {@link Iterator} over the {@link NFNodeSpec}s contained in this graph specification.
// */
// @Override
// public Iterator<NFNodeSpec> iterator() {
// return nodeSpecs.values().iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java
// public class NFNodeSpec implements Iterable<NFPropertySpec> {
//
// private final String nodeTypeName;
// private final NFPropertySpec propertySpecs[];
//
// private final int numSingleProperties;
// private final int numMultipleProperties;
//
// /**
// * The constructor for an <code>NFNodeSpec</code>.
// *
// * @param nodeTypeName the name of the node type
// * @param propertySpecs a complete listing of the properties available for this node type.
// */
// public NFNodeSpec(String nodeTypeName, NFPropertySpec... propertySpecs) {
// this.nodeTypeName = nodeTypeName;
// this.propertySpecs = propertySpecs;
//
// int numSingleProperties = 0;
// int numMultipleProperties = 0;
//
// for(NFPropertySpec propertySpec : propertySpecs) {
// propertySpec.setPropertyIndex(propertySpec.isSingle() ? numSingleProperties++ : numMultipleProperties++);
// }
//
// this.numSingleProperties = numSingleProperties;
// this.numMultipleProperties = numMultipleProperties;
// }
//
// public String getNodeTypeName() {
// return nodeTypeName;
// }
//
// public NFPropertySpec[] getPropertySpecs() {
// return propertySpecs;
// }
//
// public NFPropertySpec getPropertySpec(String propertyName) {
// for(NFPropertySpec spec : propertySpecs) {
// if(spec.getName().equals(propertyName))
// return spec;
// }
// throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
// }
//
// public int getNumSingleProperties() {
// return numSingleProperties;
// }
//
// public int getNumMultipleProperties() {
// return numMultipleProperties;
// }
//
// @Override
// public Iterator<NFPropertySpec> iterator() {
// return new ArrayIterator<NFPropertySpec>(propertySpecs);
// }
//
// }
// Path: src/main/java/com/netflix/nfgraph/build/NFBuildGraphNodeCache.java
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
public class NFBuildGraphNodeCache {
private final NFGraphSpec graphSpec;
|
private final NFGraphModelHolder buildGraphModelHolder;
|
Netflix/netflix-graph
|
src/main/java/com/netflix/nfgraph/build/NFBuildGraphNodeCache.java
|
// Path: src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
// public class NFGraphModelHolder implements Iterable<String> {
//
// public static final String CONNECTION_MODEL_GLOBAL = "global";
//
// private OrdinalMap<String> modelMap;
//
// public NFGraphModelHolder() {
// modelMap = new OrdinalMap<String>();
// modelMap.add(CONNECTION_MODEL_GLOBAL);
// }
//
// public int size() {
// return modelMap.size();
// }
//
// public int getModelIndex(String connectionModel) {
// return modelMap.add(connectionModel);
// }
//
// public String getModel(int modelIndex) {
// return modelMap.get(modelIndex);
// }
//
// public Iterator<String> iterator() {
// return modelMap.iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFGraphSpec.java
// public class NFGraphSpec implements Iterable<NFNodeSpec> {
//
// private final Map<String, NFNodeSpec> nodeSpecs;
//
// /**
// * Instantiate a graph specification with no {@link NFNodeSpec}s.
// */
// public NFGraphSpec() {
// this.nodeSpecs = new HashMap<String, NFNodeSpec>();
// }
//
// /**
// * Instantiate a graph specification with the given {@link NFNodeSpec}.
// */
// public NFGraphSpec(NFNodeSpec... nodeTypes) {
// this();
//
// for(NFNodeSpec spec : nodeTypes) {
// addNodeSpec(spec);
// }
// }
//
// /**
// * @return the {@link NFNodeSpec} for the specified node type.
// */
// public NFNodeSpec getNodeSpec(String nodeType) {
// NFNodeSpec spec = nodeSpecs.get(nodeType);
// if(spec == null)
// throw new NFGraphException("Node spec " + nodeType + " is undefined");
// return spec;
// }
//
// /**
// * Add a node type to this graph specification.
// */
// public void addNodeSpec(NFNodeSpec nodeSpec) {
// nodeSpecs.put(nodeSpec.getNodeTypeName(), nodeSpec);
// }
//
// /**
// * @return the number of node types defined by this graph specification.
// */
// public int size() {
// return nodeSpecs.size();
// }
//
// /**
// * @return a {@link List} containing the names of each of the node types.
// */
// public List<String> getNodeTypes() {
// return new ArrayList<String>(nodeSpecs.keySet());
// }
//
// /**
// * Returns an {@link Iterator} over the {@link NFNodeSpec}s contained in this graph specification.
// */
// @Override
// public Iterator<NFNodeSpec> iterator() {
// return nodeSpecs.values().iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java
// public class NFNodeSpec implements Iterable<NFPropertySpec> {
//
// private final String nodeTypeName;
// private final NFPropertySpec propertySpecs[];
//
// private final int numSingleProperties;
// private final int numMultipleProperties;
//
// /**
// * The constructor for an <code>NFNodeSpec</code>.
// *
// * @param nodeTypeName the name of the node type
// * @param propertySpecs a complete listing of the properties available for this node type.
// */
// public NFNodeSpec(String nodeTypeName, NFPropertySpec... propertySpecs) {
// this.nodeTypeName = nodeTypeName;
// this.propertySpecs = propertySpecs;
//
// int numSingleProperties = 0;
// int numMultipleProperties = 0;
//
// for(NFPropertySpec propertySpec : propertySpecs) {
// propertySpec.setPropertyIndex(propertySpec.isSingle() ? numSingleProperties++ : numMultipleProperties++);
// }
//
// this.numSingleProperties = numSingleProperties;
// this.numMultipleProperties = numMultipleProperties;
// }
//
// public String getNodeTypeName() {
// return nodeTypeName;
// }
//
// public NFPropertySpec[] getPropertySpecs() {
// return propertySpecs;
// }
//
// public NFPropertySpec getPropertySpec(String propertyName) {
// for(NFPropertySpec spec : propertySpecs) {
// if(spec.getName().equals(propertyName))
// return spec;
// }
// throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
// }
//
// public int getNumSingleProperties() {
// return numSingleProperties;
// }
//
// public int getNumMultipleProperties() {
// return numMultipleProperties;
// }
//
// @Override
// public Iterator<NFPropertySpec> iterator() {
// return new ArrayIterator<NFPropertySpec>(propertySpecs);
// }
//
// }
|
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
public class NFBuildGraphNodeCache {
private final NFGraphSpec graphSpec;
private final NFGraphModelHolder buildGraphModelHolder;
private final Map<String,NFBuildGraphNodeList> nodesByOrdinal;
/**
 * Creates a node cache backed by an empty per-node-type map.
 *
 * @param graphSpec   the graph specification used to look up node specs by type name
 * @param modelHolder the connection-model holder shared with the build graph
 */
NFBuildGraphNodeCache(NFGraphSpec graphSpec, NFGraphModelHolder modelHolder) {
    this.graphSpec = graphSpec;
    this.buildGraphModelHolder = modelHolder;
    // Lazily populated: node lists are created per node type on first access.
    this.nodesByOrdinal = new HashMap<String, NFBuildGraphNodeList>();
}
NFBuildGraphNode getNode(String nodeType, int ordinal) {
NFBuildGraphNodeList nodes = getNodes(nodeType);
|
// Path: src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
// public class NFGraphModelHolder implements Iterable<String> {
//
// public static final String CONNECTION_MODEL_GLOBAL = "global";
//
// private OrdinalMap<String> modelMap;
//
// public NFGraphModelHolder() {
// modelMap = new OrdinalMap<String>();
// modelMap.add(CONNECTION_MODEL_GLOBAL);
// }
//
// public int size() {
// return modelMap.size();
// }
//
// public int getModelIndex(String connectionModel) {
// return modelMap.add(connectionModel);
// }
//
// public String getModel(int modelIndex) {
// return modelMap.get(modelIndex);
// }
//
// public Iterator<String> iterator() {
// return modelMap.iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFGraphSpec.java
// public class NFGraphSpec implements Iterable<NFNodeSpec> {
//
// private final Map<String, NFNodeSpec> nodeSpecs;
//
// /**
// * Instantiate a graph specification with no {@link NFNodeSpec}s.
// */
// public NFGraphSpec() {
// this.nodeSpecs = new HashMap<String, NFNodeSpec>();
// }
//
// /**
// * Instantiate a graph specification with the given {@link NFNodeSpec}.
// */
// public NFGraphSpec(NFNodeSpec... nodeTypes) {
// this();
//
// for(NFNodeSpec spec : nodeTypes) {
// addNodeSpec(spec);
// }
// }
//
// /**
// * @return the {@link NFNodeSpec} for the specified node type.
// */
// public NFNodeSpec getNodeSpec(String nodeType) {
// NFNodeSpec spec = nodeSpecs.get(nodeType);
// if(spec == null)
// throw new NFGraphException("Node spec " + nodeType + " is undefined");
// return spec;
// }
//
// /**
// * Add a node type to this graph specification.
// */
// public void addNodeSpec(NFNodeSpec nodeSpec) {
// nodeSpecs.put(nodeSpec.getNodeTypeName(), nodeSpec);
// }
//
// /**
// * @return the number of node types defined by this graph specification.
// */
// public int size() {
// return nodeSpecs.size();
// }
//
// /**
// * @return a {@link List} containing the names of each of the node types.
// */
// public List<String> getNodeTypes() {
// return new ArrayList<String>(nodeSpecs.keySet());
// }
//
// /**
// * Returns an {@link Iterator} over the {@link NFNodeSpec}s contained in this graph specification.
// */
// @Override
// public Iterator<NFNodeSpec> iterator() {
// return nodeSpecs.values().iterator();
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java
// public class NFNodeSpec implements Iterable<NFPropertySpec> {
//
// private final String nodeTypeName;
// private final NFPropertySpec propertySpecs[];
//
// private final int numSingleProperties;
// private final int numMultipleProperties;
//
// /**
// * The constructor for an <code>NFNodeSpec</code>.
// *
// * @param nodeTypeName the name of the node type
// * @param propertySpecs a complete listing of the properties available for this node type.
// */
// public NFNodeSpec(String nodeTypeName, NFPropertySpec... propertySpecs) {
// this.nodeTypeName = nodeTypeName;
// this.propertySpecs = propertySpecs;
//
// int numSingleProperties = 0;
// int numMultipleProperties = 0;
//
// for(NFPropertySpec propertySpec : propertySpecs) {
// propertySpec.setPropertyIndex(propertySpec.isSingle() ? numSingleProperties++ : numMultipleProperties++);
// }
//
// this.numSingleProperties = numSingleProperties;
// this.numMultipleProperties = numMultipleProperties;
// }
//
// public String getNodeTypeName() {
// return nodeTypeName;
// }
//
// public NFPropertySpec[] getPropertySpecs() {
// return propertySpecs;
// }
//
// public NFPropertySpec getPropertySpec(String propertyName) {
// for(NFPropertySpec spec : propertySpecs) {
// if(spec.getName().equals(propertyName))
// return spec;
// }
// throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
// }
//
// public int getNumSingleProperties() {
// return numSingleProperties;
// }
//
// public int getNumMultipleProperties() {
// return numMultipleProperties;
// }
//
// @Override
// public Iterator<NFPropertySpec> iterator() {
// return new ArrayIterator<NFPropertySpec>(propertySpecs);
// }
//
// }
// Path: src/main/java/com/netflix/nfgraph/build/NFBuildGraphNodeCache.java
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
public class NFBuildGraphNodeCache {
private final NFGraphSpec graphSpec;
private final NFGraphModelHolder buildGraphModelHolder;
private final Map<String,NFBuildGraphNodeList> nodesByOrdinal;
/**
 * Creates a node cache backed by an empty per-node-type map.
 *
 * @param graphSpec   the graph specification used to look up node specs by type name
 * @param modelHolder the connection-model holder shared with the build graph
 */
NFBuildGraphNodeCache(NFGraphSpec graphSpec, NFGraphModelHolder modelHolder) {
    this.graphSpec = graphSpec;
    this.buildGraphModelHolder = modelHolder;
    // Lazily populated: node lists are created per node type on first access.
    this.nodesByOrdinal = new HashMap<String, NFBuildGraphNodeList>();
}
NFBuildGraphNode getNode(String nodeType, int ordinal) {
NFBuildGraphNodeList nodes = getNodes(nodeType);
|
NFNodeSpec nodeSpec = graphSpec.getNodeSpec(nodeType);
|
Netflix/netflix-graph
|
src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java
// public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {
//
// private final Map<String, int[]>pointersByOrdinal;
//
// public NFCompressedGraphIntPointers() {
// this.pointersByOrdinal = new HashMap<String, int[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// int pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length) {
// if(pointers[ordinal] == -1)
// return -1;
// return 0xFFFFFFFFL & pointers[ordinal];
// }
// return -1;
// }
//
// public void addPointers(String nodeType, int pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// @Override
// public Map<String, long[]> asMap() {
// Map<String, long[]> map = new HashMap<String, long[]>();
//
// for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
// map.put(entry.getKey(), toLongArray(entry.getValue()));
// }
//
// return map;
// }
//
// private long[] toLongArray(int[] arr) {
// long l[] = new long[arr.length];
//
// for(int i=0;i<arr.length;i++) {
// l[i] = arr[i];
// }
//
// return l;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java
// public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {
//
// private final Map<String, long[]>pointersByOrdinal;
//
// public NFCompressedGraphLongPointers() {
// this.pointersByOrdinal = new HashMap<String, long[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// long pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length)
// return pointers[ordinal];
// return -1;
// }
//
// public void addPointers(String nodeType, long pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// public Map<String, long[]> asMap() {
// return pointersByOrdinal;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
|
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java
// public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {
//
// private final Map<String, int[]>pointersByOrdinal;
//
// public NFCompressedGraphIntPointers() {
// this.pointersByOrdinal = new HashMap<String, int[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// int pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length) {
// if(pointers[ordinal] == -1)
// return -1;
// return 0xFFFFFFFFL & pointers[ordinal];
// }
// return -1;
// }
//
// public void addPointers(String nodeType, int pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// @Override
// public Map<String, long[]> asMap() {
// Map<String, long[]> map = new HashMap<String, long[]>();
//
// for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
// map.put(entry.getKey(), toLongArray(entry.getValue()));
// }
//
// return map;
// }
//
// private long[] toLongArray(int[] arr) {
// long l[] = new long[arr.length];
//
// for(int i=0;i<arr.length;i++) {
// l[i] = arr[i];
// }
//
// return l;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java
// public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {
//
// private final Map<String, long[]>pointersByOrdinal;
//
// public NFCompressedGraphLongPointers() {
// this.pointersByOrdinal = new HashMap<String, long[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// long pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length)
// return pointers[ordinal];
// return -1;
// }
//
// public void addPointers(String nodeType, long pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// public Map<String, long[]> asMap() {
// return pointersByOrdinal;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
// Path: src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
|
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
|
Netflix/netflix-graph
|
src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java
// public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {
//
// private final Map<String, int[]>pointersByOrdinal;
//
// public NFCompressedGraphIntPointers() {
// this.pointersByOrdinal = new HashMap<String, int[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// int pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length) {
// if(pointers[ordinal] == -1)
// return -1;
// return 0xFFFFFFFFL & pointers[ordinal];
// }
// return -1;
// }
//
// public void addPointers(String nodeType, int pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// @Override
// public Map<String, long[]> asMap() {
// Map<String, long[]> map = new HashMap<String, long[]>();
//
// for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
// map.put(entry.getKey(), toLongArray(entry.getValue()));
// }
//
// return map;
// }
//
// private long[] toLongArray(int[] arr) {
// long l[] = new long[arr.length];
//
// for(int i=0;i<arr.length;i++) {
// l[i] = arr[i];
// }
//
// return l;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java
// public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {
//
// private final Map<String, long[]>pointersByOrdinal;
//
// public NFCompressedGraphLongPointers() {
// this.pointersByOrdinal = new HashMap<String, long[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// long pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length)
// return pointers[ordinal];
// return -1;
// }
//
// public void addPointers(String nodeType, long pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// public Map<String, long[]> asMap() {
// return pointersByOrdinal;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
|
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
pointers.addPointers("test", new long[] { 1, 2, 3 });
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, (long)Integer.MAX_VALUE * 2);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
serializer.serializePointers(dos);
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java
// public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {
//
// private final Map<String, int[]>pointersByOrdinal;
//
// public NFCompressedGraphIntPointers() {
// this.pointersByOrdinal = new HashMap<String, int[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// int pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length) {
// if(pointers[ordinal] == -1)
// return -1;
// return 0xFFFFFFFFL & pointers[ordinal];
// }
// return -1;
// }
//
// public void addPointers(String nodeType, int pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// @Override
// public Map<String, long[]> asMap() {
// Map<String, long[]> map = new HashMap<String, long[]>();
//
// for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
// map.put(entry.getKey(), toLongArray(entry.getValue()));
// }
//
// return map;
// }
//
// private long[] toLongArray(int[] arr) {
// long l[] = new long[arr.length];
//
// for(int i=0;i<arr.length;i++) {
// l[i] = arr[i];
// }
//
// return l;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java
// public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {
//
// private final Map<String, long[]>pointersByOrdinal;
//
// public NFCompressedGraphLongPointers() {
// this.pointersByOrdinal = new HashMap<String, long[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// long pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length)
// return pointers[ordinal];
// return -1;
// }
//
// public void addPointers(String nodeType, long pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// public Map<String, long[]> asMap() {
// return pointersByOrdinal;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
// Path: src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
pointers.addPointers("test", new long[] { 1, 2, 3 });
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, (long)Integer.MAX_VALUE * 2);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
serializer.serializePointers(dos);
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
|
NFCompressedGraphPointers deserialized = new NFCompressedGraphPointersDeserializer().deserializePointers(dis);
|
Netflix/netflix-graph
|
src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java
// public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {
//
// private final Map<String, int[]>pointersByOrdinal;
//
// public NFCompressedGraphIntPointers() {
// this.pointersByOrdinal = new HashMap<String, int[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// int pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length) {
// if(pointers[ordinal] == -1)
// return -1;
// return 0xFFFFFFFFL & pointers[ordinal];
// }
// return -1;
// }
//
// public void addPointers(String nodeType, int pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// @Override
// public Map<String, long[]> asMap() {
// Map<String, long[]> map = new HashMap<String, long[]>();
//
// for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
// map.put(entry.getKey(), toLongArray(entry.getValue()));
// }
//
// return map;
// }
//
// private long[] toLongArray(int[] arr) {
// long l[] = new long[arr.length];
//
// for(int i=0;i<arr.length;i++) {
// l[i] = arr[i];
// }
//
// return l;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java
// public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {
//
// private final Map<String, long[]>pointersByOrdinal;
//
// public NFCompressedGraphLongPointers() {
// this.pointersByOrdinal = new HashMap<String, long[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// long pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length)
// return pointers[ordinal];
// return -1;
// }
//
// public void addPointers(String nodeType, long pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// public Map<String, long[]> asMap() {
// return pointersByOrdinal;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
|
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
pointers.addPointers("test", new long[] { 1, 2, 3 });
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, (long)Integer.MAX_VALUE * 2);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
serializer.serializePointers(dos);
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
NFCompressedGraphPointers deserialized = new NFCompressedGraphPointersDeserializer().deserializePointers(dis);
|
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java
// public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {
//
// private final Map<String, int[]>pointersByOrdinal;
//
// public NFCompressedGraphIntPointers() {
// this.pointersByOrdinal = new HashMap<String, int[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// int pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length) {
// if(pointers[ordinal] == -1)
// return -1;
// return 0xFFFFFFFFL & pointers[ordinal];
// }
// return -1;
// }
//
// public void addPointers(String nodeType, int pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// @Override
// public Map<String, long[]> asMap() {
// Map<String, long[]> map = new HashMap<String, long[]>();
//
// for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
// map.put(entry.getKey(), toLongArray(entry.getValue()));
// }
//
// return map;
// }
//
// private long[] toLongArray(int[] arr) {
// long l[] = new long[arr.length];
//
// for(int i=0;i<arr.length;i++) {
// l[i] = arr[i];
// }
//
// return l;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java
// public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {
//
// private final Map<String, long[]>pointersByOrdinal;
//
// public NFCompressedGraphLongPointers() {
// this.pointersByOrdinal = new HashMap<String, long[]>();
// }
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal) {
// long pointers[] = pointersByOrdinal.get(nodeType);
// if(pointers == null)
// throw new NFGraphException("Undefined node type: " + nodeType);
// if(ordinal < pointers.length)
// return pointers[ordinal];
// return -1;
// }
//
// public void addPointers(String nodeType, long pointers[]) {
// pointersByOrdinal.put(nodeType, pointers);
// }
//
// public int numPointers(String nodeType) {
// return pointersByOrdinal.get(nodeType).length;
// }
//
// public Map<String, long[]> asMap() {
// return pointersByOrdinal;
// }
//
// }
//
// Path: src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java
// public interface NFCompressedGraphPointers {
//
// /**
// * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
// */
// public long getPointer(String nodeType, int ordinal);
//
// public int numPointers(String nodeType);
//
// public Map<String, long[]> asMap();
//
// }
// Path: src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
pointers.addPointers("test", new long[] { 1, 2, 3 });
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, (long)Integer.MAX_VALUE * 2);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
serializer.serializePointers(dos);
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
NFCompressedGraphPointers deserialized = new NFCompressedGraphPointersDeserializer().deserializePointers(dis);
|
Assert.assertTrue(deserialized instanceof NFCompressedGraphIntPointers);
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/constant/MenuItems.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/bean/ThingMenuItem.java
// public class ThingMenuItem {
// private FragmentName fragmentName;
//
// private int drawableId;
//
// private String name;
//
// private boolean isSelected;
//
// public FragmentName getFragmentName() {
// return fragmentName;
// }
//
// public void setFragmentName(FragmentName fragmentName) {
// this.fragmentName = fragmentName;
// }
//
// public boolean isSelected() {
// return isSelected;
// }
//
// public void setSelected(boolean isSelected) {
// this.isSelected = isSelected;
// }
//
// public int getDrawableId() {
// return drawableId;
// }
//
// public void setDrawableId(int drawableId) {
// this.drawableId = drawableId;
// }
//
// public String getName() {
// return name;
// }
//
// public void setName(String name) {
// this.name = name;
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
|
import com.zhenghuiyan.todaything.R;
import com.zhenghuiyan.todaything.bean.ThingMenuItem;
import com.zhenghuiyan.todaything.util.MiscUtil;
import java.util.ArrayList;
import java.util.List;
|
package com.zhenghuiyan.todaything.constant;
/**
* Created by huihui on 2015/8/23.
*/
public final class MenuItems {
public static List<ThingMenuItem> sMenuItems = null;
private MenuItems() {}
public static List<ThingMenuItem> getMenuItems() {
if (sMenuItems == null) {
sMenuItems = new ArrayList<ThingMenuItem>();
ThingMenuItem item = new ThingMenuItem();
//day
item.setFragmentName(FragmentName.Today);
item.setDrawableId(R.drawable.ic_event_grey600_18dp);
item.setSelected(true);
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/bean/ThingMenuItem.java
// public class ThingMenuItem {
// private FragmentName fragmentName;
//
// private int drawableId;
//
// private String name;
//
// private boolean isSelected;
//
// public FragmentName getFragmentName() {
// return fragmentName;
// }
//
// public void setFragmentName(FragmentName fragmentName) {
// this.fragmentName = fragmentName;
// }
//
// public boolean isSelected() {
// return isSelected;
// }
//
// public void setSelected(boolean isSelected) {
// this.isSelected = isSelected;
// }
//
// public int getDrawableId() {
// return drawableId;
// }
//
// public void setDrawableId(int drawableId) {
// this.drawableId = drawableId;
// }
//
// public String getName() {
// return name;
// }
//
// public void setName(String name) {
// this.name = name;
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/constant/MenuItems.java
import com.zhenghuiyan.todaything.R;
import com.zhenghuiyan.todaything.bean.ThingMenuItem;
import com.zhenghuiyan.todaything.util.MiscUtil;
import java.util.ArrayList;
import java.util.List;
package com.zhenghuiyan.todaything.constant;
/**
* Created by huihui on 2015/8/23.
*/
public final class MenuItems {
public static List<ThingMenuItem> sMenuItems = null;
private MenuItems() {}
public static List<ThingMenuItem> getMenuItems() {
if (sMenuItems == null) {
sMenuItems = new ArrayList<ThingMenuItem>();
ThingMenuItem item = new ThingMenuItem();
//day
item.setFragmentName(FragmentName.Today);
item.setDrawableId(R.drawable.ic_event_grey600_18dp);
item.setSelected(true);
|
item.setName(MiscUtil.getAppContext().getResources().getString(R.string.menu_day));
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/task/ProgressDialogAsyncTask.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/UIUtil.java
// public class UIUtil {
//
// private UIUtil() {}
//
// /**
// * return size.x size.y
// * */
// private static Point getScreenSize(Activity a) {
// if (a == null) {
// return null;
// }
// Display display = a.getWindowManager().getDefaultDisplay();
// Point size = new Point();
// display.getSize(size);
//
// return size;
// }
//
// public static int getScreenWidth(Activity a) {
// int width = 0;
// Point p = getScreenSize(a);
//
// if (p != null) {
// width = p.x;
// }
//
// return width;
// }
//
// public static int getScreenHeight(Activity a) {
// int height = 0;
// Point p = getScreenSize(a);
//
// if (p != null) {
// height = p.y;
// }
//
// return height;
// }
//
// /**
// * 根据手机的分辨率从 dp 的单位 转成为 px(像素)
// */
// public static int dip2px(Context context, float dpValue) {
// final float scale = context.getResources().getDisplayMetrics().density;
// return (int) (dpValue * scale + 0.5f);
// }
//
// /**
// * 根据手机的分辨率从 px(像素) 的单位 转成为 dp
// */
// public static int px2dip(Context context, float pxValue) {
// final float scale = context.getResources().getDisplayMetrics().density;
// return (int) (pxValue / scale + 0.5f);
// }
//
// /**
// * show dialog
// *
// * */
// public static ProgressDialog showProgressDialog(Activity activity) {
// //activity = modifyDialogContext(activity);
//
// ProgressDialog dialog = new ProgressDialog(activity, activity.getResources().getString(R.string.loading));
// dialog.setCancelable(true);
// if (activity.isFinishing() == false) {
// dialog.show();
// }
// return dialog;
// }
//
// public static void showSnackBar(Activity activity, String message, SnackBar.OnHideListener onHideListener) {
// SnackBar snackBar = new SnackBar(activity, message);
// snackBar.show();
// if (onHideListener != null) {
// snackBar.setOnhideListener(onHideListener);
// }
// }
// }
|
import android.app.Activity;
import android.os.AsyncTask;
import com.gc.materialdesign.widgets.ProgressDialog;
import com.zhenghuiyan.todaything.util.UIUtil;
|
package com.zhenghuiyan.todaything.task;
public abstract class ProgressDialogAsyncTask extends AsyncTask<Void, Void, Void> {
ProgressDialog mDialog;
protected Activity mActivity;
boolean mShowDialog = true;
protected ProgressDialogAsyncTask(Activity activity) {
mActivity = activity;
}
protected ProgressDialogAsyncTask(Activity activity, boolean showDialog) {
mActivity = activity;
mShowDialog = showDialog;
}
@Override
protected void onPreExecute() {
super.onPreExecute();
if (mShowDialog) {
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/UIUtil.java
// public class UIUtil {
//
// private UIUtil() {}
//
// /**
// * return size.x size.y
// * */
// private static Point getScreenSize(Activity a) {
// if (a == null) {
// return null;
// }
// Display display = a.getWindowManager().getDefaultDisplay();
// Point size = new Point();
// display.getSize(size);
//
// return size;
// }
//
// public static int getScreenWidth(Activity a) {
// int width = 0;
// Point p = getScreenSize(a);
//
// if (p != null) {
// width = p.x;
// }
//
// return width;
// }
//
// public static int getScreenHeight(Activity a) {
// int height = 0;
// Point p = getScreenSize(a);
//
// if (p != null) {
// height = p.y;
// }
//
// return height;
// }
//
// /**
// * 根据手机的分辨率从 dp 的单位 转成为 px(像素)
// */
// public static int dip2px(Context context, float dpValue) {
// final float scale = context.getResources().getDisplayMetrics().density;
// return (int) (dpValue * scale + 0.5f);
// }
//
// /**
// * 根据手机的分辨率从 px(像素) 的单位 转成为 dp
// */
// public static int px2dip(Context context, float pxValue) {
// final float scale = context.getResources().getDisplayMetrics().density;
// return (int) (pxValue / scale + 0.5f);
// }
//
// /**
// * show dialog
// *
// * */
// public static ProgressDialog showProgressDialog(Activity activity) {
// //activity = modifyDialogContext(activity);
//
// ProgressDialog dialog = new ProgressDialog(activity, activity.getResources().getString(R.string.loading));
// dialog.setCancelable(true);
// if (activity.isFinishing() == false) {
// dialog.show();
// }
// return dialog;
// }
//
// public static void showSnackBar(Activity activity, String message, SnackBar.OnHideListener onHideListener) {
// SnackBar snackBar = new SnackBar(activity, message);
// snackBar.show();
// if (onHideListener != null) {
// snackBar.setOnhideListener(onHideListener);
// }
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/task/ProgressDialogAsyncTask.java
import android.app.Activity;
import android.os.AsyncTask;
import com.gc.materialdesign.widgets.ProgressDialog;
import com.zhenghuiyan.todaything.util.UIUtil;
package com.zhenghuiyan.todaything.task;
public abstract class ProgressDialogAsyncTask extends AsyncTask<Void, Void, Void> {
ProgressDialog mDialog;
protected Activity mActivity;
boolean mShowDialog = true;
protected ProgressDialogAsyncTask(Activity activity) {
mActivity = activity;
}
protected ProgressDialogAsyncTask(Activity activity, boolean showDialog) {
mActivity = activity;
mShowDialog = showDialog;
}
@Override
protected void onPreExecute() {
super.onPreExecute();
if (mShowDialog) {
|
mDialog = UIUtil.showProgressDialog(mActivity);
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/adapter/MenuListViewAdapter.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/bean/ThingMenuItem.java
// public class ThingMenuItem {
// private FragmentName fragmentName;
//
// private int drawableId;
//
// private String name;
//
// private boolean isSelected;
//
// public FragmentName getFragmentName() {
// return fragmentName;
// }
//
// public void setFragmentName(FragmentName fragmentName) {
// this.fragmentName = fragmentName;
// }
//
// public boolean isSelected() {
// return isSelected;
// }
//
// public void setSelected(boolean isSelected) {
// this.isSelected = isSelected;
// }
//
// public int getDrawableId() {
// return drawableId;
// }
//
// public void setDrawableId(int drawableId) {
// this.drawableId = drawableId;
// }
//
// public String getName() {
// return name;
// }
//
// public void setName(String name) {
// this.name = name;
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
|
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.zhenghuiyan.todaything.R;
import com.zhenghuiyan.todaything.bean.ThingMenuItem;
import com.zhenghuiyan.todaything.util.MiscUtil;
import java.util.ArrayList;
import java.util.List;
|
package com.zhenghuiyan.todaything.adapter;
/**
* Created by zhenghuiyan on 2015/1/28.
*/
public class MenuListViewAdapter extends BaseAdapter{
private List<ThingMenuItem> mItems= null;
public MenuListViewAdapter(List<ThingMenuItem> items) {
this.mItems = items;
}
@Override
public int getCount() {
return mItems.size();
}
@Override
public Object getItem(int position) {
return mItems.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder = null;
if (convertView == null) {
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/bean/ThingMenuItem.java
// public class ThingMenuItem {
// private FragmentName fragmentName;
//
// private int drawableId;
//
// private String name;
//
// private boolean isSelected;
//
// public FragmentName getFragmentName() {
// return fragmentName;
// }
//
// public void setFragmentName(FragmentName fragmentName) {
// this.fragmentName = fragmentName;
// }
//
// public boolean isSelected() {
// return isSelected;
// }
//
// public void setSelected(boolean isSelected) {
// this.isSelected = isSelected;
// }
//
// public int getDrawableId() {
// return drawableId;
// }
//
// public void setDrawableId(int drawableId) {
// this.drawableId = drawableId;
// }
//
// public String getName() {
// return name;
// }
//
// public void setName(String name) {
// this.name = name;
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/adapter/MenuListViewAdapter.java
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.zhenghuiyan.todaything.R;
import com.zhenghuiyan.todaything.bean.ThingMenuItem;
import com.zhenghuiyan.todaything.util.MiscUtil;
import java.util.ArrayList;
import java.util.List;
package com.zhenghuiyan.todaything.adapter;
/**
* Created by zhenghuiyan on 2015/1/28.
*/
public class MenuListViewAdapter extends BaseAdapter{
private List<ThingMenuItem> mItems= null;
public MenuListViewAdapter(List<ThingMenuItem> items) {
this.mItems = items;
}
@Override
public int getCount() {
return mItems.size();
}
@Override
public Object getItem(int position) {
return mItems.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder = null;
if (convertView == null) {
|
convertView = LayoutInflater.from(MiscUtil.getAppContext()).inflate(R.layout.listview_menu_item, null);
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/bean/Thing.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/data/ScheduleContract.java
// public final class ScheduleContract {
//
// public ScheduleContract() {}
//
// public static abstract class ScheduleContractEntry implements BaseColumns {
// public static final String TABLE_NAME = "Schedule";
//
// public static final String COLUMN_NAME_ID = "id";
//
// public static final String COLUMN_NAME_STIME = "s_time";
//
// public static final String COLUMN_NAME_CONTENT = "content";
//
// public static final String COLUMN_NAME_FROM_TIME = "from_time";
//
// public static final String COLUMN_NAME_TO_TIME = "to_time";
//
// public static final String COLUMN_NAME_COMPLETE_DATE = "complete_date";
//
// public static final String COLUMN_NAME_THING_ID = "thing_id";
//
// public static final String COLUMN_NAME_WEEK_NUM = "week_num";
//
// public static final String DEFAULT_DATE = "2000-01-01";
//
//
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/DateUtil.java
// public class DateUtil {
// public static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
//
// private DateUtil() {}
//
// public static String dateToString(Date d) {
// return dateFormat.format(d);
// }
//
// public static Date parseDate(String s) {
// try {
// return dateFormat.parse(s);
// } catch (ParseException e) {
// e.printStackTrace();
// }
//
// return null;
// }
//
// public static int getDayOfWeek(Date date) {
// if (date == null) {
// return -1;
// }
// Calendar c = Calendar.getInstance();
// c.setTime(date);
//
// return c.get(Calendar.DAY_OF_WEEK);
// }
// }
|
import com.zhenghuiyan.todaything.data.ScheduleContract;
import com.zhenghuiyan.todaything.util.DateUtil;
import java.io.Serializable;
import java.util.Date;
|
package com.zhenghuiyan.todaything.bean;
/**
* Created by zhenghuiyan on 2015/1/27.
*/
public class Thing implements Comparable<Thing>, Serializable{
private int id = 0;
private String sTime;
private String content;
private HourAndMinute fromTime;
private HourAndMinute toTime;
private int thing_id;
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/data/ScheduleContract.java
// public final class ScheduleContract {
//
// public ScheduleContract() {}
//
// public static abstract class ScheduleContractEntry implements BaseColumns {
// public static final String TABLE_NAME = "Schedule";
//
// public static final String COLUMN_NAME_ID = "id";
//
// public static final String COLUMN_NAME_STIME = "s_time";
//
// public static final String COLUMN_NAME_CONTENT = "content";
//
// public static final String COLUMN_NAME_FROM_TIME = "from_time";
//
// public static final String COLUMN_NAME_TO_TIME = "to_time";
//
// public static final String COLUMN_NAME_COMPLETE_DATE = "complete_date";
//
// public static final String COLUMN_NAME_THING_ID = "thing_id";
//
// public static final String COLUMN_NAME_WEEK_NUM = "week_num";
//
// public static final String DEFAULT_DATE = "2000-01-01";
//
//
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/DateUtil.java
// public class DateUtil {
// public static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
//
// private DateUtil() {}
//
// public static String dateToString(Date d) {
// return dateFormat.format(d);
// }
//
// public static Date parseDate(String s) {
// try {
// return dateFormat.parse(s);
// } catch (ParseException e) {
// e.printStackTrace();
// }
//
// return null;
// }
//
// public static int getDayOfWeek(Date date) {
// if (date == null) {
// return -1;
// }
// Calendar c = Calendar.getInstance();
// c.setTime(date);
//
// return c.get(Calendar.DAY_OF_WEEK);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/bean/Thing.java
import com.zhenghuiyan.todaything.data.ScheduleContract;
import com.zhenghuiyan.todaything.util.DateUtil;
import java.io.Serializable;
import java.util.Date;
package com.zhenghuiyan.todaything.bean;
/**
* Created by zhenghuiyan on 2015/1/27.
*/
public class Thing implements Comparable<Thing>, Serializable{
private int id = 0;
private String sTime;
private String content;
private HourAndMinute fromTime;
private HourAndMinute toTime;
private int thing_id;
|
private String complete_date = ScheduleContract.ScheduleContractEntry.DEFAULT_DATE;
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/bean/Thing.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/data/ScheduleContract.java
// public final class ScheduleContract {
//
// public ScheduleContract() {}
//
// public static abstract class ScheduleContractEntry implements BaseColumns {
// public static final String TABLE_NAME = "Schedule";
//
// public static final String COLUMN_NAME_ID = "id";
//
// public static final String COLUMN_NAME_STIME = "s_time";
//
// public static final String COLUMN_NAME_CONTENT = "content";
//
// public static final String COLUMN_NAME_FROM_TIME = "from_time";
//
// public static final String COLUMN_NAME_TO_TIME = "to_time";
//
// public static final String COLUMN_NAME_COMPLETE_DATE = "complete_date";
//
// public static final String COLUMN_NAME_THING_ID = "thing_id";
//
// public static final String COLUMN_NAME_WEEK_NUM = "week_num";
//
// public static final String DEFAULT_DATE = "2000-01-01";
//
//
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/DateUtil.java
// public class DateUtil {
// public static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
//
// private DateUtil() {}
//
// public static String dateToString(Date d) {
// return dateFormat.format(d);
// }
//
// public static Date parseDate(String s) {
// try {
// return dateFormat.parse(s);
// } catch (ParseException e) {
// e.printStackTrace();
// }
//
// return null;
// }
//
// public static int getDayOfWeek(Date date) {
// if (date == null) {
// return -1;
// }
// Calendar c = Calendar.getInstance();
// c.setTime(date);
//
// return c.get(Calendar.DAY_OF_WEEK);
// }
// }
|
import com.zhenghuiyan.todaything.data.ScheduleContract;
import com.zhenghuiyan.todaything.util.DateUtil;
import java.io.Serializable;
import java.util.Date;
|
private int weekNum = 0;
//binary num of sunday to saturday
private final int SUN_NUM = 0b00000001;
private final int MON_NUM = 0b00000010;
private final int TUE_NUM = 0b00000100;
private final int WED_NUM = 0b00001000;
private final int THU_NUM = 0b00010000;
private final int FRI_NUM = 0b00100000;
private final int SAT_NUM = 0b01000000;
public Thing() {}
public Thing(int id, String sTime, String content, HourAndMinute fromTime, HourAndMinute toTime, int thing_id, String complete_date, int weekNumber) {
this.id = id;
this.sTime = sTime;
this.content = content;
this.fromTime = fromTime;
this.toTime = toTime;
this.thing_id = thing_id;
this.complete_date = complete_date;
this.weekNum = weekNumber;
}
public Thing(int id, String sTime, String content, String fromTime, String toTime, int thing_id, String complete_date, int weekNumber) {
this(id, sTime, content, new HourAndMinute(fromTime), new HourAndMinute(toTime), thing_id, complete_date, weekNumber);
}
public static boolean isTodayRepeatByWeekNum(int weekNum) {
Thing t = new Thing();
t.setWeekNum(weekNum);
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/data/ScheduleContract.java
// public final class ScheduleContract {
//
// public ScheduleContract() {}
//
// public static abstract class ScheduleContractEntry implements BaseColumns {
// public static final String TABLE_NAME = "Schedule";
//
// public static final String COLUMN_NAME_ID = "id";
//
// public static final String COLUMN_NAME_STIME = "s_time";
//
// public static final String COLUMN_NAME_CONTENT = "content";
//
// public static final String COLUMN_NAME_FROM_TIME = "from_time";
//
// public static final String COLUMN_NAME_TO_TIME = "to_time";
//
// public static final String COLUMN_NAME_COMPLETE_DATE = "complete_date";
//
// public static final String COLUMN_NAME_THING_ID = "thing_id";
//
// public static final String COLUMN_NAME_WEEK_NUM = "week_num";
//
// public static final String DEFAULT_DATE = "2000-01-01";
//
//
// }
// }
//
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/DateUtil.java
// public class DateUtil {
// public static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
//
// private DateUtil() {}
//
// public static String dateToString(Date d) {
// return dateFormat.format(d);
// }
//
// public static Date parseDate(String s) {
// try {
// return dateFormat.parse(s);
// } catch (ParseException e) {
// e.printStackTrace();
// }
//
// return null;
// }
//
// public static int getDayOfWeek(Date date) {
// if (date == null) {
// return -1;
// }
// Calendar c = Calendar.getInstance();
// c.setTime(date);
//
// return c.get(Calendar.DAY_OF_WEEK);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/bean/Thing.java
import com.zhenghuiyan.todaything.data.ScheduleContract;
import com.zhenghuiyan.todaything.util.DateUtil;
import java.io.Serializable;
import java.util.Date;
private int weekNum = 0;
//binary num of sunday to saturday
private final int SUN_NUM = 0b00000001;
private final int MON_NUM = 0b00000010;
private final int TUE_NUM = 0b00000100;
private final int WED_NUM = 0b00001000;
private final int THU_NUM = 0b00010000;
private final int FRI_NUM = 0b00100000;
private final int SAT_NUM = 0b01000000;
public Thing() {}
public Thing(int id, String sTime, String content, HourAndMinute fromTime, HourAndMinute toTime, int thing_id, String complete_date, int weekNumber) {
this.id = id;
this.sTime = sTime;
this.content = content;
this.fromTime = fromTime;
this.toTime = toTime;
this.thing_id = thing_id;
this.complete_date = complete_date;
this.weekNum = weekNumber;
}
public Thing(int id, String sTime, String content, String fromTime, String toTime, int thing_id, String complete_date, int weekNumber) {
this(id, sTime, content, new HourAndMinute(fromTime), new HourAndMinute(toTime), thing_id, complete_date, weekNumber);
}
public static boolean isTodayRepeatByWeekNum(int weekNum) {
Thing t = new Thing();
t.setWeekNum(weekNum);
|
int dayOfWeek = DateUtil.getDayOfWeek(new Date());
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/data/SPManager.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
|
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import com.zhenghuiyan.todaything.util.MiscUtil;
|
package com.zhenghuiyan.todaything.data;
/**
* Created by zhenghuiyan on 2015/2/2.
*/
public class SPManager {
public SharedPreferences mSharedPreferences;
public static final String LAST_THING_ID_KEY = "last_thing_id";
public static final String LAST_SYNC_TIME = "last_sync_time";
private static class SingletonHolder{
private static final SPManager instance = new SPManager();
}
public static SPManager getInstance() {
return SingletonHolder.instance;
}
private SPManager() {
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/data/SPManager.java
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import com.zhenghuiyan.todaything.util.MiscUtil;
package com.zhenghuiyan.todaything.data;
/**
* Created by zhenghuiyan on 2015/2/2.
*/
public class SPManager {
public SharedPreferences mSharedPreferences;
public static final String LAST_THING_ID_KEY = "last_thing_id";
public static final String LAST_SYNC_TIME = "last_sync_time";
private static class SingletonHolder{
private static final SPManager instance = new SPManager();
}
public static SPManager getInstance() {
return SingletonHolder.instance;
}
private SPManager() {
|
mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(MiscUtil.getAppContext());
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/widget/WeekButton.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
|
import android.content.Context;
import android.util.AttributeSet;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import com.zhenghuiyan.todaything.R;
import com.zhenghuiyan.todaything.util.MiscUtil;
|
package com.zhenghuiyan.todaything.widget;
/**
* Created by zhenghuiyan on 2015/2/5.
*/
public class WeekButton extends CheckBox {
public WeekButton(Context context, AttributeSet attrs) {
super(context, attrs);
setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/widget/WeekButton.java
import android.content.Context;
import android.util.AttributeSet;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import com.zhenghuiyan.todaything.R;
import com.zhenghuiyan.todaything.util.MiscUtil;
package com.zhenghuiyan.todaything.widget;
/**
* Created by zhenghuiyan on 2015/2/5.
*/
public class WeekButton extends CheckBox {
public WeekButton(Context context, AttributeSet attrs) {
super(context, attrs);
setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
|
WeekButton.this.setTextColor(MiscUtil.getAppContext().getResources().getColor(android.R.color.white));
|
zhenghuiy/TodayThing
|
app/src/main/java/com/zhenghuiyan/todaything/TodayThingApp.java
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
|
import android.app.Application;
import com.zhenghuiyan.todaything.util.MiscUtil;
|
package com.zhenghuiyan.todaything;
/**
* Created by zhenghuiyan on 2015/1/28.
*/
public class TodayThingApp extends Application {
@Override
public void onCreate() {
super.onCreate();
init();
}
private void init() {
|
// Path: app/src/main/java/com/zhenghuiyan/todaything/util/MiscUtil.java
// public class MiscUtil {
// public static Context appContext = null;
//
// private MiscUtil() {}
//
// public static void setAppContext(Context c) {
// appContext = c;
// }
//
// public static Context getAppContext() {
// return appContext;
// }
//
// /**
// *
// * */
//
// public static void goToActivity(Activity a, Fragment f, Class<?> b, Bundle bundle, boolean isForResult) {
// Intent intent = new Intent(a, b);
// if (bundle != null) {
// intent.putExtras(bundle);
// }
// if (isForResult && (f != null)) {
// f.startActivityForResult(intent, 0);
// } else if (isForResult && (f == null)) {
// a.startActivityForResult(intent, 0);
// } else {
// a.startActivity(intent);
// }
// a.overridePendingTransition(R.anim.zero_scale_one, R.anim.keep_status);
// }
// }
// Path: app/src/main/java/com/zhenghuiyan/todaything/TodayThingApp.java
import android.app.Application;
import com.zhenghuiyan.todaything.util.MiscUtil;
package com.zhenghuiyan.todaything;
/**
* Created by zhenghuiyan on 2015/1/28.
*/
public class TodayThingApp extends Application {
@Override
public void onCreate() {
super.onCreate();
init();
}
private void init() {
|
MiscUtil.setAppContext(getApplicationContext());
|
thymeleaf/thymeleafexamples-layouts
|
src/main/java/thymeleafexamples/layouts/signup/SignupForm.java
|
// Path: src/main/java/thymeleafexamples/layouts/account/Account.java
// @SuppressWarnings("serial")
// @Entity
// @Table(name = "account")
// public class Account implements java.io.Serializable {
//
// @Id
// @GeneratedValue
// private Long id;
//
// @Column(unique = true)
// private String email;
//
// @JsonIgnore
// private String password;
//
// private String role = "ROLE_USER";
//
// private Instant created;
//
// protected Account() {
//
// }
//
// public Account(String email, String password, String role) {
// this.email = email;
// this.password = password;
// this.role = role;
// this.created = Instant.now();
// }
//
// public Long getId() {
// return id;
// }
//
// public String getEmail() {
// return email;
// }
//
// public void setEmail(String email) {
// this.email = email;
// }
//
// public String getPassword() {
// return password;
// }
//
// public void setPassword(String password) {
// this.password = password;
// }
//
// public String getRole() {
// return role;
// }
//
// public void setRole(String role) {
// this.role = role;
// }
//
// public Instant getCreated() {
// return created;
// }
// }
|
import org.hibernate.validator.constraints.*;
import thymeleafexamples.layouts.account.Account;
|
package thymeleafexamples.layouts.signup;
public class SignupForm {
private static final String NOT_BLANK_MESSAGE = "{notBlank.message}";
private static final String EMAIL_MESSAGE = "{email.message}";
@NotBlank(message = SignupForm.NOT_BLANK_MESSAGE)
@Email(message = SignupForm.EMAIL_MESSAGE)
private String email;
@NotBlank(message = SignupForm.NOT_BLANK_MESSAGE)
private String password;
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
|
// Path: src/main/java/thymeleafexamples/layouts/account/Account.java
// @SuppressWarnings("serial")
// @Entity
// @Table(name = "account")
// public class Account implements java.io.Serializable {
//
// @Id
// @GeneratedValue
// private Long id;
//
// @Column(unique = true)
// private String email;
//
// @JsonIgnore
// private String password;
//
// private String role = "ROLE_USER";
//
// private Instant created;
//
// protected Account() {
//
// }
//
// public Account(String email, String password, String role) {
// this.email = email;
// this.password = password;
// this.role = role;
// this.created = Instant.now();
// }
//
// public Long getId() {
// return id;
// }
//
// public String getEmail() {
// return email;
// }
//
// public void setEmail(String email) {
// this.email = email;
// }
//
// public String getPassword() {
// return password;
// }
//
// public void setPassword(String password) {
// this.password = password;
// }
//
// public String getRole() {
// return role;
// }
//
// public void setRole(String role) {
// this.role = role;
// }
//
// public Instant getCreated() {
// return created;
// }
// }
// Path: src/main/java/thymeleafexamples/layouts/signup/SignupForm.java
import org.hibernate.validator.constraints.*;
import thymeleafexamples.layouts.account.Account;
package thymeleafexamples.layouts.signup;
public class SignupForm {
private static final String NOT_BLANK_MESSAGE = "{notBlank.message}";
private static final String EMAIL_MESSAGE = "{email.message}";
@NotBlank(message = SignupForm.NOT_BLANK_MESSAGE)
@Email(message = SignupForm.EMAIL_MESSAGE)
private String email;
@NotBlank(message = SignupForm.NOT_BLANK_MESSAGE)
private String password;
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
|
public Account createAccount() {
|
thymeleaf/thymeleafexamples-layouts
|
src/main/java/thymeleafexamples/layouts/config/SecurityConfig.java
|
// Path: src/main/java/thymeleafexamples/layouts/account/AccountService.java
// @Service
// @Scope(proxyMode = ScopedProxyMode.TARGET_CLASS)
// public class AccountService implements UserDetailsService {
//
// @Autowired
// private AccountRepository accountRepository;
//
// @Autowired
// private PasswordEncoder passwordEncoder;
//
// @PostConstruct
// protected void initialize() {
// save(new Account("user", "demo", "ROLE_USER"));
// save(new Account("admin", "admin", "ROLE_ADMIN"));
// }
//
// @Transactional
// public Account save(Account account) {
// account.setPassword(passwordEncoder.encode(account.getPassword()));
// accountRepository.save(account);
// return account;
// }
//
// @Override
// public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
// Account account = accountRepository.findOneByEmail(username);
// if(account == null) {
// throw new UsernameNotFoundException("user not found");
// }
// return createUser(account);
// }
//
// public void signin(Account account) {
// SecurityContextHolder.getContext().setAuthentication(authenticate(account));
// }
//
// private Authentication authenticate(Account account) {
// return new UsernamePasswordAuthenticationToken(createUser(account), null, Collections.singleton(createAuthority(account)));
// }
//
// private User createUser(Account account) {
// return new User(account.getEmail(), account.getPassword(), Collections.singleton(createAuthority(account)));
// }
//
// private GrantedAuthority createAuthority(Account account) {
// return new SimpleGrantedAuthority(account.getRole());
// }
//
// }
|
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.rememberme.TokenBasedRememberMeServices;
import thymeleafexamples.layouts.account.AccountService;
|
package thymeleafexamples.layouts.config;
@Configuration
@EnableWebSecurity
@EnableGlobalMethodSecurity(securedEnabled = true)
class SecurityConfig extends WebSecurityConfigurerAdapter {
@Autowired
|
// Path: src/main/java/thymeleafexamples/layouts/account/AccountService.java
// @Service
// @Scope(proxyMode = ScopedProxyMode.TARGET_CLASS)
// public class AccountService implements UserDetailsService {
//
// @Autowired
// private AccountRepository accountRepository;
//
// @Autowired
// private PasswordEncoder passwordEncoder;
//
// @PostConstruct
// protected void initialize() {
// save(new Account("user", "demo", "ROLE_USER"));
// save(new Account("admin", "admin", "ROLE_ADMIN"));
// }
//
// @Transactional
// public Account save(Account account) {
// account.setPassword(passwordEncoder.encode(account.getPassword()));
// accountRepository.save(account);
// return account;
// }
//
// @Override
// public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
// Account account = accountRepository.findOneByEmail(username);
// if(account == null) {
// throw new UsernameNotFoundException("user not found");
// }
// return createUser(account);
// }
//
// public void signin(Account account) {
// SecurityContextHolder.getContext().setAuthentication(authenticate(account));
// }
//
// private Authentication authenticate(Account account) {
// return new UsernamePasswordAuthenticationToken(createUser(account), null, Collections.singleton(createAuthority(account)));
// }
//
// private User createUser(Account account) {
// return new User(account.getEmail(), account.getPassword(), Collections.singleton(createAuthority(account)));
// }
//
// private GrantedAuthority createAuthority(Account account) {
// return new SimpleGrantedAuthority(account.getRole());
// }
//
// }
// Path: src/main/java/thymeleafexamples/layouts/config/SecurityConfig.java
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.rememberme.TokenBasedRememberMeServices;
import thymeleafexamples.layouts.account.AccountService;
package thymeleafexamples.layouts.config;
@Configuration
@EnableWebSecurity
@EnableGlobalMethodSecurity(securedEnabled = true)
class SecurityConfig extends WebSecurityConfigurerAdapter {
@Autowired
|
private AccountService accountService;
|
thymeleaf/thymeleafexamples-layouts
|
src/main/java/thymeleafexamples/layouts/config/JpaConfig.java
|
// Path: src/main/java/thymeleafexamples/layouts/Application.java
// public interface Application {}
|
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.hibernate.cfg.Environment;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.convert.threeten.Jsr310JpaConverters;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.util.ClassUtils;
import thymeleafexamples.layouts.Application;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Properties;
|
package thymeleafexamples.layouts.config;
@Configuration
@EnableTransactionManagement
|
// Path: src/main/java/thymeleafexamples/layouts/Application.java
// public interface Application {}
// Path: src/main/java/thymeleafexamples/layouts/config/JpaConfig.java
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.hibernate.cfg.Environment;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.convert.threeten.Jsr310JpaConverters;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.util.ClassUtils;
import thymeleafexamples.layouts.Application;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Properties;
package thymeleafexamples.layouts.config;
@Configuration
@EnableTransactionManagement
|
@EnableJpaRepositories(basePackageClasses = Application.class)
|
thymeleaf/thymeleafexamples-layouts
|
src/main/java/thymeleafexamples/layouts/support/web/MessageHelper.java
|
// Path: src/main/java/thymeleafexamples/layouts/support/web/Message.java
// public static final String MESSAGE_ATTRIBUTE = "message";
|
import static thymeleafexamples.layouts.support.web.Message.MESSAGE_ATTRIBUTE;
import org.springframework.ui.Model;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
|
package thymeleafexamples.layouts.support.web;
public final class MessageHelper {
private MessageHelper() {
}
public static void addSuccessAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.SUCCESS, args);
}
public static void addErrorAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.DANGER, args);
}
public static void addInfoAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.INFO, args);
}
public static void addWarningAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.WARNING, args);
}
private static void addAttribute(RedirectAttributes ra, String message, Message.Type type, Object... args) {
|
// Path: src/main/java/thymeleafexamples/layouts/support/web/Message.java
// public static final String MESSAGE_ATTRIBUTE = "message";
// Path: src/main/java/thymeleafexamples/layouts/support/web/MessageHelper.java
import static thymeleafexamples.layouts.support.web.Message.MESSAGE_ATTRIBUTE;
import org.springframework.ui.Model;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
package thymeleafexamples.layouts.support.web;
public final class MessageHelper {
private MessageHelper() {
}
public static void addSuccessAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.SUCCESS, args);
}
public static void addErrorAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.DANGER, args);
}
public static void addInfoAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.INFO, args);
}
public static void addWarningAttribute(RedirectAttributes ra, String message, Object... args) {
addAttribute(ra, message, Message.Type.WARNING, args);
}
private static void addAttribute(RedirectAttributes ra, String message, Message.Type type, Object... args) {
|
ra.addFlashAttribute(MESSAGE_ATTRIBUTE, new Message(message, type, args));
|
thymeleaf/thymeleafexamples-layouts
|
src/main/java/thymeleafexamples/layouts/config/ApplicationConfig.java
|
// Path: src/main/java/thymeleafexamples/layouts/Application.java
// public interface Application {}
|
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import thymeleafexamples.layouts.Application;
|
package thymeleafexamples.layouts.config;
@Configuration
@PropertySource("classpath:persistence.properties")
@PropertySource("classpath:application.properties")
|
// Path: src/main/java/thymeleafexamples/layouts/Application.java
// public interface Application {}
// Path: src/main/java/thymeleafexamples/layouts/config/ApplicationConfig.java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import thymeleafexamples.layouts.Application;
package thymeleafexamples.layouts.config;
@Configuration
@PropertySource("classpath:persistence.properties")
@PropertySource("classpath:application.properties")
|
@ComponentScan(basePackageClasses = Application.class)
|
nla/outbackcdx
|
test/outbackcdx/ReplicationFeaturesTest.java
|
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Status implements IStatus {
// SWITCH_PROTOCOL(101, "Switching Protocols"), OK(200, "OK"), CREATED(201, "Created"), ACCEPTED(202, "Accepted"), NO_CONTENT(204, "No Content"), PARTIAL_CONTENT(206, "Partial Content"), REDIRECT(301,
// "Moved Permanently"), NOT_MODIFIED(304, "Not Modified"), TEMPORARY_REDIRECT(307, "Temporary Redirect"), BAD_REQUEST(400, "Bad Request"), UNAUTHORIZED(401,
// "Unauthorized"), FORBIDDEN(403, "Forbidden"), NOT_FOUND(404, "Not Found"), METHOD_NOT_ALLOWED(405, "Method Not Allowed"), PAYLOAD_TOO_LARGE(413, "Payload Too Large"), RANGE_NOT_SATISFIABLE(416,
// "Requested Range Not Satisfiable"), INTERNAL_ERROR(500, "Internal Server Error");
// private final int requestStatus;
// private final String description;
//
// Status(int requestStatus, String description) {
// this.requestStatus = requestStatus;
// this.description = description;
// }
//
// @Override
// public int getRequestStatus() {
// return this.requestStatus;
// }
//
// @Override
// public String getDescription() {
// return "" + this.requestStatus + " " + description;
// }
// }
//
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Method {
// GET, PUT, POST, DELETE, HEAD, OPTIONS;
//
// static Method lookup(String method) {
// for (Method m : Method.values()) {
// if (m.toString().equalsIgnoreCase(method)) {
// return m;
// }
// }
// return null;
// }
// }
|
import org.junit.*;
import org.junit.rules.TemporaryFolder;
import outbackcdx.NanoHTTPD.Response.Status;
import outbackcdx.auth.Permit;
import java.io.*;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.Map;
import java.util.Scanner;
import static org.junit.Assert.assertEquals;
import static outbackcdx.NanoHTTPD.Method.*;
import static outbackcdx.NanoHTTPD.Response.Status.OK;
|
package outbackcdx;
public class ReplicationFeaturesTest {
@Rule
public TemporaryFolder folder = new TemporaryFolder();
private Webapp webapp;
private DataStore manager;
@Before
public void setUp() throws IOException {
File root = folder.newFolder();
manager = new DataStore(root, 256, null, Long.MAX_VALUE, null);
webapp = new Webapp(manager, false, Collections.emptyMap(), null, Collections.emptyMap(), 10000);
}
@After
public void tearDown() {
}
// tests for replication features:
// ensure that write urls are disabled on secondary
// ensure that we can retrieve a sequenceNumber on secondary
// ensure that we can delete WALs on primary
@Test
public void testReadOnly() throws Exception {
FeatureFlags.setSecondaryMode(true);
// make a request to a write-able url
// it should 401.
|
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Status implements IStatus {
// SWITCH_PROTOCOL(101, "Switching Protocols"), OK(200, "OK"), CREATED(201, "Created"), ACCEPTED(202, "Accepted"), NO_CONTENT(204, "No Content"), PARTIAL_CONTENT(206, "Partial Content"), REDIRECT(301,
// "Moved Permanently"), NOT_MODIFIED(304, "Not Modified"), TEMPORARY_REDIRECT(307, "Temporary Redirect"), BAD_REQUEST(400, "Bad Request"), UNAUTHORIZED(401,
// "Unauthorized"), FORBIDDEN(403, "Forbidden"), NOT_FOUND(404, "Not Found"), METHOD_NOT_ALLOWED(405, "Method Not Allowed"), PAYLOAD_TOO_LARGE(413, "Payload Too Large"), RANGE_NOT_SATISFIABLE(416,
// "Requested Range Not Satisfiable"), INTERNAL_ERROR(500, "Internal Server Error");
// private final int requestStatus;
// private final String description;
//
// Status(int requestStatus, String description) {
// this.requestStatus = requestStatus;
// this.description = description;
// }
//
// @Override
// public int getRequestStatus() {
// return this.requestStatus;
// }
//
// @Override
// public String getDescription() {
// return "" + this.requestStatus + " " + description;
// }
// }
//
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Method {
// GET, PUT, POST, DELETE, HEAD, OPTIONS;
//
// static Method lookup(String method) {
// for (Method m : Method.values()) {
// if (m.toString().equalsIgnoreCase(method)) {
// return m;
// }
// }
// return null;
// }
// }
// Path: test/outbackcdx/ReplicationFeaturesTest.java
import org.junit.*;
import org.junit.rules.TemporaryFolder;
import outbackcdx.NanoHTTPD.Response.Status;
import outbackcdx.auth.Permit;
import java.io.*;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.Map;
import java.util.Scanner;
import static org.junit.Assert.assertEquals;
import static outbackcdx.NanoHTTPD.Method.*;
import static outbackcdx.NanoHTTPD.Response.Status.OK;
package outbackcdx;
public class ReplicationFeaturesTest {
@Rule
public TemporaryFolder folder = new TemporaryFolder();
private Webapp webapp;
private DataStore manager;
@Before
public void setUp() throws IOException {
File root = folder.newFolder();
manager = new DataStore(root, 256, null, Long.MAX_VALUE, null);
webapp = new Webapp(manager, false, Collections.emptyMap(), null, Collections.emptyMap(), 10000);
}
@After
public void tearDown() {
}
// tests for replication features:
// ensure that write urls are disabled on secondary
// ensure that we can retrieve a sequenceNumber on secondary
// ensure that we can delete WALs on primary
@Test
public void testReadOnly() throws Exception {
FeatureFlags.setSecondaryMode(true);
// make a request to a write-able url
// it should 401.
|
POST("/test", "- 20050614070159 http://nla.gov.au/ text/html 200 AKMCCEPOOWFMGGO5635HFZXGFRLRGWIX - 337023 NLA-AU-CRAWL-000-20050614070144-00003-crawling016.archive.org\n- 20030614070159 http://example.com/ text/html 200 AKMCCEPOOWFMGGO5635HFZXGFRLRGWIX - - - 337023 NLA-AU-CRAWL-000-20050614070144-00003-crawling016.archive.org\n", Status.FORBIDDEN);
|
nla/outbackcdx
|
test/outbackcdx/ReplicationFeaturesTest.java
|
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Status implements IStatus {
// SWITCH_PROTOCOL(101, "Switching Protocols"), OK(200, "OK"), CREATED(201, "Created"), ACCEPTED(202, "Accepted"), NO_CONTENT(204, "No Content"), PARTIAL_CONTENT(206, "Partial Content"), REDIRECT(301,
// "Moved Permanently"), NOT_MODIFIED(304, "Not Modified"), TEMPORARY_REDIRECT(307, "Temporary Redirect"), BAD_REQUEST(400, "Bad Request"), UNAUTHORIZED(401,
// "Unauthorized"), FORBIDDEN(403, "Forbidden"), NOT_FOUND(404, "Not Found"), METHOD_NOT_ALLOWED(405, "Method Not Allowed"), PAYLOAD_TOO_LARGE(413, "Payload Too Large"), RANGE_NOT_SATISFIABLE(416,
// "Requested Range Not Satisfiable"), INTERNAL_ERROR(500, "Internal Server Error");
// private final int requestStatus;
// private final String description;
//
// Status(int requestStatus, String description) {
// this.requestStatus = requestStatus;
// this.description = description;
// }
//
// @Override
// public int getRequestStatus() {
// return this.requestStatus;
// }
//
// @Override
// public String getDescription() {
// return "" + this.requestStatus + " " + description;
// }
// }
//
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Method {
// GET, PUT, POST, DELETE, HEAD, OPTIONS;
//
// static Method lookup(String method) {
// for (Method m : Method.values()) {
// if (m.toString().equalsIgnoreCase(method)) {
// return m;
// }
// }
// return null;
// }
// }
|
import org.junit.*;
import org.junit.rules.TemporaryFolder;
import outbackcdx.NanoHTTPD.Response.Status;
import outbackcdx.auth.Permit;
import java.io.*;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.Map;
import java.util.Scanner;
import static org.junit.Assert.assertEquals;
import static outbackcdx.NanoHTTPD.Method.*;
import static outbackcdx.NanoHTTPD.Response.Status.OK;
|
}
private String POST(String url, String data, NanoHTTPD.Response.Status expectedStatus) throws Exception {
ReplicationFeaturesTest.DummySession session = new ReplicationFeaturesTest.DummySession(POST, url);
session.data(data);
NanoHTTPD.Response response = webapp.handle(new Web.NRequest(session, Permit.full(), ""));
assertEquals(expectedStatus, response.getStatus());
return slurp(response);
}
private String slurp(NanoHTTPD.Response response) throws IOException {
NanoHTTPD.IStreamer streamer = response.getStreamer();
if (streamer != null) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
streamer.stream(out);
return out.toString("UTF-8");
}
InputStream data = response.getData();
if (data != null) {
Scanner scanner = new Scanner(response.getData(), "UTF-8").useDelimiter("\\Z");
if (scanner.hasNext()) {
return scanner.next();
}
}
return "";
}
private static class DummySession implements NanoHTTPD.IHTTPSession {
|
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Status implements IStatus {
// SWITCH_PROTOCOL(101, "Switching Protocols"), OK(200, "OK"), CREATED(201, "Created"), ACCEPTED(202, "Accepted"), NO_CONTENT(204, "No Content"), PARTIAL_CONTENT(206, "Partial Content"), REDIRECT(301,
// "Moved Permanently"), NOT_MODIFIED(304, "Not Modified"), TEMPORARY_REDIRECT(307, "Temporary Redirect"), BAD_REQUEST(400, "Bad Request"), UNAUTHORIZED(401,
// "Unauthorized"), FORBIDDEN(403, "Forbidden"), NOT_FOUND(404, "Not Found"), METHOD_NOT_ALLOWED(405, "Method Not Allowed"), PAYLOAD_TOO_LARGE(413, "Payload Too Large"), RANGE_NOT_SATISFIABLE(416,
// "Requested Range Not Satisfiable"), INTERNAL_ERROR(500, "Internal Server Error");
// private final int requestStatus;
// private final String description;
//
// Status(int requestStatus, String description) {
// this.requestStatus = requestStatus;
// this.description = description;
// }
//
// @Override
// public int getRequestStatus() {
// return this.requestStatus;
// }
//
// @Override
// public String getDescription() {
// return "" + this.requestStatus + " " + description;
// }
// }
//
// Path: src/outbackcdx/NanoHTTPD.java
// public enum Method {
// GET, PUT, POST, DELETE, HEAD, OPTIONS;
//
// static Method lookup(String method) {
// for (Method m : Method.values()) {
// if (m.toString().equalsIgnoreCase(method)) {
// return m;
// }
// }
// return null;
// }
// }
// Path: test/outbackcdx/ReplicationFeaturesTest.java
import org.junit.*;
import org.junit.rules.TemporaryFolder;
import outbackcdx.NanoHTTPD.Response.Status;
import outbackcdx.auth.Permit;
import java.io.*;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.Map;
import java.util.Scanner;
import static org.junit.Assert.assertEquals;
import static outbackcdx.NanoHTTPD.Method.*;
import static outbackcdx.NanoHTTPD.Response.Status.OK;
}
private String POST(String url, String data, NanoHTTPD.Response.Status expectedStatus) throws Exception {
ReplicationFeaturesTest.DummySession session = new ReplicationFeaturesTest.DummySession(POST, url);
session.data(data);
NanoHTTPD.Response response = webapp.handle(new Web.NRequest(session, Permit.full(), ""));
assertEquals(expectedStatus, response.getStatus());
return slurp(response);
}
private String slurp(NanoHTTPD.Response response) throws IOException {
NanoHTTPD.IStreamer streamer = response.getStreamer();
if (streamer != null) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
streamer.stream(out);
return out.toString("UTF-8");
}
InputStream data = response.getData();
if (data != null) {
Scanner scanner = new Scanner(response.getData(), "UTF-8").useDelimiter("\\Z");
if (scanner.hasNext()) {
return scanner.next();
}
}
return "";
}
private static class DummySession implements NanoHTTPD.IHTTPSession {
|
private final NanoHTTPD.Method method;
|
nla/outbackcdx
|
src/outbackcdx/Main.java
|
// Path: src/outbackcdx/UrlCanonicalizer.java
// public static class ConfigurationException extends Exception {
// private static final long serialVersionUID = 1L;
// public ConfigurationException(String msg) {
// super(msg);
// }
// }
|
import com.sun.management.OperatingSystemMXBean;
import com.sun.management.UnixOperatingSystemMXBean;
import outbackcdx.UrlCanonicalizer.ConfigurationException;
import outbackcdx.auth.Authorizer;
import outbackcdx.auth.JwtAuthorizer;
import outbackcdx.auth.KeycloakConfig;
import outbackcdx.auth.NullAuthorizer;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.URL;
import java.nio.channels.Channel;
import java.nio.channels.ServerSocketChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
|
server.start();
System.out.println("OutbackCDX http://" + (host == null ? "localhost" : host) + ":" + port);
synchronized (Main.class) {
Main.class.wait();
}
} else {
ServerSocket socket = openSocket(host, port, inheritSocket);
Web.Server server = new Web.Server(socket, contextPath, controller, authorizer);
ExecutorService threadPool = Executors.newFixedThreadPool(webThreads);
for (String collectionUrl: collectionUrls) {
ChangePollingThread cpt = new ChangePollingThread(collectionUrl, pollingInterval, batchSize, dataStore);
cpt.setDaemon(true);
cpt.start();
}
try {
server.setAsyncRunner(threadPool);
server.start();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
server.stop();
dataStore.close();
}));
System.out.println("OutbackCDX http://" + (host == null ? "localhost" : host) + ":" + port + contextPath);
synchronized (Main.class) {
Main.class.wait();
}
} finally {
threadPool.shutdown();
}
}
}
|
// Path: src/outbackcdx/UrlCanonicalizer.java
// public static class ConfigurationException extends Exception {
// private static final long serialVersionUID = 1L;
// public ConfigurationException(String msg) {
// super(msg);
// }
// }
// Path: src/outbackcdx/Main.java
import com.sun.management.OperatingSystemMXBean;
import com.sun.management.UnixOperatingSystemMXBean;
import outbackcdx.UrlCanonicalizer.ConfigurationException;
import outbackcdx.auth.Authorizer;
import outbackcdx.auth.JwtAuthorizer;
import outbackcdx.auth.KeycloakConfig;
import outbackcdx.auth.NullAuthorizer;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.URL;
import java.nio.channels.Channel;
import java.nio.channels.ServerSocketChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
server.start();
System.out.println("OutbackCDX http://" + (host == null ? "localhost" : host) + ":" + port);
synchronized (Main.class) {
Main.class.wait();
}
} else {
ServerSocket socket = openSocket(host, port, inheritSocket);
Web.Server server = new Web.Server(socket, contextPath, controller, authorizer);
ExecutorService threadPool = Executors.newFixedThreadPool(webThreads);
for (String collectionUrl: collectionUrls) {
ChangePollingThread cpt = new ChangePollingThread(collectionUrl, pollingInterval, batchSize, dataStore);
cpt.setDaemon(true);
cpt.start();
}
try {
server.setAsyncRunner(threadPool);
server.start();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
server.stop();
dataStore.close();
}));
System.out.println("OutbackCDX http://" + (host == null ? "localhost" : host) + ":" + port + contextPath);
synchronized (Main.class) {
Main.class.wait();
}
} finally {
threadPool.shutdown();
}
}
}
|
} catch (InterruptedException | IOException | ConfigurationException e) {
|
heroku/heroku-maven-plugin
|
heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/RunWar.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StdOutOutputAdapter.java
// public class StdOutOutputAdapter implements OutputAdapter {
// private boolean suppressUploadProgress;
//
// public StdOutOutputAdapter(boolean suppressUploadProgress) {
// this.suppressUploadProgress = suppressUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// System.out.println("INFO: " + message);
// }
//
// @Override
// public void logDebug(String message) {
// System.out.println("DEBUG: " + message);
// }
//
// @Override
// public void logWarn(String message) {
// System.out.println("WARN: " + message);
// }
//
// @Override
// public void logError(String message) {
// System.out.println("ERROR: " + message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (!suppressUploadProgress) {
// System.out.printf("Upload progress: %.0f%%\n", ((double) uploaded / (double) contentLength) * 100);
// }
// }
// }
|
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.deploy.standalone.StdOutOutputAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
|
package com.heroku.sdk.deploy;
// Entry point for running a WAR locally. Located in this package to be consistent with DeployJar and DeployWar.
public class RunWar {
public static void main(String[] args) throws IOException, InterruptedException {
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StdOutOutputAdapter.java
// public class StdOutOutputAdapter implements OutputAdapter {
// private boolean suppressUploadProgress;
//
// public StdOutOutputAdapter(boolean suppressUploadProgress) {
// this.suppressUploadProgress = suppressUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// System.out.println("INFO: " + message);
// }
//
// @Override
// public void logDebug(String message) {
// System.out.println("DEBUG: " + message);
// }
//
// @Override
// public void logWarn(String message) {
// System.out.println("WARN: " + message);
// }
//
// @Override
// public void logError(String message) {
// System.out.println("ERROR: " + message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (!suppressUploadProgress) {
// System.out.printf("Upload progress: %.0f%%\n", ((double) uploaded / (double) contentLength) * 100);
// }
// }
// }
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/RunWar.java
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.deploy.standalone.StdOutOutputAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
package com.heroku.sdk.deploy;
// Entry point for running a WAR locally. Located in this package to be consistent with DeployJar and DeployWar.
public class RunWar {
public static void main(String[] args) throws IOException, InterruptedException {
|
final OutputAdapter outputAdapter = new StdOutOutputAdapter(false);
|
heroku/heroku-maven-plugin
|
heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/RunWar.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StdOutOutputAdapter.java
// public class StdOutOutputAdapter implements OutputAdapter {
// private boolean suppressUploadProgress;
//
// public StdOutOutputAdapter(boolean suppressUploadProgress) {
// this.suppressUploadProgress = suppressUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// System.out.println("INFO: " + message);
// }
//
// @Override
// public void logDebug(String message) {
// System.out.println("DEBUG: " + message);
// }
//
// @Override
// public void logWarn(String message) {
// System.out.println("WARN: " + message);
// }
//
// @Override
// public void logError(String message) {
// System.out.println("ERROR: " + message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (!suppressUploadProgress) {
// System.out.printf("Upload progress: %.0f%%\n", ((double) uploaded / (double) contentLength) * 100);
// }
// }
// }
|
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.deploy.standalone.StdOutOutputAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
|
package com.heroku.sdk.deploy;
// Entry point for running a WAR locally. Located in this package to be consistent with DeployJar and DeployWar.
public class RunWar {
public static void main(String[] args) throws IOException, InterruptedException {
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StdOutOutputAdapter.java
// public class StdOutOutputAdapter implements OutputAdapter {
// private boolean suppressUploadProgress;
//
// public StdOutOutputAdapter(boolean suppressUploadProgress) {
// this.suppressUploadProgress = suppressUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// System.out.println("INFO: " + message);
// }
//
// @Override
// public void logDebug(String message) {
// System.out.println("DEBUG: " + message);
// }
//
// @Override
// public void logWarn(String message) {
// System.out.println("WARN: " + message);
// }
//
// @Override
// public void logError(String message) {
// System.out.println("ERROR: " + message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (!suppressUploadProgress) {
// System.out.printf("Upload progress: %.0f%%\n", ((double) uploaded / (double) contentLength) * 100);
// }
// }
// }
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/RunWar.java
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.deploy.standalone.StdOutOutputAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
package com.heroku.sdk.deploy;
// Entry point for running a WAR locally. Located in this package to be consistent with DeployJar and DeployWar.
public class RunWar {
public static void main(String[] args) throws IOException, InterruptedException {
|
final OutputAdapter outputAdapter = new StdOutOutputAdapter(false);
|
heroku/heroku-maven-plugin
|
heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/RunWar.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StdOutOutputAdapter.java
// public class StdOutOutputAdapter implements OutputAdapter {
// private boolean suppressUploadProgress;
//
// public StdOutOutputAdapter(boolean suppressUploadProgress) {
// this.suppressUploadProgress = suppressUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// System.out.println("INFO: " + message);
// }
//
// @Override
// public void logDebug(String message) {
// System.out.println("DEBUG: " + message);
// }
//
// @Override
// public void logWarn(String message) {
// System.out.println("WARN: " + message);
// }
//
// @Override
// public void logError(String message) {
// System.out.println("ERROR: " + message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (!suppressUploadProgress) {
// System.out.printf("Upload progress: %.0f%%\n", ((double) uploaded / (double) contentLength) * 100);
// }
// }
// }
|
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.deploy.standalone.StdOutOutputAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
|
package com.heroku.sdk.deploy;
// Entry point for running a WAR locally. Located in this package to be consistent with DeployJar and DeployWar.
public class RunWar {
public static void main(String[] args) throws IOException, InterruptedException {
final OutputAdapter outputAdapter = new StdOutOutputAdapter(false);
final String herokuWarFileSystemProperty = System.getProperty("heroku.warFile");
if (herokuWarFileSystemProperty == null ) {
outputAdapter.logError("Path to WAR file must be provided with heroku.warFile system property!");
System.exit(-1);
}
final Path warFilePath = Paths.get(herokuWarFileSystemProperty);
if (!Files.exists(warFilePath)) {
outputAdapter.logError(String.format("Could not find WAR file: %s.", warFilePath));
System.exit(-1);
}
final String webAppRunnerVersion
= System.getProperty("heroku.webappRunnerVersion", Constants.DEFAULT_WEBAPP_RUNNER_VERSION);
// Currently there is no support for adding java or webapp runner options. If you need more fine-tuned
// parameters, use webapp-runner directly.
final List<String> javaOptions = Collections.emptyList();
final List<String> webappRunnerOptions = Collections.emptyList();
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StdOutOutputAdapter.java
// public class StdOutOutputAdapter implements OutputAdapter {
// private boolean suppressUploadProgress;
//
// public StdOutOutputAdapter(boolean suppressUploadProgress) {
// this.suppressUploadProgress = suppressUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// System.out.println("INFO: " + message);
// }
//
// @Override
// public void logDebug(String message) {
// System.out.println("DEBUG: " + message);
// }
//
// @Override
// public void logWarn(String message) {
// System.out.println("WARN: " + message);
// }
//
// @Override
// public void logError(String message) {
// System.out.println("ERROR: " + message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (!suppressUploadProgress) {
// System.out.printf("Upload progress: %.0f%%\n", ((double) uploaded / (double) contentLength) * 100);
// }
// }
// }
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/RunWar.java
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.deploy.standalone.StdOutOutputAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
package com.heroku.sdk.deploy;
// Entry point for running a WAR locally. Located in this package to be consistent with DeployJar and DeployWar.
public class RunWar {
public static void main(String[] args) throws IOException, InterruptedException {
final OutputAdapter outputAdapter = new StdOutOutputAdapter(false);
final String herokuWarFileSystemProperty = System.getProperty("heroku.warFile");
if (herokuWarFileSystemProperty == null ) {
outputAdapter.logError("Path to WAR file must be provided with heroku.warFile system property!");
System.exit(-1);
}
final Path warFilePath = Paths.get(herokuWarFileSystemProperty);
if (!Files.exists(warFilePath)) {
outputAdapter.logError(String.format("Could not find WAR file: %s.", warFilePath));
System.exit(-1);
}
final String webAppRunnerVersion
= System.getProperty("heroku.webappRunnerVersion", Constants.DEFAULT_WEBAPP_RUNNER_VERSION);
// Currently there is no support for adding java or webapp runner options. If you need more fine-tuned
// parameters, use webapp-runner directly.
final List<String> javaOptions = Collections.emptyList();
final List<String> webappRunnerOptions = Collections.emptyList();
|
RunWebApp.run(warFilePath, javaOptions, webappRunnerOptions, webAppRunnerVersion, outputAdapter);
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/DeployMojo.java
|
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MojoExecutor.java
// public class MojoExecutor {
// public static void copyDependenciesToBuildDirectory(MavenProject mavenProject,
// MavenSession mavenSession,
// BuildPluginManager pluginManager) throws MojoExecutionException {
// executeMojo(
// plugin(
// groupId("org.apache.maven.plugins"),
// artifactId("maven-dependency-plugin"),
// version("2.4")
// ),
// goal("copy-dependencies"),
// configuration(
// element(name("outputDirectory"), "${project.build.directory}/dependency")
// ),
// executionEnvironment(
// mavenProject,
// mavenSession,
// pluginManager
// )
// );
// }
//
// public static Path createDependencyListFile(MavenProject mavenProject,
// MavenSession mavenSession,
// BuildPluginManager pluginManager) throws MojoExecutionException, IOException {
//
// Path path = Files.createTempFile("heroku-maven-plugin", "mvn-dependency-list.log");
//
// executeMojo(
// plugin(
// groupId("org.apache.maven.plugins"),
// artifactId("maven-dependency-plugin"),
// version("2.4")
// ),
// goal("list"),
// configuration(
// element(name("outputFile"), path.toString())
// ),
// executionEnvironment(
// mavenProject,
// mavenSession,
// pluginManager
// )
// );
//
// return path;
// }
// }
|
import com.heroku.sdk.maven.MojoExecutor;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
|
package com.heroku.sdk.maven.mojo;
/**
* Deploys an application to Heroku.
*/
@Mojo(name="deploy", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class DeployMojo extends AbstractHerokuDeployMojo {
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
/* We vendor all dependencies into the build directory before deploying to ensure those are copied to the source
* blob as well. This avoids that users have to explicitly copy dependencies to /target and is helpful in cases
* where the user does not have deep knowledge about the Maven build process and/or did not configure many of the
* plugins parameters.
*
* Advanced users should use DeployOnlyMojo which does not copy the dependencies.
*/
|
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MojoExecutor.java
// public class MojoExecutor {
// public static void copyDependenciesToBuildDirectory(MavenProject mavenProject,
// MavenSession mavenSession,
// BuildPluginManager pluginManager) throws MojoExecutionException {
// executeMojo(
// plugin(
// groupId("org.apache.maven.plugins"),
// artifactId("maven-dependency-plugin"),
// version("2.4")
// ),
// goal("copy-dependencies"),
// configuration(
// element(name("outputDirectory"), "${project.build.directory}/dependency")
// ),
// executionEnvironment(
// mavenProject,
// mavenSession,
// pluginManager
// )
// );
// }
//
// public static Path createDependencyListFile(MavenProject mavenProject,
// MavenSession mavenSession,
// BuildPluginManager pluginManager) throws MojoExecutionException, IOException {
//
// Path path = Files.createTempFile("heroku-maven-plugin", "mvn-dependency-list.log");
//
// executeMojo(
// plugin(
// groupId("org.apache.maven.plugins"),
// artifactId("maven-dependency-plugin"),
// version("2.4")
// ),
// goal("list"),
// configuration(
// element(name("outputFile"), path.toString())
// ),
// executionEnvironment(
// mavenProject,
// mavenSession,
// pluginManager
// )
// );
//
// return path;
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/DeployMojo.java
import com.heroku.sdk.maven.MojoExecutor;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
package com.heroku.sdk.maven.mojo;
/**
* Deploys an application to Heroku.
*/
@Mojo(name="deploy", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class DeployMojo extends AbstractHerokuDeployMojo {
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
/* We vendor all dependencies into the build directory before deploying to ensure those are copied to the source
* blob as well. This avoids that users have to explicitly copy dependencies to /target and is helpful in cases
* where the user does not have deep knowledge about the Maven build process and/or did not configure many of the
* plugins parameters.
*
* Advanced users should use DeployOnlyMojo which does not copy the dependencies.
*/
|
MojoExecutor.copyDependenciesToBuildDirectory(super.mavenProject, super.mavenSession, super.pluginManager);
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/RunWarMojo.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MavenLogOutputAdapter.java
// public class MavenLogOutputAdapter implements OutputAdapter {
// private final Log log;
// private boolean logUploadProgress;
//
// public MavenLogOutputAdapter(Log log, boolean logUploadProgress) {
// this.log = log;
// this.logUploadProgress = logUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// log.info(message);
// }
//
// @Override
// public void logDebug(String message) {
// log.debug(message);
// }
//
// @Override
// public void logWarn(String message) {
// log.warn(message);
// }
//
// @Override
// public void logError(String message) {
// log.error(message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (logUploadProgress) {
// log.debug("[" + uploaded + "/" + contentLength + "]");
// }
// }
// }
|
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.maven.MavenLogOutputAdapter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
|
package com.heroku.sdk.maven.mojo;
/**
* Starts the web application in a way that is very similar to how it is run on Heroku. JAVA_OPTS and WEBAPP_RUNNER_OPTS
* specified in configVars will also be picked up by this goal and used to run your application.
*/
@Mojo(name = "run-war", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class RunWarMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException {
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MavenLogOutputAdapter.java
// public class MavenLogOutputAdapter implements OutputAdapter {
// private final Log log;
// private boolean logUploadProgress;
//
// public MavenLogOutputAdapter(Log log, boolean logUploadProgress) {
// this.log = log;
// this.logUploadProgress = logUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// log.info(message);
// }
//
// @Override
// public void logDebug(String message) {
// log.debug(message);
// }
//
// @Override
// public void logWarn(String message) {
// log.warn(message);
// }
//
// @Override
// public void logError(String message) {
// log.error(message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (logUploadProgress) {
// log.debug("[" + uploaded + "/" + contentLength + "]");
// }
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/RunWarMojo.java
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.maven.MavenLogOutputAdapter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
package com.heroku.sdk.maven.mojo;
/**
* Starts the web application in a way that is very similar to how it is run on Heroku. JAVA_OPTS and WEBAPP_RUNNER_OPTS
* specified in configVars will also be picked up by this goal and used to run your application.
*/
@Mojo(name = "run-war", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class RunWarMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException {
|
OutputAdapter outputAdapter = new MavenLogOutputAdapter(getLog(), logProgress);
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/RunWarMojo.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MavenLogOutputAdapter.java
// public class MavenLogOutputAdapter implements OutputAdapter {
// private final Log log;
// private boolean logUploadProgress;
//
// public MavenLogOutputAdapter(Log log, boolean logUploadProgress) {
// this.log = log;
// this.logUploadProgress = logUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// log.info(message);
// }
//
// @Override
// public void logDebug(String message) {
// log.debug(message);
// }
//
// @Override
// public void logWarn(String message) {
// log.warn(message);
// }
//
// @Override
// public void logError(String message) {
// log.error(message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (logUploadProgress) {
// log.debug("[" + uploaded + "/" + contentLength + "]");
// }
// }
// }
|
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.maven.MavenLogOutputAdapter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
|
package com.heroku.sdk.maven.mojo;
/**
* Starts the web application in a way that is very similar to how it is run on Heroku. JAVA_OPTS and WEBAPP_RUNNER_OPTS
* specified in configVars will also be picked up by this goal and used to run your application.
*/
@Mojo(name = "run-war", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class RunWarMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException {
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MavenLogOutputAdapter.java
// public class MavenLogOutputAdapter implements OutputAdapter {
// private final Log log;
// private boolean logUploadProgress;
//
// public MavenLogOutputAdapter(Log log, boolean logUploadProgress) {
// this.log = log;
// this.logUploadProgress = logUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// log.info(message);
// }
//
// @Override
// public void logDebug(String message) {
// log.debug(message);
// }
//
// @Override
// public void logWarn(String message) {
// log.warn(message);
// }
//
// @Override
// public void logError(String message) {
// log.error(message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (logUploadProgress) {
// log.debug("[" + uploaded + "/" + contentLength + "]");
// }
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/RunWarMojo.java
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.maven.MavenLogOutputAdapter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
package com.heroku.sdk.maven.mojo;
/**
* Starts the web application in a way that is very similar to how it is run on Heroku. JAVA_OPTS and WEBAPP_RUNNER_OPTS
* specified in configVars will also be picked up by this goal and used to run your application.
*/
@Mojo(name = "run-war", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class RunWarMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException {
|
OutputAdapter outputAdapter = new MavenLogOutputAdapter(getLog(), logProgress);
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/RunWarMojo.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MavenLogOutputAdapter.java
// public class MavenLogOutputAdapter implements OutputAdapter {
// private final Log log;
// private boolean logUploadProgress;
//
// public MavenLogOutputAdapter(Log log, boolean logUploadProgress) {
// this.log = log;
// this.logUploadProgress = logUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// log.info(message);
// }
//
// @Override
// public void logDebug(String message) {
// log.debug(message);
// }
//
// @Override
// public void logWarn(String message) {
// log.warn(message);
// }
//
// @Override
// public void logError(String message) {
// log.error(message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (logUploadProgress) {
// log.debug("[" + uploaded + "/" + contentLength + "]");
// }
// }
// }
|
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.maven.MavenLogOutputAdapter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
|
package com.heroku.sdk.maven.mojo;
/**
* Starts the web application in a way that is very similar to how it is run on Heroku. JAVA_OPTS and WEBAPP_RUNNER_OPTS
* specified in configVars will also be picked up by this goal and used to run your application.
*/
@Mojo(name = "run-war", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class RunWarMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException {
OutputAdapter outputAdapter = new MavenLogOutputAdapter(getLog(), logProgress);
Path projectDirectory = super.mavenProject.getBasedir().toPath();
List<String> javaOptions = splitOptions(configVars.getOrDefault("JAVA_OPTS", ""));
List<String> webappRunnerOptions = splitOptions(configVars.getOrDefault("WEBAPP_RUNNER_OPTS", ""));
Path warFilePath = null;
try {
warFilePath = findWarFilePath(projectDirectory).orElseThrow(() -> new MojoExecutionException("Could not find WAR file to run!"));
} catch (IOException e) {
throw new MojoExecutionException("Could not find WAR file to run!", e);
}
try {
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/running/RunWebApp.java
// public class RunWebApp {
//
// public static void run(Path warFile, List<String> javaOptions, List<String> webappRunnerOptions, String webappRunnerVersion, OutputAdapter outputAdapter) throws IOException, InterruptedException {
// outputAdapter.logInfo("Downloading webapp-runner...");
// Path webappRunnerJarPath = null;
// try {
// webappRunnerJarPath = FileDownloader.download(WebappRunnerResolver.getUrlForVersion(webappRunnerVersion));
// } catch (FileNotFoundException e) {
// outputAdapter.logDebug(String.format("Could not download webapp-runner %s. Please check if this is a valid version.", webappRunnerVersion));
// System.exit(-1);
// }
//
// ArrayList<String> command = new ArrayList<>();
// command.add(System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
// command.addAll(javaOptions);
// command.add("-jar");
// command.add(webappRunnerJarPath.toString());
// command.addAll(webappRunnerOptions);
// command.add(warFile.toString());
//
// ProcessBuilder processBuilder = new ProcessBuilder(command.toArray(new String[0]));
// Process process = processBuilder.start();
//
// StreamGobbler stdOutStreamGobbler = new StreamGobbler(process.getInputStream(), outputAdapter);
// StreamGobbler stdErrStreamGobbler = new StreamGobbler(process.getErrorStream(), outputAdapter);
//
// stdOutStreamGobbler.start();
// stdErrStreamGobbler.start();
//
// process.waitFor();
// }
//
// private static class StreamGobbler extends Thread {
// private InputStream inputStream;
// private OutputAdapter outputAdapter;
//
// public StreamGobbler(InputStream inputStream, OutputAdapter outputAdapter) {
// super("StreamGobbler");
// this.inputStream = inputStream;
// this.outputAdapter = outputAdapter;
// }
//
// public void run() {
// try {
// BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
// String line;
//
// while ((line = br.readLine()) != null) {
// outputAdapter.logInfo(line);
// }
// } catch (IOException e) {
// outputAdapter.logError(e.getMessage(), e);
// throw new RuntimeException(e);
// }
// }
// }
// }
//
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/MavenLogOutputAdapter.java
// public class MavenLogOutputAdapter implements OutputAdapter {
// private final Log log;
// private boolean logUploadProgress;
//
// public MavenLogOutputAdapter(Log log, boolean logUploadProgress) {
// this.log = log;
// this.logUploadProgress = logUploadProgress;
// }
//
// @Override
// public void logInfo(String message) {
// log.info(message);
// }
//
// @Override
// public void logDebug(String message) {
// log.debug(message);
// }
//
// @Override
// public void logWarn(String message) {
// log.warn(message);
// }
//
// @Override
// public void logError(String message) {
// log.error(message);
// }
//
// @Override
// public void logUploadProgress(long uploaded, long contentLength) {
// if (logUploadProgress) {
// log.debug("[" + uploaded + "/" + contentLength + "]");
// }
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/RunWarMojo.java
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.lib.running.RunWebApp;
import com.heroku.sdk.maven.MavenLogOutputAdapter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
package com.heroku.sdk.maven.mojo;
/**
* Starts the web application in a way that is very similar to how it is run on Heroku. JAVA_OPTS and WEBAPP_RUNNER_OPTS
* specified in configVars will also be picked up by this goal and used to run your application.
*/
@Mojo(name = "run-war", requiresDependencyResolution = ResolutionScope.RUNTIME)
@Execute(phase = LifecyclePhase.PACKAGE)
public class RunWarMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException {
OutputAdapter outputAdapter = new MavenLogOutputAdapter(getLog(), logProgress);
Path projectDirectory = super.mavenProject.getBasedir().toPath();
List<String> javaOptions = splitOptions(configVars.getOrDefault("JAVA_OPTS", ""));
List<String> webappRunnerOptions = splitOptions(configVars.getOrDefault("WEBAPP_RUNNER_OPTS", ""));
Path warFilePath = null;
try {
warFilePath = findWarFilePath(projectDirectory).orElseThrow(() -> new MojoExecutionException("Could not find WAR file to run!"));
} catch (IOException e) {
throw new MojoExecutionException("Could not find WAR file to run!", e);
}
try {
|
RunWebApp.run(warFilePath, javaOptions, webappRunnerOptions, webappRunnerVersion, outputAdapter);
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/DashboardMojo.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/resolver/AppNameResolver.java
// public class AppNameResolver {
//
// /**
// * Resolves the Heroku app name based on the given project directory.
// * This uses the already established resolution order from 2.x to ensure backwards compatibility:
// *
// * 1. The heroku.properties file
// * 2. The heroku.appName system property
// * 3. Custom resolution
// * 4. Git remote pointing to an Heroku app
// *
// * @param projectDirectory The projects root directory.
// * @param customResolver A custom resolver for the app name. Most likely a setting from a build tool like Maven or sbt.
// * @return If possible, the resolved app name.
// * @throws IOException Resolving requires IO operations which might fail.
// */
// public static Optional<String> resolve(Path projectDirectory, Supplier<Optional<String>> customResolver) throws IOException {
// Optional<String> herokuPropertiesAppName = resolveViaHerokuPropertiesFile(projectDirectory);
// if (herokuPropertiesAppName.isPresent()) {
// return herokuPropertiesAppName;
// }
//
// Optional<String> systemPropertiesAppName = resolveViaSystemProperty();
// if (systemPropertiesAppName.isPresent()) {
// return systemPropertiesAppName;
// }
//
// Optional<String> customResolverAppName = customResolver.get();
// if (customResolverAppName.isPresent()) {
// return customResolverAppName;
// }
//
// return resolveViaHerokuGitRemote(projectDirectory);
// }
//
// private static Optional<String> resolveViaHerokuGitRemote(Path rootDirectory) throws IOException {
// try {
// Git gitRepo = Git.open(rootDirectory.toFile());
// Config config = gitRepo.getRepository().getConfig();
//
// for (String remoteName : config.getSubsections("remote")) {
// String remoteUrl = config.getString("remote", remoteName, "url");
//
// for (Pattern gitRemoteUrlAppNamePattern : GIT_REMOTE_URL_APP_NAME_PATTERNS) {
// Matcher matcher = gitRemoteUrlAppNamePattern.matcher(remoteUrl);
// if (matcher.matches()) {
// return Optional.of(matcher.group(1));
// }
// }
// }
// } catch (RepositoryNotFoundException e) {
// return Optional.empty();
// }
//
// return Optional.empty();
// }
//
// private static Optional<String> resolveViaHerokuPropertiesFile(Path rootDirectory) throws IOException {
// Properties properties = new Properties();
//
// try {
// properties.load(new FileInputStream(rootDirectory.resolve("heroku.properties").toFile()));
// } catch (FileNotFoundException e) {
// return Optional.empty();
// }
//
// return Optional.ofNullable(properties.getProperty("heroku.appName"));
// }
//
// private static Optional<String> resolveViaSystemProperty() {
// return Optional.ofNullable(System.getProperty("heroku.appName"));
// }
//
// private static final List<Pattern> GIT_REMOTE_URL_APP_NAME_PATTERNS;
//
// static {
// ArrayList<Pattern> patterns = new ArrayList<>();
// patterns.add(Pattern.compile("https://git\\.heroku\\.com/(.*?)\\.git"));
// patterns.add(Pattern.compile("git@heroku\\.com:(.*?)\\.git"));
// GIT_REMOTE_URL_APP_NAME_PATTERNS = Collections.unmodifiableList(patterns);
// }
// }
|
import com.heroku.sdk.deploy.lib.resolver.AppNameResolver;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import java.net.URI;
import java.util.Optional;
|
package com.heroku.sdk.maven.mojo;
@Mojo(name="dashboard")
public class DashboardMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
try {
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/resolver/AppNameResolver.java
// public class AppNameResolver {
//
// /**
// * Resolves the Heroku app name based on the given project directory.
// * This uses the already established resolution order from 2.x to ensure backwards compatibility:
// *
// * 1. The heroku.properties file
// * 2. The heroku.appName system property
// * 3. Custom resolution
// * 4. Git remote pointing to an Heroku app
// *
// * @param projectDirectory The projects root directory.
// * @param customResolver A custom resolver for the app name. Most likely a setting from a build tool like Maven or sbt.
// * @return If possible, the resolved app name.
// * @throws IOException Resolving requires IO operations which might fail.
// */
// public static Optional<String> resolve(Path projectDirectory, Supplier<Optional<String>> customResolver) throws IOException {
// Optional<String> herokuPropertiesAppName = resolveViaHerokuPropertiesFile(projectDirectory);
// if (herokuPropertiesAppName.isPresent()) {
// return herokuPropertiesAppName;
// }
//
// Optional<String> systemPropertiesAppName = resolveViaSystemProperty();
// if (systemPropertiesAppName.isPresent()) {
// return systemPropertiesAppName;
// }
//
// Optional<String> customResolverAppName = customResolver.get();
// if (customResolverAppName.isPresent()) {
// return customResolverAppName;
// }
//
// return resolveViaHerokuGitRemote(projectDirectory);
// }
//
// private static Optional<String> resolveViaHerokuGitRemote(Path rootDirectory) throws IOException {
// try {
// Git gitRepo = Git.open(rootDirectory.toFile());
// Config config = gitRepo.getRepository().getConfig();
//
// for (String remoteName : config.getSubsections("remote")) {
// String remoteUrl = config.getString("remote", remoteName, "url");
//
// for (Pattern gitRemoteUrlAppNamePattern : GIT_REMOTE_URL_APP_NAME_PATTERNS) {
// Matcher matcher = gitRemoteUrlAppNamePattern.matcher(remoteUrl);
// if (matcher.matches()) {
// return Optional.of(matcher.group(1));
// }
// }
// }
// } catch (RepositoryNotFoundException e) {
// return Optional.empty();
// }
//
// return Optional.empty();
// }
//
// private static Optional<String> resolveViaHerokuPropertiesFile(Path rootDirectory) throws IOException {
// Properties properties = new Properties();
//
// try {
// properties.load(new FileInputStream(rootDirectory.resolve("heroku.properties").toFile()));
// } catch (FileNotFoundException e) {
// return Optional.empty();
// }
//
// return Optional.ofNullable(properties.getProperty("heroku.appName"));
// }
//
// private static Optional<String> resolveViaSystemProperty() {
// return Optional.ofNullable(System.getProperty("heroku.appName"));
// }
//
// private static final List<Pattern> GIT_REMOTE_URL_APP_NAME_PATTERNS;
//
// static {
// ArrayList<Pattern> patterns = new ArrayList<>();
// patterns.add(Pattern.compile("https://git\\.heroku\\.com/(.*?)\\.git"));
// patterns.add(Pattern.compile("git@heroku\\.com:(.*?)\\.git"));
// GIT_REMOTE_URL_APP_NAME_PATTERNS = Collections.unmodifiableList(patterns);
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/DashboardMojo.java
import com.heroku.sdk.deploy.lib.resolver.AppNameResolver;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import java.net.URI;
import java.util.Optional;
package com.heroku.sdk.maven.mojo;
@Mojo(name="dashboard")
public class DashboardMojo extends AbstractHerokuMojo {
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
try {
|
AppNameResolver.resolve(super.mavenProject.getBasedir().toPath(), () -> Optional.ofNullable(super.appName));
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/main/java/com/heroku/sdk/deploy/api/HerokuDeployApi.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PropertiesUtils.java
// public class PropertiesUtils {
//
// public static <T> Properties loadOrEmptyFromClasspath(Class<T> clazz, String name) {
// final Properties properties = new Properties();
// try (final InputStream stream = clazz.getResourceAsStream(name)) {
// if (stream != null) {
// properties.load(stream);
// }
// } catch (IOException e) {
// // Ignore exception, this will return empty properties later.
// }
//
// return properties;
// }
//
// /**
// * Loads the pom.properties for an artifact from classpath.
// *
// * @param clazz The class of which classloader should be used to read the pom.properties
// * @param groupId The groupId for which the pom.properties should be loaded
// * @param artifactId The artifactId for which the pom.properties should be loaded
// * @param <T> The type of the class modeled by the given Class object
// * @return The loaded properties
// *
// * @see <a href="http://maven.apache.org/shared/maven-archiver/#class_archive">http://maven.apache.org/shared/maven-archiver/#class_archive</a>
// */
// public static <T> Properties loadPomPropertiesOrEmptyFromClasspath(Class<T> clazz, String groupId, String artifactId) {
// return loadOrEmptyFromClasspath(clazz, String.format("/META-INF/maven/%s/%s/pom.properties", groupId, artifactId));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/Util.java
// public class Util {
// public static Stream<String> readLinesFromInputStream(InputStream inputStream) throws IOException {
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
//
// try {
// return bufferedReader.lines();
// } catch (UncheckedIOException e) {
// throw e.getCause();
// }
// }
// }
|
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.PropertiesUtils;
import com.heroku.sdk.deploy.util.Util;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.eclipse.jgit.util.Base64;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
|
package com.heroku.sdk.deploy.api;
public class HerokuDeployApi {
private Map<String, String> httpHeaders;
public HerokuDeployApi(String client, String clientVersion, String apiKey) {
Properties pomProperties
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PropertiesUtils.java
// public class PropertiesUtils {
//
// public static <T> Properties loadOrEmptyFromClasspath(Class<T> clazz, String name) {
// final Properties properties = new Properties();
// try (final InputStream stream = clazz.getResourceAsStream(name)) {
// if (stream != null) {
// properties.load(stream);
// }
// } catch (IOException e) {
// // Ignore exception, this will return empty properties later.
// }
//
// return properties;
// }
//
// /**
// * Loads the pom.properties for an artifact from classpath.
// *
// * @param clazz The class of which classloader should be used to read the pom.properties
// * @param groupId The groupId for which the pom.properties should be loaded
// * @param artifactId The artifactId for which the pom.properties should be loaded
// * @param <T> The type of the class modeled by the given Class object
// * @return The loaded properties
// *
// * @see <a href="http://maven.apache.org/shared/maven-archiver/#class_archive">http://maven.apache.org/shared/maven-archiver/#class_archive</a>
// */
// public static <T> Properties loadPomPropertiesOrEmptyFromClasspath(Class<T> clazz, String groupId, String artifactId) {
// return loadOrEmptyFromClasspath(clazz, String.format("/META-INF/maven/%s/%s/pom.properties", groupId, artifactId));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/Util.java
// public class Util {
// public static Stream<String> readLinesFromInputStream(InputStream inputStream) throws IOException {
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
//
// try {
// return bufferedReader.lines();
// } catch (UncheckedIOException e) {
// throw e.getCause();
// }
// }
// }
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/api/HerokuDeployApi.java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.PropertiesUtils;
import com.heroku.sdk.deploy.util.Util;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.eclipse.jgit.util.Base64;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
package com.heroku.sdk.deploy.api;
public class HerokuDeployApi {
private Map<String, String> httpHeaders;
public HerokuDeployApi(String client, String clientVersion, String apiKey) {
Properties pomProperties
|
= PropertiesUtils.loadPomPropertiesOrEmptyFromClasspath(this.getClass(), "com.heroku.sdk", "heroku-deploy");
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/main/java/com/heroku/sdk/deploy/api/HerokuDeployApi.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PropertiesUtils.java
// public class PropertiesUtils {
//
// public static <T> Properties loadOrEmptyFromClasspath(Class<T> clazz, String name) {
// final Properties properties = new Properties();
// try (final InputStream stream = clazz.getResourceAsStream(name)) {
// if (stream != null) {
// properties.load(stream);
// }
// } catch (IOException e) {
// // Ignore exception, this will return empty properties later.
// }
//
// return properties;
// }
//
// /**
// * Loads the pom.properties for an artifact from classpath.
// *
// * @param clazz The class of which classloader should be used to read the pom.properties
// * @param groupId The groupId for which the pom.properties should be loaded
// * @param artifactId The artifactId for which the pom.properties should be loaded
// * @param <T> The type of the class modeled by the given Class object
// * @return The loaded properties
// *
// * @see <a href="http://maven.apache.org/shared/maven-archiver/#class_archive">http://maven.apache.org/shared/maven-archiver/#class_archive</a>
// */
// public static <T> Properties loadPomPropertiesOrEmptyFromClasspath(Class<T> clazz, String groupId, String artifactId) {
// return loadOrEmptyFromClasspath(clazz, String.format("/META-INF/maven/%s/%s/pom.properties", groupId, artifactId));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/Util.java
// public class Util {
// public static Stream<String> readLinesFromInputStream(InputStream inputStream) throws IOException {
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
//
// try {
// return bufferedReader.lines();
// } catch (UncheckedIOException e) {
// throw e.getCause();
// }
// }
// }
|
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.PropertiesUtils;
import com.heroku.sdk.deploy.util.Util;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.eclipse.jgit.util.Base64;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
|
System.getProperty("java.vendor")));
this.httpHeaders = httpHeaders;
}
public BuildInfo createBuild(String appName, URI sourceBlob, String sourceBlobVersion, List<String> buildpacks) throws IOException, HerokuDeployApiException {
// Create API payload
ObjectMapper mapper = new ObjectMapper();
ObjectNode root = mapper.createObjectNode();
ObjectNode sourceBlobObject = root.putObject("source_blob");
sourceBlobObject.put("url", sourceBlob.toString());
sourceBlobObject.put("version", sourceBlobVersion);
ArrayNode buildpacksArray = root.putArray("buildpacks");
buildpacks.forEach(buildpackString -> {
ObjectNode buildpackObjectNode = buildpacksArray.addObject();
if (buildpackString.startsWith("http")) {
buildpackObjectNode.put("url", buildpackString);
} else {
buildpackObjectNode.put("name", buildpackString);
}
});
StringEntity apiPayloadEntity = new StringEntity(root.toString());
apiPayloadEntity.setContentType("application/json");
apiPayloadEntity.setContentEncoding("UTF-8");
// Send request
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PropertiesUtils.java
// public class PropertiesUtils {
//
// public static <T> Properties loadOrEmptyFromClasspath(Class<T> clazz, String name) {
// final Properties properties = new Properties();
// try (final InputStream stream = clazz.getResourceAsStream(name)) {
// if (stream != null) {
// properties.load(stream);
// }
// } catch (IOException e) {
// // Ignore exception, this will return empty properties later.
// }
//
// return properties;
// }
//
// /**
// * Loads the pom.properties for an artifact from classpath.
// *
// * @param clazz The class of which classloader should be used to read the pom.properties
// * @param groupId The groupId for which the pom.properties should be loaded
// * @param artifactId The artifactId for which the pom.properties should be loaded
// * @param <T> The type of the class modeled by the given Class object
// * @return The loaded properties
// *
// * @see <a href="http://maven.apache.org/shared/maven-archiver/#class_archive">http://maven.apache.org/shared/maven-archiver/#class_archive</a>
// */
// public static <T> Properties loadPomPropertiesOrEmptyFromClasspath(Class<T> clazz, String groupId, String artifactId) {
// return loadOrEmptyFromClasspath(clazz, String.format("/META-INF/maven/%s/%s/pom.properties", groupId, artifactId));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/Util.java
// public class Util {
// public static Stream<String> readLinesFromInputStream(InputStream inputStream) throws IOException {
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
//
// try {
// return bufferedReader.lines();
// } catch (UncheckedIOException e) {
// throw e.getCause();
// }
// }
// }
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/api/HerokuDeployApi.java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.PropertiesUtils;
import com.heroku.sdk.deploy.util.Util;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.eclipse.jgit.util.Base64;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
System.getProperty("java.vendor")));
this.httpHeaders = httpHeaders;
}
public BuildInfo createBuild(String appName, URI sourceBlob, String sourceBlobVersion, List<String> buildpacks) throws IOException, HerokuDeployApiException {
// Create API payload
ObjectMapper mapper = new ObjectMapper();
ObjectNode root = mapper.createObjectNode();
ObjectNode sourceBlobObject = root.putObject("source_blob");
sourceBlobObject.put("url", sourceBlob.toString());
sourceBlobObject.put("version", sourceBlobVersion);
ArrayNode buildpacksArray = root.putArray("buildpacks");
buildpacks.forEach(buildpackString -> {
ObjectNode buildpackObjectNode = buildpacksArray.addObject();
if (buildpackString.startsWith("http")) {
buildpackObjectNode.put("url", buildpackString);
} else {
buildpackObjectNode.put("name", buildpackString);
}
});
StringEntity apiPayloadEntity = new StringEntity(root.toString());
apiPayloadEntity.setContentType("application/json");
apiPayloadEntity.setContentEncoding("UTF-8");
// Send request
|
CloseableHttpClient client = CustomHttpClientBuilder.build();
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/main/java/com/heroku/sdk/deploy/api/HerokuDeployApi.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PropertiesUtils.java
// public class PropertiesUtils {
//
// public static <T> Properties loadOrEmptyFromClasspath(Class<T> clazz, String name) {
// final Properties properties = new Properties();
// try (final InputStream stream = clazz.getResourceAsStream(name)) {
// if (stream != null) {
// properties.load(stream);
// }
// } catch (IOException e) {
// // Ignore exception, this will return empty properties later.
// }
//
// return properties;
// }
//
// /**
// * Loads the pom.properties for an artifact from classpath.
// *
// * @param clazz The class of which classloader should be used to read the pom.properties
// * @param groupId The groupId for which the pom.properties should be loaded
// * @param artifactId The artifactId for which the pom.properties should be loaded
// * @param <T> The type of the class modeled by the given Class object
// * @return The loaded properties
// *
// * @see <a href="http://maven.apache.org/shared/maven-archiver/#class_archive">http://maven.apache.org/shared/maven-archiver/#class_archive</a>
// */
// public static <T> Properties loadPomPropertiesOrEmptyFromClasspath(Class<T> clazz, String groupId, String artifactId) {
// return loadOrEmptyFromClasspath(clazz, String.format("/META-INF/maven/%s/%s/pom.properties", groupId, artifactId));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/Util.java
// public class Util {
// public static Stream<String> readLinesFromInputStream(InputStream inputStream) throws IOException {
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
//
// try {
// return bufferedReader.lines();
// } catch (UncheckedIOException e) {
// throw e.getCause();
// }
// }
// }
|
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.PropertiesUtils;
import com.heroku.sdk.deploy.util.Util;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.eclipse.jgit.util.Base64;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
|
HttpPost request = new HttpPost("https://api.heroku.com/apps/" + appName + "/builds");
httpHeaders.forEach(request::setHeader);
request.setEntity(apiPayloadEntity);
CloseableHttpResponse response = client.execute(request);
return handleBuildInfoResponse(appName, mapper, response);
}
public BuildInfo getBuildInfo(String appName, String buildId) throws IOException, HerokuDeployApiException {
ObjectMapper mapper = new ObjectMapper();
CloseableHttpClient client = CustomHttpClientBuilder.build();
HttpUriRequest request = new HttpGet("https://api.heroku.com/apps/" + appName + "/builds/" + buildId);
httpHeaders.forEach(request::setHeader);
CloseableHttpResponse response = client.execute(request);
return handleBuildInfoResponse(appName, mapper, response);
}
public Stream<String> followBuildOutputStream(URI buildOutputStreamUri) throws IOException {
CloseableHttpClient client = CustomHttpClientBuilder.build();
HttpGet request = new HttpGet(buildOutputStreamUri);
httpHeaders.forEach(request::setHeader);
CloseableHttpResponse response = client.execute(request);
HttpEntity responseEntity = response.getEntity();
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PropertiesUtils.java
// public class PropertiesUtils {
//
// public static <T> Properties loadOrEmptyFromClasspath(Class<T> clazz, String name) {
// final Properties properties = new Properties();
// try (final InputStream stream = clazz.getResourceAsStream(name)) {
// if (stream != null) {
// properties.load(stream);
// }
// } catch (IOException e) {
// // Ignore exception, this will return empty properties later.
// }
//
// return properties;
// }
//
// /**
// * Loads the pom.properties for an artifact from classpath.
// *
// * @param clazz The class of which classloader should be used to read the pom.properties
// * @param groupId The groupId for which the pom.properties should be loaded
// * @param artifactId The artifactId for which the pom.properties should be loaded
// * @param <T> The type of the class modeled by the given Class object
// * @return The loaded properties
// *
// * @see <a href="http://maven.apache.org/shared/maven-archiver/#class_archive">http://maven.apache.org/shared/maven-archiver/#class_archive</a>
// */
// public static <T> Properties loadPomPropertiesOrEmptyFromClasspath(Class<T> clazz, String groupId, String artifactId) {
// return loadOrEmptyFromClasspath(clazz, String.format("/META-INF/maven/%s/%s/pom.properties", groupId, artifactId));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/Util.java
// public class Util {
// public static Stream<String> readLinesFromInputStream(InputStream inputStream) throws IOException {
// BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
//
// try {
// return bufferedReader.lines();
// } catch (UncheckedIOException e) {
// throw e.getCause();
// }
// }
// }
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/api/HerokuDeployApi.java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.PropertiesUtils;
import com.heroku.sdk.deploy.util.Util;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.eclipse.jgit.util.Base64;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
HttpPost request = new HttpPost("https://api.heroku.com/apps/" + appName + "/builds");
httpHeaders.forEach(request::setHeader);
request.setEntity(apiPayloadEntity);
CloseableHttpResponse response = client.execute(request);
return handleBuildInfoResponse(appName, mapper, response);
}
/**
 * Fetches information about a specific build of a Heroku app via the Platform API
 * ({@code GET /apps/{appName}/builds/{buildId}}).
 *
 * @param appName name of the Heroku app the build belongs to
 * @param buildId identifier of the build to look up
 * @return the parsed build information
 * @throws IOException if the HTTP request or response handling fails
 * @throws HerokuDeployApiException propagated from {@code handleBuildInfoResponse}
 *         when the API response indicates an error
 */
public BuildInfo getBuildInfo(String appName, String buildId) throws IOException, HerokuDeployApiException {
    ObjectMapper mapper = new ObjectMapper();

    HttpUriRequest request = new HttpGet("https://api.heroku.com/apps/" + appName + "/builds/" + buildId);
    httpHeaders.forEach(request::setHeader);

    // try-with-resources: the original leaked both the client and the response.
    // Safe here because handleBuildInfoResponse fully consumes the response body
    // before returning (unlike followBuildOutputStream, which streams lazily).
    try (CloseableHttpClient client = CustomHttpClientBuilder.build();
         CloseableHttpResponse response = client.execute(request)) {
        return handleBuildInfoResponse(appName, mapper, response);
    }
}
public Stream<String> followBuildOutputStream(URI buildOutputStreamUri) throws IOException {
CloseableHttpClient client = CustomHttpClientBuilder.build();
HttpGet request = new HttpGet(buildOutputStreamUri);
httpHeaders.forEach(request::setHeader);
CloseableHttpResponse response = client.execute(request);
HttpEntity responseEntity = response.getEntity();
|
return Util.readLinesFromInputStream(responseEntity.getContent());
|
heroku/heroku-maven-plugin
|
heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/DeployJar.java
|
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StandaloneDeploy.java
// public class StandaloneDeploy {
// private static OutputAdapter outputAdapter = new StdOutOutputAdapter(true);
//
// public static void deploy(Mode mode) throws IOException, InterruptedException {
// final Path projectDirectory = Paths.get(System.getProperty("user.dir"));
//
//
// final Optional<String> appName = AppNameResolver.resolve(projectDirectory, Optional::empty);
// if (!appName.isPresent()) {
// outputAdapter.logError("Heroku app name must be provided.");
// System.exit(-1);
// }
//
// Optional<String> apiKey = ApiKeyResolver.resolve(projectDirectory);
// if (!apiKey.isPresent()) {
// outputAdapter.logError("Heroku API key must be provided.");
// System.exit(-1);
// }
//
// Procfile defaultProcfile = Procfile.empty();
// List<Path> includedPaths = getIncludedPathsFromProperties();
//
// switch (mode) {
// case JAR:
// final String herokuJarFileSystemProperty = System.getProperty("heroku.jarFile");
// final String herokuJarOptsSystemProperty = System.getProperty("heroku.jarOpts", "");
//
// if (herokuJarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing JAR file must be provided with heroku.jarFile system property!");
// System.exit(-1);
// }
//
// final Path localJarFilePath = Paths.get(herokuJarFileSystemProperty);
// includedPaths.add(localJarFilePath);
//
// String jarCommand = String.format(
// "java $JAVA_OPTS -jar %s %s $JAR_OPTS",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localJarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""),
// herokuJarOptsSystemProperty);
//
// defaultProcfile = Procfile.singleton("web", jarCommand);
// break;
// case WAR:
// final String herokuWarFileSystemProperty = System.getProperty("heroku.warFile");
//
// if (herokuWarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing WAR file must be provided with heroku.warFile system property!");
// System.exit(-1);
// }
//
// final Path localWarFilePath = Paths.get(herokuWarFileSystemProperty);
// includedPaths.add(localWarFilePath);
//
// String warCommand = String.format(
// "java $JAVA_OPTS -jar webapp-runner.jar $WEBAPP_RUNNER_OPTS --port $PORT ./ %s",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localWarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""));
//
// defaultProcfile = Procfile.singleton("web", warCommand);
// break;
// }
//
// SourceBlobDescriptor sourceBlobDescriptor = JvmProjectSourceBlobCreator.create(
// projectDirectory,
// "heroku-deploy-standalone",
// includedPaths,
// Procfile::empty,
// defaultProcfile,
// Optional::empty,
// outputAdapter
// );
//
// if (mode == Mode.WAR) {
// URI webappRunnerUri = Optional
// .ofNullable(System.getProperty("heroku.webappRunnerUrl", null))
// .map(URI::create)
// .orElseGet(() -> {
// String version = System.getProperty("heroku.webappRunnerVersion", Constants.DEFAULT_WEBAPP_RUNNER_VERSION);
// return WebappRunnerResolver.getUrlForVersion(version);
// });
//
// Path webappRunnerLocalPath = FileDownloader.download(webappRunnerUri);
// sourceBlobDescriptor.addLocalPath("webapp-runner.jar", webappRunnerLocalPath, true);
// }
//
// Path sourceBlobArchive = SourceBlobPackager.pack(sourceBlobDescriptor, outputAdapter);
//
// DeploymentDescriptor deploymentDescriptor = new DeploymentDescriptor(
// appName.get(),
// getBuildpacksFromProperties(),
// Collections.emptyMap(),
// sourceBlobArchive,
// GitUtils.getHeadCommitHash(projectDirectory).orElse("unknown"));
//
// Properties pomProperties = PropertiesUtils.loadPomPropertiesOrEmptyFromClasspath(StandaloneDeploy.class, "com.heroku.sdk", "heroku-deploy-standalone");
//
// Deployer.deploy(
// apiKey.get(),
// "heroku-deploy-standalone",
// pomProperties.getProperty("version", "unknown"),
// deploymentDescriptor,
// outputAdapter);
// }
//
// private static List<Path> getIncludedPathsFromProperties() {
// final String herokuIncludes = System.getProperty("heroku.includes");
//
// if (herokuIncludes == null) {
// return new ArrayList<>();
// }
//
// List<Path> includedPaths = new ArrayList<>();
// for (String filePathString : herokuIncludes.split(File.pathSeparator)) {
// includedPaths.add(Paths.get(filePathString));
// }
//
// return includedPaths;
// }
//
// private static List<String> getBuildpacksFromProperties() {
// String buildpacksString = System.getProperty("heroku.buildpacks");
//
// if (buildpacksString == null) {
// return Collections.emptyList();
// }
//
// String buildpacksDelim = System.getProperty("heroku.buildpacksDelim", ",");
// return Arrays.asList(buildpacksString.split(Pattern.quote(buildpacksDelim)));
// }
//
// public enum Mode {
// JAR, WAR
// }
// }
|
import com.heroku.sdk.deploy.standalone.StandaloneDeploy;
import java.io.IOException;
|
package com.heroku.sdk.deploy;
// Entry point for standalone JAR deployment. Located in this package to provide backwards compatibility with 2.x.
public class DeployJar {
public static void main(String[] args) throws IOException, InterruptedException {
|
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StandaloneDeploy.java
// public class StandaloneDeploy {
// private static OutputAdapter outputAdapter = new StdOutOutputAdapter(true);
//
// public static void deploy(Mode mode) throws IOException, InterruptedException {
// final Path projectDirectory = Paths.get(System.getProperty("user.dir"));
//
//
// final Optional<String> appName = AppNameResolver.resolve(projectDirectory, Optional::empty);
// if (!appName.isPresent()) {
// outputAdapter.logError("Heroku app name must be provided.");
// System.exit(-1);
// }
//
// Optional<String> apiKey = ApiKeyResolver.resolve(projectDirectory);
// if (!apiKey.isPresent()) {
// outputAdapter.logError("Heroku API key must be provided.");
// System.exit(-1);
// }
//
// Procfile defaultProcfile = Procfile.empty();
// List<Path> includedPaths = getIncludedPathsFromProperties();
//
// switch (mode) {
// case JAR:
// final String herokuJarFileSystemProperty = System.getProperty("heroku.jarFile");
// final String herokuJarOptsSystemProperty = System.getProperty("heroku.jarOpts", "");
//
// if (herokuJarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing JAR file must be provided with heroku.jarFile system property!");
// System.exit(-1);
// }
//
// final Path localJarFilePath = Paths.get(herokuJarFileSystemProperty);
// includedPaths.add(localJarFilePath);
//
// String jarCommand = String.format(
// "java $JAVA_OPTS -jar %s %s $JAR_OPTS",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localJarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""),
// herokuJarOptsSystemProperty);
//
// defaultProcfile = Procfile.singleton("web", jarCommand);
// break;
// case WAR:
// final String herokuWarFileSystemProperty = System.getProperty("heroku.warFile");
//
// if (herokuWarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing WAR file must be provided with heroku.warFile system property!");
// System.exit(-1);
// }
//
// final Path localWarFilePath = Paths.get(herokuWarFileSystemProperty);
// includedPaths.add(localWarFilePath);
//
// String warCommand = String.format(
// "java $JAVA_OPTS -jar webapp-runner.jar $WEBAPP_RUNNER_OPTS --port $PORT ./ %s",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localWarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""));
//
// defaultProcfile = Procfile.singleton("web", warCommand);
// break;
// }
//
// SourceBlobDescriptor sourceBlobDescriptor = JvmProjectSourceBlobCreator.create(
// projectDirectory,
// "heroku-deploy-standalone",
// includedPaths,
// Procfile::empty,
// defaultProcfile,
// Optional::empty,
// outputAdapter
// );
//
// if (mode == Mode.WAR) {
// URI webappRunnerUri = Optional
// .ofNullable(System.getProperty("heroku.webappRunnerUrl", null))
// .map(URI::create)
// .orElseGet(() -> {
// String version = System.getProperty("heroku.webappRunnerVersion", Constants.DEFAULT_WEBAPP_RUNNER_VERSION);
// return WebappRunnerResolver.getUrlForVersion(version);
// });
//
// Path webappRunnerLocalPath = FileDownloader.download(webappRunnerUri);
// sourceBlobDescriptor.addLocalPath("webapp-runner.jar", webappRunnerLocalPath, true);
// }
//
// Path sourceBlobArchive = SourceBlobPackager.pack(sourceBlobDescriptor, outputAdapter);
//
// DeploymentDescriptor deploymentDescriptor = new DeploymentDescriptor(
// appName.get(),
// getBuildpacksFromProperties(),
// Collections.emptyMap(),
// sourceBlobArchive,
// GitUtils.getHeadCommitHash(projectDirectory).orElse("unknown"));
//
// Properties pomProperties = PropertiesUtils.loadPomPropertiesOrEmptyFromClasspath(StandaloneDeploy.class, "com.heroku.sdk", "heroku-deploy-standalone");
//
// Deployer.deploy(
// apiKey.get(),
// "heroku-deploy-standalone",
// pomProperties.getProperty("version", "unknown"),
// deploymentDescriptor,
// outputAdapter);
// }
//
// private static List<Path> getIncludedPathsFromProperties() {
// final String herokuIncludes = System.getProperty("heroku.includes");
//
// if (herokuIncludes == null) {
// return new ArrayList<>();
// }
//
// List<Path> includedPaths = new ArrayList<>();
// for (String filePathString : herokuIncludes.split(File.pathSeparator)) {
// includedPaths.add(Paths.get(filePathString));
// }
//
// return includedPaths;
// }
//
// private static List<String> getBuildpacksFromProperties() {
// String buildpacksString = System.getProperty("heroku.buildpacks");
//
// if (buildpacksString == null) {
// return Collections.emptyList();
// }
//
// String buildpacksDelim = System.getProperty("heroku.buildpacksDelim", ",");
// return Arrays.asList(buildpacksString.split(Pattern.quote(buildpacksDelim)));
// }
//
// public enum Mode {
// JAR, WAR
// }
// }
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/DeployJar.java
import com.heroku.sdk.deploy.standalone.StandaloneDeploy;
import java.io.IOException;
package com.heroku.sdk.deploy;
// Entry point for standalone JAR deployment. Located in this package to provide backwards compatibility with 2.x.
public class DeployJar {
public static void main(String[] args) throws IOException, InterruptedException {
|
StandaloneDeploy.deploy(StandaloneDeploy.Mode.JAR);
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/test/java/com/heroku/sdk/deploy/lib/PathUtilsTest.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PathUtils.java
// public class PathUtils {
//
// public static List<Path> normalizeAll(Path basePath, List<Path> paths) {
// List<Path> normalizedPaths = new ArrayList<>();
// for (Path path : paths) {
// normalize(basePath, path).ifPresent(normalizedPaths::add);
// }
//
// return normalizedPaths;
// }
//
// public static Optional<Path> normalize(Path basePath, Path path) {
// Path absoluteBasePath = basePath.toAbsolutePath();
// Path normalizedAbsolutePath = absoluteBasePath.resolve(path).normalize();
//
// if (normalizedAbsolutePath.startsWith(absoluteBasePath)) {
// return Optional.of(absoluteBasePath.relativize(normalizedAbsolutePath));
// }
//
// return Optional.empty();
// }
//
// public static boolean isValidPath(Path basePath, Path path) {
// return normalize(basePath, path).isPresent();
// }
//
// public static List<Path> expandDirectories(Path basePath, List<Path> paths) throws IOException {
// ArrayList<Path> result = new ArrayList<>();
// for (Path path : paths) {
// result.addAll(expandDirectory(basePath, path));
// }
//
// return result;
// }
//
// public static List<Path> expandDirectory(Path basePath, Path path) throws IOException {
// return Files
// .walk(basePath.resolve(path).normalize())
// .filter(subPath -> !Files.isDirectory(subPath))
// .map(subPath -> normalize(basePath, subPath))
// .filter(Optional::isPresent)
// .map(Optional::get)
// .collect(Collectors.toList());
// }
//
// public static String separatorsToUnix(Path path) {
// // Path will normalize separators back to Windows when run on Windows. We have to fall back to a String here.
// return path.toString().replace('\\', '/');
// }
// }
|
import com.heroku.sdk.deploy.util.PathUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
|
package com.heroku.sdk.deploy.lib;
public class PathUtilsTest {
private Path basePath = Paths.get("/home/user/projects/project");
private Path existingBasePath;
@Test
public void testNormalizeAll() {
ArrayList<Path> pathsToNormalize = new ArrayList<>();
ArrayList<Path> expectedNormalizedPaths = new ArrayList<>();
pathsToNormalize.add(Paths.get("README.md"));
expectedNormalizedPaths.add(Paths.get("README.md"));
pathsToNormalize.add(Paths.get("docs//HACKING.md"));
expectedNormalizedPaths.add(Paths.get("docs/HACKING.md"));
pathsToNormalize.add(Paths.get("target/dependencies/../project.war"));
expectedNormalizedPaths.add(Paths.get("target/project.war"));
pathsToNormalize.add(Paths.get("/home/user/projects/project/target/dependencies/dep1.jar"));
expectedNormalizedPaths.add(Paths.get("target/dependencies/dep1.jar"));
pathsToNormalize.add(Paths.get("../../projects/project/start.sh"));
expectedNormalizedPaths.add(Paths.get("start.sh"));
pathsToNormalize.add(Paths.get("../README.md"));
// We expect this to be filtered out, hence no expectedNormalizedPaths entry here.
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PathUtils.java
// public class PathUtils {
//
// public static List<Path> normalizeAll(Path basePath, List<Path> paths) {
// List<Path> normalizedPaths = new ArrayList<>();
// for (Path path : paths) {
// normalize(basePath, path).ifPresent(normalizedPaths::add);
// }
//
// return normalizedPaths;
// }
//
// public static Optional<Path> normalize(Path basePath, Path path) {
// Path absoluteBasePath = basePath.toAbsolutePath();
// Path normalizedAbsolutePath = absoluteBasePath.resolve(path).normalize();
//
// if (normalizedAbsolutePath.startsWith(absoluteBasePath)) {
// return Optional.of(absoluteBasePath.relativize(normalizedAbsolutePath));
// }
//
// return Optional.empty();
// }
//
// public static boolean isValidPath(Path basePath, Path path) {
// return normalize(basePath, path).isPresent();
// }
//
// public static List<Path> expandDirectories(Path basePath, List<Path> paths) throws IOException {
// ArrayList<Path> result = new ArrayList<>();
// for (Path path : paths) {
// result.addAll(expandDirectory(basePath, path));
// }
//
// return result;
// }
//
// public static List<Path> expandDirectory(Path basePath, Path path) throws IOException {
// return Files
// .walk(basePath.resolve(path).normalize())
// .filter(subPath -> !Files.isDirectory(subPath))
// .map(subPath -> normalize(basePath, subPath))
// .filter(Optional::isPresent)
// .map(Optional::get)
// .collect(Collectors.toList());
// }
//
// public static String separatorsToUnix(Path path) {
// // Path will normalize separators back to Windows when run on Windows. We have to fall back to a String here.
// return path.toString().replace('\\', '/');
// }
// }
// Path: heroku-deploy/src/test/java/com/heroku/sdk/deploy/lib/PathUtilsTest.java
import com.heroku.sdk.deploy.util.PathUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
package com.heroku.sdk.deploy.lib;
public class PathUtilsTest {
private Path basePath = Paths.get("/home/user/projects/project");
private Path existingBasePath;
@Test
public void testNormalizeAll() {
ArrayList<Path> pathsToNormalize = new ArrayList<>();
ArrayList<Path> expectedNormalizedPaths = new ArrayList<>();
pathsToNormalize.add(Paths.get("README.md"));
expectedNormalizedPaths.add(Paths.get("README.md"));
pathsToNormalize.add(Paths.get("docs//HACKING.md"));
expectedNormalizedPaths.add(Paths.get("docs/HACKING.md"));
pathsToNormalize.add(Paths.get("target/dependencies/../project.war"));
expectedNormalizedPaths.add(Paths.get("target/project.war"));
pathsToNormalize.add(Paths.get("/home/user/projects/project/target/dependencies/dep1.jar"));
expectedNormalizedPaths.add(Paths.get("target/dependencies/dep1.jar"));
pathsToNormalize.add(Paths.get("../../projects/project/start.sh"));
expectedNormalizedPaths.add(Paths.get("start.sh"));
pathsToNormalize.add(Paths.get("../README.md"));
// We expect this to be filtered out, hence no expectedNormalizedPaths entry here.
|
List<Path> normalizedPaths = PathUtils.normalizeAll(basePath, pathsToNormalize);
|
heroku/heroku-maven-plugin
|
heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/DeployWar.java
|
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StandaloneDeploy.java
// public class StandaloneDeploy {
// private static OutputAdapter outputAdapter = new StdOutOutputAdapter(true);
//
// public static void deploy(Mode mode) throws IOException, InterruptedException {
// final Path projectDirectory = Paths.get(System.getProperty("user.dir"));
//
//
// final Optional<String> appName = AppNameResolver.resolve(projectDirectory, Optional::empty);
// if (!appName.isPresent()) {
// outputAdapter.logError("Heroku app name must be provided.");
// System.exit(-1);
// }
//
// Optional<String> apiKey = ApiKeyResolver.resolve(projectDirectory);
// if (!apiKey.isPresent()) {
// outputAdapter.logError("Heroku API key must be provided.");
// System.exit(-1);
// }
//
// Procfile defaultProcfile = Procfile.empty();
// List<Path> includedPaths = getIncludedPathsFromProperties();
//
// switch (mode) {
// case JAR:
// final String herokuJarFileSystemProperty = System.getProperty("heroku.jarFile");
// final String herokuJarOptsSystemProperty = System.getProperty("heroku.jarOpts", "");
//
// if (herokuJarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing JAR file must be provided with heroku.jarFile system property!");
// System.exit(-1);
// }
//
// final Path localJarFilePath = Paths.get(herokuJarFileSystemProperty);
// includedPaths.add(localJarFilePath);
//
// String jarCommand = String.format(
// "java $JAVA_OPTS -jar %s %s $JAR_OPTS",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localJarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""),
// herokuJarOptsSystemProperty);
//
// defaultProcfile = Procfile.singleton("web", jarCommand);
// break;
// case WAR:
// final String herokuWarFileSystemProperty = System.getProperty("heroku.warFile");
//
// if (herokuWarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing WAR file must be provided with heroku.warFile system property!");
// System.exit(-1);
// }
//
// final Path localWarFilePath = Paths.get(herokuWarFileSystemProperty);
// includedPaths.add(localWarFilePath);
//
// String warCommand = String.format(
// "java $JAVA_OPTS -jar webapp-runner.jar $WEBAPP_RUNNER_OPTS --port $PORT ./ %s",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localWarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""));
//
// defaultProcfile = Procfile.singleton("web", warCommand);
// break;
// }
//
// SourceBlobDescriptor sourceBlobDescriptor = JvmProjectSourceBlobCreator.create(
// projectDirectory,
// "heroku-deploy-standalone",
// includedPaths,
// Procfile::empty,
// defaultProcfile,
// Optional::empty,
// outputAdapter
// );
//
// if (mode == Mode.WAR) {
// URI webappRunnerUri = Optional
// .ofNullable(System.getProperty("heroku.webappRunnerUrl", null))
// .map(URI::create)
// .orElseGet(() -> {
// String version = System.getProperty("heroku.webappRunnerVersion", Constants.DEFAULT_WEBAPP_RUNNER_VERSION);
// return WebappRunnerResolver.getUrlForVersion(version);
// });
//
// Path webappRunnerLocalPath = FileDownloader.download(webappRunnerUri);
// sourceBlobDescriptor.addLocalPath("webapp-runner.jar", webappRunnerLocalPath, true);
// }
//
// Path sourceBlobArchive = SourceBlobPackager.pack(sourceBlobDescriptor, outputAdapter);
//
// DeploymentDescriptor deploymentDescriptor = new DeploymentDescriptor(
// appName.get(),
// getBuildpacksFromProperties(),
// Collections.emptyMap(),
// sourceBlobArchive,
// GitUtils.getHeadCommitHash(projectDirectory).orElse("unknown"));
//
// Properties pomProperties = PropertiesUtils.loadPomPropertiesOrEmptyFromClasspath(StandaloneDeploy.class, "com.heroku.sdk", "heroku-deploy-standalone");
//
// Deployer.deploy(
// apiKey.get(),
// "heroku-deploy-standalone",
// pomProperties.getProperty("version", "unknown"),
// deploymentDescriptor,
// outputAdapter);
// }
//
// private static List<Path> getIncludedPathsFromProperties() {
// final String herokuIncludes = System.getProperty("heroku.includes");
//
// if (herokuIncludes == null) {
// return new ArrayList<>();
// }
//
// List<Path> includedPaths = new ArrayList<>();
// for (String filePathString : herokuIncludes.split(File.pathSeparator)) {
// includedPaths.add(Paths.get(filePathString));
// }
//
// return includedPaths;
// }
//
// private static List<String> getBuildpacksFromProperties() {
// String buildpacksString = System.getProperty("heroku.buildpacks");
//
// if (buildpacksString == null) {
// return Collections.emptyList();
// }
//
// String buildpacksDelim = System.getProperty("heroku.buildpacksDelim", ",");
// return Arrays.asList(buildpacksString.split(Pattern.quote(buildpacksDelim)));
// }
//
// public enum Mode {
// JAR, WAR
// }
// }
|
import com.heroku.sdk.deploy.standalone.StandaloneDeploy;
import java.io.IOException;
|
package com.heroku.sdk.deploy;
// Entry point for standalone WAR deployment. Located in this package to provide backwards compatibility with 2.x.
public class DeployWar {
public static void main(String[] args) throws IOException, InterruptedException {
|
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/standalone/StandaloneDeploy.java
// public class StandaloneDeploy {
// private static OutputAdapter outputAdapter = new StdOutOutputAdapter(true);
//
// public static void deploy(Mode mode) throws IOException, InterruptedException {
// final Path projectDirectory = Paths.get(System.getProperty("user.dir"));
//
//
// final Optional<String> appName = AppNameResolver.resolve(projectDirectory, Optional::empty);
// if (!appName.isPresent()) {
// outputAdapter.logError("Heroku app name must be provided.");
// System.exit(-1);
// }
//
// Optional<String> apiKey = ApiKeyResolver.resolve(projectDirectory);
// if (!apiKey.isPresent()) {
// outputAdapter.logError("Heroku API key must be provided.");
// System.exit(-1);
// }
//
// Procfile defaultProcfile = Procfile.empty();
// List<Path> includedPaths = getIncludedPathsFromProperties();
//
// switch (mode) {
// case JAR:
// final String herokuJarFileSystemProperty = System.getProperty("heroku.jarFile");
// final String herokuJarOptsSystemProperty = System.getProperty("heroku.jarOpts", "");
//
// if (herokuJarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing JAR file must be provided with heroku.jarFile system property!");
// System.exit(-1);
// }
//
// final Path localJarFilePath = Paths.get(herokuJarFileSystemProperty);
// includedPaths.add(localJarFilePath);
//
// String jarCommand = String.format(
// "java $JAVA_OPTS -jar %s %s $JAR_OPTS",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localJarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""),
// herokuJarOptsSystemProperty);
//
// defaultProcfile = Procfile.singleton("web", jarCommand);
// break;
// case WAR:
// final String herokuWarFileSystemProperty = System.getProperty("heroku.warFile");
//
// if (herokuWarFileSystemProperty == null) {
// outputAdapter.logError("Path to existing WAR file must be provided with heroku.warFile system property!");
// System.exit(-1);
// }
//
// final Path localWarFilePath = Paths.get(herokuWarFileSystemProperty);
// includedPaths.add(localWarFilePath);
//
// String warCommand = String.format(
// "java $JAVA_OPTS -jar webapp-runner.jar $WEBAPP_RUNNER_OPTS --port $PORT ./ %s",
// // We fall back to an empty string if the path cannot be normalized. This will result in an
// // user-readable error from JvmProjectSourceBlobCreator and the Procfile will never be deployed.
// PathUtils.normalize(projectDirectory, localWarFilePath)
// .map(PathUtils::separatorsToUnix)
// .orElse(""));
//
// defaultProcfile = Procfile.singleton("web", warCommand);
// break;
// }
//
// SourceBlobDescriptor sourceBlobDescriptor = JvmProjectSourceBlobCreator.create(
// projectDirectory,
// "heroku-deploy-standalone",
// includedPaths,
// Procfile::empty,
// defaultProcfile,
// Optional::empty,
// outputAdapter
// );
//
// if (mode == Mode.WAR) {
// URI webappRunnerUri = Optional
// .ofNullable(System.getProperty("heroku.webappRunnerUrl", null))
// .map(URI::create)
// .orElseGet(() -> {
// String version = System.getProperty("heroku.webappRunnerVersion", Constants.DEFAULT_WEBAPP_RUNNER_VERSION);
// return WebappRunnerResolver.getUrlForVersion(version);
// });
//
// Path webappRunnerLocalPath = FileDownloader.download(webappRunnerUri);
// sourceBlobDescriptor.addLocalPath("webapp-runner.jar", webappRunnerLocalPath, true);
// }
//
// Path sourceBlobArchive = SourceBlobPackager.pack(sourceBlobDescriptor, outputAdapter);
//
// DeploymentDescriptor deploymentDescriptor = new DeploymentDescriptor(
// appName.get(),
// getBuildpacksFromProperties(),
// Collections.emptyMap(),
// sourceBlobArchive,
// GitUtils.getHeadCommitHash(projectDirectory).orElse("unknown"));
//
// Properties pomProperties = PropertiesUtils.loadPomPropertiesOrEmptyFromClasspath(StandaloneDeploy.class, "com.heroku.sdk", "heroku-deploy-standalone");
//
// Deployer.deploy(
// apiKey.get(),
// "heroku-deploy-standalone",
// pomProperties.getProperty("version", "unknown"),
// deploymentDescriptor,
// outputAdapter);
// }
//
// private static List<Path> getIncludedPathsFromProperties() {
// final String herokuIncludes = System.getProperty("heroku.includes");
//
// if (herokuIncludes == null) {
// return new ArrayList<>();
// }
//
// List<Path> includedPaths = new ArrayList<>();
// for (String filePathString : herokuIncludes.split(File.pathSeparator)) {
// includedPaths.add(Paths.get(filePathString));
// }
//
// return includedPaths;
// }
//
// private static List<String> getBuildpacksFromProperties() {
// String buildpacksString = System.getProperty("heroku.buildpacks");
//
// if (buildpacksString == null) {
// return Collections.emptyList();
// }
//
// String buildpacksDelim = System.getProperty("heroku.buildpacksDelim", ",");
// return Arrays.asList(buildpacksString.split(Pattern.quote(buildpacksDelim)));
// }
//
// public enum Mode {
// JAR, WAR
// }
// }
// Path: heroku-deploy-standalone/src/main/java/com/heroku/sdk/deploy/DeployWar.java
import com.heroku.sdk.deploy.standalone.StandaloneDeploy;
import java.io.IOException;
package com.heroku.sdk.deploy;
// Entry point for standalone WAR deployment. Located in this package to provide backwards compatibility with 2.x.
public class DeployWar {
public static void main(String[] args) throws IOException, InterruptedException {
|
StandaloneDeploy.deploy(StandaloneDeploy.Mode.WAR);
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/deploymemt/Deployer.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/io/UploadProgressHttpEntity.java
// public class UploadProgressHttpEntity implements HttpEntity {
// private HttpEntity wrappedEntity;
// private Consumer<Long> progressConsumer;
//
// public UploadProgressHttpEntity(HttpEntity wrappedEntity, Consumer<Long> progressConsumer) {
// this.wrappedEntity = wrappedEntity;
// this.progressConsumer = progressConsumer;
// }
//
// @Override
// public boolean isRepeatable() {
// return wrappedEntity.isRepeatable();
// }
//
// @Override
// public boolean isChunked() {
// return wrappedEntity.isChunked();
// }
//
// @Override
// public long getContentLength() {
// return wrappedEntity.getContentLength();
// }
//
// @Override
// public Header getContentType() {
// return wrappedEntity.getContentType();
// }
//
// @Override
// public Header getContentEncoding() {
// return wrappedEntity.getContentEncoding();
// }
//
// @Override
// public InputStream getContent() throws IOException, UnsupportedOperationException {
// return wrappedEntity.getContent();
// }
//
// @Override
// public void writeTo(OutputStream outputStream) throws IOException {
// CountingOutputStream countingOutputStream = new CountingOutputStream(outputStream, progressConsumer);
// wrappedEntity.writeTo(countingOutputStream);
// }
//
// @Override
// public boolean isStreaming() {
// return wrappedEntity.isStreaming();
// }
//
// @Override
// @SuppressWarnings("deprecation")
// public void consumeContent() throws IOException {
// wrappedEntity.consumeContent();
// }
// }
|
import com.heroku.api.HerokuAPI;
import com.heroku.api.Source;
import com.heroku.sdk.deploy.api.*;
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.io.UploadProgressHttpEntity;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import javax.net.ssl.SSLHandshakeException;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
|
outputAdapter.logWarn(String.format("Could not get updated build information. Will try again for some time... (%s)", e.getMessage()));
}
if (!buildInfo.status.equals("succeeded")) {
outputAdapter.logDebug("Failed Build ID: " + buildInfo.id);
outputAdapter.logDebug("Failed Build Status: " + buildInfo.status);
outputAdapter.logDebug("Failed Build UpdatedAt: " + buildInfo.updatedAt);
return false;
}
outputAdapter.logInfo("-----> Done");
return true;
}
private static BuildInfo pollForNonPendingBuildInfo(String appName, String buildId, HerokuDeployApi herokuDeployApi) throws IOException, InterruptedException, HerokuDeployApiException {
for (int i = 0; i < 15; i++) {
BuildInfo latestBuildInfo = herokuDeployApi.getBuildInfo(appName, buildId);
if (!latestBuildInfo.status.equals("pending")) {
return latestBuildInfo;
}
Thread.sleep(2000);
}
return herokuDeployApi.getBuildInfo(appName, buildId);
}
private static void uploadSourceBlob(Path path, URI destination, BiConsumer<Long, Long> progressConsumer) throws IOException {
long fileSize = Files.size(path);
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/io/UploadProgressHttpEntity.java
// public class UploadProgressHttpEntity implements HttpEntity {
// private HttpEntity wrappedEntity;
// private Consumer<Long> progressConsumer;
//
// public UploadProgressHttpEntity(HttpEntity wrappedEntity, Consumer<Long> progressConsumer) {
// this.wrappedEntity = wrappedEntity;
// this.progressConsumer = progressConsumer;
// }
//
// @Override
// public boolean isRepeatable() {
// return wrappedEntity.isRepeatable();
// }
//
// @Override
// public boolean isChunked() {
// return wrappedEntity.isChunked();
// }
//
// @Override
// public long getContentLength() {
// return wrappedEntity.getContentLength();
// }
//
// @Override
// public Header getContentType() {
// return wrappedEntity.getContentType();
// }
//
// @Override
// public Header getContentEncoding() {
// return wrappedEntity.getContentEncoding();
// }
//
// @Override
// public InputStream getContent() throws IOException, UnsupportedOperationException {
// return wrappedEntity.getContent();
// }
//
// @Override
// public void writeTo(OutputStream outputStream) throws IOException {
// CountingOutputStream countingOutputStream = new CountingOutputStream(outputStream, progressConsumer);
// wrappedEntity.writeTo(countingOutputStream);
// }
//
// @Override
// public boolean isStreaming() {
// return wrappedEntity.isStreaming();
// }
//
// @Override
// @SuppressWarnings("deprecation")
// public void consumeContent() throws IOException {
// wrappedEntity.consumeContent();
// }
// }
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/deploymemt/Deployer.java
import com.heroku.api.HerokuAPI;
import com.heroku.api.Source;
import com.heroku.sdk.deploy.api.*;
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.io.UploadProgressHttpEntity;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import javax.net.ssl.SSLHandshakeException;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
outputAdapter.logWarn(String.format("Could not get updated build information. Will try again for some time... (%s)", e.getMessage()));
}
if (!buildInfo.status.equals("succeeded")) {
outputAdapter.logDebug("Failed Build ID: " + buildInfo.id);
outputAdapter.logDebug("Failed Build Status: " + buildInfo.status);
outputAdapter.logDebug("Failed Build UpdatedAt: " + buildInfo.updatedAt);
return false;
}
outputAdapter.logInfo("-----> Done");
return true;
}
private static BuildInfo pollForNonPendingBuildInfo(String appName, String buildId, HerokuDeployApi herokuDeployApi) throws IOException, InterruptedException, HerokuDeployApiException {
for (int i = 0; i < 15; i++) {
BuildInfo latestBuildInfo = herokuDeployApi.getBuildInfo(appName, buildId);
if (!latestBuildInfo.status.equals("pending")) {
return latestBuildInfo;
}
Thread.sleep(2000);
}
return herokuDeployApi.getBuildInfo(appName, buildId);
}
private static void uploadSourceBlob(Path path, URI destination, BiConsumer<Long, Long> progressConsumer) throws IOException {
long fileSize = Files.size(path);
|
CloseableHttpClient client = CustomHttpClientBuilder.build();
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/deploymemt/Deployer.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/io/UploadProgressHttpEntity.java
// public class UploadProgressHttpEntity implements HttpEntity {
// private HttpEntity wrappedEntity;
// private Consumer<Long> progressConsumer;
//
// public UploadProgressHttpEntity(HttpEntity wrappedEntity, Consumer<Long> progressConsumer) {
// this.wrappedEntity = wrappedEntity;
// this.progressConsumer = progressConsumer;
// }
//
// @Override
// public boolean isRepeatable() {
// return wrappedEntity.isRepeatable();
// }
//
// @Override
// public boolean isChunked() {
// return wrappedEntity.isChunked();
// }
//
// @Override
// public long getContentLength() {
// return wrappedEntity.getContentLength();
// }
//
// @Override
// public Header getContentType() {
// return wrappedEntity.getContentType();
// }
//
// @Override
// public Header getContentEncoding() {
// return wrappedEntity.getContentEncoding();
// }
//
// @Override
// public InputStream getContent() throws IOException, UnsupportedOperationException {
// return wrappedEntity.getContent();
// }
//
// @Override
// public void writeTo(OutputStream outputStream) throws IOException {
// CountingOutputStream countingOutputStream = new CountingOutputStream(outputStream, progressConsumer);
// wrappedEntity.writeTo(countingOutputStream);
// }
//
// @Override
// public boolean isStreaming() {
// return wrappedEntity.isStreaming();
// }
//
// @Override
// @SuppressWarnings("deprecation")
// public void consumeContent() throws IOException {
// wrappedEntity.consumeContent();
// }
// }
|
import com.heroku.api.HerokuAPI;
import com.heroku.api.Source;
import com.heroku.sdk.deploy.api.*;
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.io.UploadProgressHttpEntity;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import javax.net.ssl.SSLHandshakeException;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
|
if (!buildInfo.status.equals("succeeded")) {
outputAdapter.logDebug("Failed Build ID: " + buildInfo.id);
outputAdapter.logDebug("Failed Build Status: " + buildInfo.status);
outputAdapter.logDebug("Failed Build UpdatedAt: " + buildInfo.updatedAt);
return false;
}
outputAdapter.logInfo("-----> Done");
return true;
}
private static BuildInfo pollForNonPendingBuildInfo(String appName, String buildId, HerokuDeployApi herokuDeployApi) throws IOException, InterruptedException, HerokuDeployApiException {
for (int i = 0; i < 15; i++) {
BuildInfo latestBuildInfo = herokuDeployApi.getBuildInfo(appName, buildId);
if (!latestBuildInfo.status.equals("pending")) {
return latestBuildInfo;
}
Thread.sleep(2000);
}
return herokuDeployApi.getBuildInfo(appName, buildId);
}
private static void uploadSourceBlob(Path path, URI destination, BiConsumer<Long, Long> progressConsumer) throws IOException {
long fileSize = Files.size(path);
CloseableHttpClient client = CustomHttpClientBuilder.build();
HttpPut request = new HttpPut(destination);
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/OutputAdapter.java
// public interface OutputAdapter {
// void logInfo(String message);
// void logDebug(String message);
// void logWarn(String message);
// void logError(String message);
// void logUploadProgress(long uploaded, long contentLength);
//
// default void logError(String message, Throwable t) {
// logError(message);
// logDebug(Arrays.stream(t.getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("\n")));
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/CustomHttpClientBuilder.java
// public class CustomHttpClientBuilder {
//
// public static CloseableHttpClient build() {
// /*
// Workaround for JDK-8220723 (https://bugs.openjdk.java.net/browse/JDK-8220723)
// We limit the available protocols to TLS 1.2 to avoid triggering the bug with TLS 1.3.
//
// Version 11.0.2 is significant to us as it is the default OpenJDK version on Travis CI for Java 11. Since running
// on CI/CD is one of the main use-cases for this library, we can justify this workaround for a bug in an older
// version of the JDK.
//
// As soon as 11.0.2 is no longer the default on Travis please consider removing this workaround.
//
// Issue: https://github.com/heroku/heroku-maven-plugin/issues/71
// */
// if (System.getProperty("java.version").equals("11.0.2")) {
// final String[] supportedProtocols = new String[] { "TLSv1.2" };
// final String[] supportedCipherSuites = split(System.getProperty("https.cipherSuites"));
//
// LayeredConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
// (SSLSocketFactory) SSLSocketFactory.getDefault(),
// supportedProtocols, supportedCipherSuites, new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()));
//
// return HttpClientBuilder
// .create()
// .useSystemProperties()
// .setSSLSocketFactory(sslConnectionSocketFactory)
// .build();
// }
//
// return HttpClients.createSystem();
// }
//
// private static String[] split(final String s) {
// if (TextUtils.isBlank(s)) {
// return null;
// }
// return s.split(" *, *");
// }
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/io/UploadProgressHttpEntity.java
// public class UploadProgressHttpEntity implements HttpEntity {
// private HttpEntity wrappedEntity;
// private Consumer<Long> progressConsumer;
//
// public UploadProgressHttpEntity(HttpEntity wrappedEntity, Consumer<Long> progressConsumer) {
// this.wrappedEntity = wrappedEntity;
// this.progressConsumer = progressConsumer;
// }
//
// @Override
// public boolean isRepeatable() {
// return wrappedEntity.isRepeatable();
// }
//
// @Override
// public boolean isChunked() {
// return wrappedEntity.isChunked();
// }
//
// @Override
// public long getContentLength() {
// return wrappedEntity.getContentLength();
// }
//
// @Override
// public Header getContentType() {
// return wrappedEntity.getContentType();
// }
//
// @Override
// public Header getContentEncoding() {
// return wrappedEntity.getContentEncoding();
// }
//
// @Override
// public InputStream getContent() throws IOException, UnsupportedOperationException {
// return wrappedEntity.getContent();
// }
//
// @Override
// public void writeTo(OutputStream outputStream) throws IOException {
// CountingOutputStream countingOutputStream = new CountingOutputStream(outputStream, progressConsumer);
// wrappedEntity.writeTo(countingOutputStream);
// }
//
// @Override
// public boolean isStreaming() {
// return wrappedEntity.isStreaming();
// }
//
// @Override
// @SuppressWarnings("deprecation")
// public void consumeContent() throws IOException {
// wrappedEntity.consumeContent();
// }
// }
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/deploymemt/Deployer.java
import com.heroku.api.HerokuAPI;
import com.heroku.api.Source;
import com.heroku.sdk.deploy.api.*;
import com.heroku.sdk.deploy.lib.OutputAdapter;
import com.heroku.sdk.deploy.util.CustomHttpClientBuilder;
import com.heroku.sdk.deploy.util.io.UploadProgressHttpEntity;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import javax.net.ssl.SSLHandshakeException;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
if (!buildInfo.status.equals("succeeded")) {
outputAdapter.logDebug("Failed Build ID: " + buildInfo.id);
outputAdapter.logDebug("Failed Build Status: " + buildInfo.status);
outputAdapter.logDebug("Failed Build UpdatedAt: " + buildInfo.updatedAt);
return false;
}
outputAdapter.logInfo("-----> Done");
return true;
}
private static BuildInfo pollForNonPendingBuildInfo(String appName, String buildId, HerokuDeployApi herokuDeployApi) throws IOException, InterruptedException, HerokuDeployApiException {
for (int i = 0; i < 15; i++) {
BuildInfo latestBuildInfo = herokuDeployApi.getBuildInfo(appName, buildId);
if (!latestBuildInfo.status.equals("pending")) {
return latestBuildInfo;
}
Thread.sleep(2000);
}
return herokuDeployApi.getBuildInfo(appName, buildId);
}
private static void uploadSourceBlob(Path path, URI destination, BiConsumer<Long, Long> progressConsumer) throws IOException {
long fileSize = Files.size(path);
CloseableHttpClient client = CustomHttpClientBuilder.build();
HttpPut request = new HttpPut(destination);
|
request.setEntity(new UploadProgressHttpEntity(new FileEntity(path.toFile()), bytes -> progressConsumer.accept(bytes, fileSize)));
|
heroku/heroku-maven-plugin
|
heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/resolver/ApiKeyResolver.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/HerokuCli.java
// public class HerokuCli {
//
// public static Optional<String> runAuthToken(Path workingDirectory) throws IOException {
// return Optional.ofNullable(runRaw(workingDirectory,"auth:token").get(0));
// }
//
// private static List<String> runRaw(Path workingDirectory, String... command) throws IOException {
// List<String> fullCommand = new ArrayList<>(Arrays.asList(command));
// fullCommand.add(0, "heroku");
//
// ProcessBuilder processBuilder = new ProcessBuilder(fullCommand);
// processBuilder.directory(workingDirectory.toFile());
// Process process = processBuilder.start();
//
// try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
// return bufferedReader.lines().collect(Collectors.toList());
// }
// }
// }
|
import com.heroku.sdk.deploy.util.HerokuCli;
import org.eclipse.jgit.transport.NetRC;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
|
package com.heroku.sdk.deploy.lib.resolver;
public class ApiKeyResolver {
public static Optional<String> resolve(Path directory) throws IOException {
Optional<String> environmentApiKey = readApiKeyFromEnvironment();
if (environmentApiKey.isPresent()) {
return environmentApiKey;
}
Optional<String> netRcPassword = ApiKeyResolver.readPasswordFromNetRc();
if (netRcPassword.isPresent()) {
return netRcPassword;
}
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/HerokuCli.java
// public class HerokuCli {
//
// public static Optional<String> runAuthToken(Path workingDirectory) throws IOException {
// return Optional.ofNullable(runRaw(workingDirectory,"auth:token").get(0));
// }
//
// private static List<String> runRaw(Path workingDirectory, String... command) throws IOException {
// List<String> fullCommand = new ArrayList<>(Arrays.asList(command));
// fullCommand.add(0, "heroku");
//
// ProcessBuilder processBuilder = new ProcessBuilder(fullCommand);
// processBuilder.directory(workingDirectory.toFile());
// Process process = processBuilder.start();
//
// try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
// return bufferedReader.lines().collect(Collectors.toList());
// }
// }
// }
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/lib/resolver/ApiKeyResolver.java
import com.heroku.sdk.deploy.util.HerokuCli;
import org.eclipse.jgit.transport.NetRC;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
package com.heroku.sdk.deploy.lib.resolver;
public class ApiKeyResolver {
public static Optional<String> resolve(Path directory) throws IOException {
Optional<String> environmentApiKey = readApiKeyFromEnvironment();
if (environmentApiKey.isPresent()) {
return environmentApiKey;
}
Optional<String> netRcPassword = ApiKeyResolver.readPasswordFromNetRc();
if (netRcPassword.isPresent()) {
return netRcPassword;
}
|
return HerokuCli.runAuthToken(directory);
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/AbstractHerokuMojo.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/Constants.java
// public class Constants {
// public static final String DEFAULT_WEBAPP_RUNNER_VERSION = "9.0.30.0";
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PathUtils.java
// public class PathUtils {
//
// public static List<Path> normalizeAll(Path basePath, List<Path> paths) {
// List<Path> normalizedPaths = new ArrayList<>();
// for (Path path : paths) {
// normalize(basePath, path).ifPresent(normalizedPaths::add);
// }
//
// return normalizedPaths;
// }
//
// public static Optional<Path> normalize(Path basePath, Path path) {
// Path absoluteBasePath = basePath.toAbsolutePath();
// Path normalizedAbsolutePath = absoluteBasePath.resolve(path).normalize();
//
// if (normalizedAbsolutePath.startsWith(absoluteBasePath)) {
// return Optional.of(absoluteBasePath.relativize(normalizedAbsolutePath));
// }
//
// return Optional.empty();
// }
//
// public static boolean isValidPath(Path basePath, Path path) {
// return normalize(basePath, path).isPresent();
// }
//
// public static List<Path> expandDirectories(Path basePath, List<Path> paths) throws IOException {
// ArrayList<Path> result = new ArrayList<>();
// for (Path path : paths) {
// result.addAll(expandDirectory(basePath, path));
// }
//
// return result;
// }
//
// public static List<Path> expandDirectory(Path basePath, Path path) throws IOException {
// return Files
// .walk(basePath.resolve(path).normalize())
// .filter(subPath -> !Files.isDirectory(subPath))
// .map(subPath -> normalize(basePath, subPath))
// .filter(Optional::isPresent)
// .map(Optional::get)
// .collect(Collectors.toList());
// }
//
// public static String separatorsToUnix(Path path) {
// // Path will normalize separators back to Windows when run on Windows. We have to fall back to a String here.
// return path.toString().replace('\\', '/');
// }
// }
|
import com.heroku.sdk.deploy.Constants;
import com.heroku.sdk.deploy.util.PathUtils;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.Mojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
|
package com.heroku.sdk.maven.mojo;
public abstract class AbstractHerokuMojo extends AbstractMojo {
@Parameter(defaultValue="${session}", readonly=true)
protected MavenSession mavenSession;
@Parameter(defaultValue="${project}", readonly=true)
protected MavenProject mavenProject;
@Component
protected BuildPluginManager pluginManager;
/**
* The name of the Heroku app.
*/
@Parameter(name="appName", property="heroku.appName")
protected String appName = null;
/**
* The version of the JDK Heroku with run the app with.
*/
@Parameter(name="jdkVersion", property="heroku.jdkVersion")
protected String jdkVersion = null;
/**
* Configuration variables that will be set on the Heroku app.
*/
@Parameter(name="configVars")
protected Map<String, String> configVars = Collections.emptyMap();
/**
* A set of file patterns to include.
*/
@Parameter(name="includes")
protected List<String> includes = new ArrayList<>();
/**
* If the target directory should also be included. Defaults to true.
*/
@Parameter(name="includeTarget", defaultValue = "true")
protected boolean includeTarget = true;
/**
* If upload progress should be logged to debug.
*/
@Parameter(name="logProgress", defaultValue = "false")
protected boolean logProgress = false;
/**
* The buildpacks to run against the partial build.
*/
@Parameter(name="buildpacks")
protected List<String> buildpacks = new ArrayList<>();
/**
* The process types used to run on Heroku (similar to Procfile).
*/
@Parameter(name="processTypes")
protected Map<String, String> processTypes = Collections.emptyMap();
/**
* The path to the war file that will be deployed with the `deploy-war` target.
*/
@Parameter(name="warFile")
protected String warFile = null;
/**
* The version of webapp-runner to use.
*/
@Parameter(name="webappRunnerVersion")
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/Constants.java
// public class Constants {
// public static final String DEFAULT_WEBAPP_RUNNER_VERSION = "9.0.30.0";
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PathUtils.java
// public class PathUtils {
//
// public static List<Path> normalizeAll(Path basePath, List<Path> paths) {
// List<Path> normalizedPaths = new ArrayList<>();
// for (Path path : paths) {
// normalize(basePath, path).ifPresent(normalizedPaths::add);
// }
//
// return normalizedPaths;
// }
//
// public static Optional<Path> normalize(Path basePath, Path path) {
// Path absoluteBasePath = basePath.toAbsolutePath();
// Path normalizedAbsolutePath = absoluteBasePath.resolve(path).normalize();
//
// if (normalizedAbsolutePath.startsWith(absoluteBasePath)) {
// return Optional.of(absoluteBasePath.relativize(normalizedAbsolutePath));
// }
//
// return Optional.empty();
// }
//
// public static boolean isValidPath(Path basePath, Path path) {
// return normalize(basePath, path).isPresent();
// }
//
// public static List<Path> expandDirectories(Path basePath, List<Path> paths) throws IOException {
// ArrayList<Path> result = new ArrayList<>();
// for (Path path : paths) {
// result.addAll(expandDirectory(basePath, path));
// }
//
// return result;
// }
//
// public static List<Path> expandDirectory(Path basePath, Path path) throws IOException {
// return Files
// .walk(basePath.resolve(path).normalize())
// .filter(subPath -> !Files.isDirectory(subPath))
// .map(subPath -> normalize(basePath, subPath))
// .filter(Optional::isPresent)
// .map(Optional::get)
// .collect(Collectors.toList());
// }
//
// public static String separatorsToUnix(Path path) {
// // Path will normalize separators back to Windows when run on Windows. We have to fall back to a String here.
// return path.toString().replace('\\', '/');
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/AbstractHerokuMojo.java
import com.heroku.sdk.deploy.Constants;
import com.heroku.sdk.deploy.util.PathUtils;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.Mojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
package com.heroku.sdk.maven.mojo;
public abstract class AbstractHerokuMojo extends AbstractMojo {
@Parameter(defaultValue="${session}", readonly=true)
protected MavenSession mavenSession;
@Parameter(defaultValue="${project}", readonly=true)
protected MavenProject mavenProject;
@Component
protected BuildPluginManager pluginManager;
/**
* The name of the Heroku app.
*/
@Parameter(name="appName", property="heroku.appName")
protected String appName = null;
/**
* The version of the JDK Heroku with run the app with.
*/
@Parameter(name="jdkVersion", property="heroku.jdkVersion")
protected String jdkVersion = null;
/**
* Configuration variables that will be set on the Heroku app.
*/
@Parameter(name="configVars")
protected Map<String, String> configVars = Collections.emptyMap();
/**
* A set of file patterns to include.
*/
@Parameter(name="includes")
protected List<String> includes = new ArrayList<>();
/**
* If the target directory should also be included. Defaults to true.
*/
@Parameter(name="includeTarget", defaultValue = "true")
protected boolean includeTarget = true;
/**
* If upload progress should be logged to debug.
*/
@Parameter(name="logProgress", defaultValue = "false")
protected boolean logProgress = false;
/**
* The buildpacks to run against the partial build.
*/
@Parameter(name="buildpacks")
protected List<String> buildpacks = new ArrayList<>();
/**
* The process types used to run on Heroku (similar to Procfile).
*/
@Parameter(name="processTypes")
protected Map<String, String> processTypes = Collections.emptyMap();
/**
* The path to the war file that will be deployed with the `deploy-war` target.
*/
@Parameter(name="warFile")
protected String warFile = null;
/**
* The version of webapp-runner to use.
*/
@Parameter(name="webappRunnerVersion")
|
protected String webappRunnerVersion = Constants.DEFAULT_WEBAPP_RUNNER_VERSION;
|
heroku/heroku-maven-plugin
|
heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/AbstractHerokuMojo.java
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/Constants.java
// public class Constants {
// public static final String DEFAULT_WEBAPP_RUNNER_VERSION = "9.0.30.0";
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PathUtils.java
// public class PathUtils {
//
// public static List<Path> normalizeAll(Path basePath, List<Path> paths) {
// List<Path> normalizedPaths = new ArrayList<>();
// for (Path path : paths) {
// normalize(basePath, path).ifPresent(normalizedPaths::add);
// }
//
// return normalizedPaths;
// }
//
// public static Optional<Path> normalize(Path basePath, Path path) {
// Path absoluteBasePath = basePath.toAbsolutePath();
// Path normalizedAbsolutePath = absoluteBasePath.resolve(path).normalize();
//
// if (normalizedAbsolutePath.startsWith(absoluteBasePath)) {
// return Optional.of(absoluteBasePath.relativize(normalizedAbsolutePath));
// }
//
// return Optional.empty();
// }
//
// public static boolean isValidPath(Path basePath, Path path) {
// return normalize(basePath, path).isPresent();
// }
//
// public static List<Path> expandDirectories(Path basePath, List<Path> paths) throws IOException {
// ArrayList<Path> result = new ArrayList<>();
// for (Path path : paths) {
// result.addAll(expandDirectory(basePath, path));
// }
//
// return result;
// }
//
// public static List<Path> expandDirectory(Path basePath, Path path) throws IOException {
// return Files
// .walk(basePath.resolve(path).normalize())
// .filter(subPath -> !Files.isDirectory(subPath))
// .map(subPath -> normalize(basePath, subPath))
// .filter(Optional::isPresent)
// .map(Optional::get)
// .collect(Collectors.toList());
// }
//
// public static String separatorsToUnix(Path path) {
// // Path will normalize separators back to Windows when run on Windows. We have to fall back to a String here.
// return path.toString().replace('\\', '/');
// }
// }
|
import com.heroku.sdk.deploy.Constants;
import com.heroku.sdk.deploy.util.PathUtils;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.Mojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
|
* The path to the war file that will be deployed with the `deploy-war` target.
*/
@Parameter(name="warFile")
protected String warFile = null;
/**
* The version of webapp-runner to use.
*/
@Parameter(name="webappRunnerVersion")
protected String webappRunnerVersion = Constants.DEFAULT_WEBAPP_RUNNER_VERSION;
/**
* Common helper to find the projects WAR file either by referencing the explicitly configured WAR file or searching
* the build directory for a WAR file. This implementation assumes it is run as part of a goal and will throw
* MojoExecutionExceptions with user-facing error messages.
*
* @param projectDirectory The root directory of the project
* @return An optional Path to the WAR file if one could be found
* @throws MojoExecutionException If there is no explicitly configured WAR file path or the projects packaged as a WAR
* @throws IOException If an IO error occurred during WAR file search
*/
protected Optional<Path> findWarFilePath(Path projectDirectory) throws MojoExecutionException, IOException {
if (warFile == null) {
if (!mavenProject.getPackaging().equals("war")) {
throw new MojoExecutionException("Your packaging must be set to 'war' or you must define the '<warFile>' config to use this goal!");
}
return Files
.find(Paths.get(mavenProject.getBuild().getDirectory()), Integer.MAX_VALUE, (path, attributes) -> path.toString().endsWith(".war"))
.findFirst()
|
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/Constants.java
// public class Constants {
// public static final String DEFAULT_WEBAPP_RUNNER_VERSION = "9.0.30.0";
// }
//
// Path: heroku-deploy/src/main/java/com/heroku/sdk/deploy/util/PathUtils.java
// public class PathUtils {
//
// public static List<Path> normalizeAll(Path basePath, List<Path> paths) {
// List<Path> normalizedPaths = new ArrayList<>();
// for (Path path : paths) {
// normalize(basePath, path).ifPresent(normalizedPaths::add);
// }
//
// return normalizedPaths;
// }
//
// public static Optional<Path> normalize(Path basePath, Path path) {
// Path absoluteBasePath = basePath.toAbsolutePath();
// Path normalizedAbsolutePath = absoluteBasePath.resolve(path).normalize();
//
// if (normalizedAbsolutePath.startsWith(absoluteBasePath)) {
// return Optional.of(absoluteBasePath.relativize(normalizedAbsolutePath));
// }
//
// return Optional.empty();
// }
//
// public static boolean isValidPath(Path basePath, Path path) {
// return normalize(basePath, path).isPresent();
// }
//
// public static List<Path> expandDirectories(Path basePath, List<Path> paths) throws IOException {
// ArrayList<Path> result = new ArrayList<>();
// for (Path path : paths) {
// result.addAll(expandDirectory(basePath, path));
// }
//
// return result;
// }
//
// public static List<Path> expandDirectory(Path basePath, Path path) throws IOException {
// return Files
// .walk(basePath.resolve(path).normalize())
// .filter(subPath -> !Files.isDirectory(subPath))
// .map(subPath -> normalize(basePath, subPath))
// .filter(Optional::isPresent)
// .map(Optional::get)
// .collect(Collectors.toList());
// }
//
// public static String separatorsToUnix(Path path) {
// // Path will normalize separators back to Windows when run on Windows. We have to fall back to a String here.
// return path.toString().replace('\\', '/');
// }
// }
// Path: heroku-maven-plugin/src/main/java/com/heroku/sdk/maven/mojo/AbstractHerokuMojo.java
import com.heroku.sdk.deploy.Constants;
import com.heroku.sdk.deploy.util.PathUtils;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.Mojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
* The path to the war file that will be deployed with the `deploy-war` target.
*/
@Parameter(name="warFile")
protected String warFile = null;
/**
* The version of webapp-runner to use.
*/
@Parameter(name="webappRunnerVersion")
protected String webappRunnerVersion = Constants.DEFAULT_WEBAPP_RUNNER_VERSION;
/**
* Common helper to find the projects WAR file either by referencing the explicitly configured WAR file or searching
* the build directory for a WAR file. This implementation assumes it is run as part of a goal and will throw
* MojoExecutionExceptions with user-facing error messages.
*
* @param projectDirectory The root directory of the project
* @return An optional Path to the WAR file if one could be found
* @throws MojoExecutionException If there is no explicitly configured WAR file path or the projects packaged as a WAR
* @throws IOException If an IO error occurred during WAR file search
*/
protected Optional<Path> findWarFilePath(Path projectDirectory) throws MojoExecutionException, IOException {
if (warFile == null) {
if (!mavenProject.getPackaging().equals("war")) {
throw new MojoExecutionException("Your packaging must be set to 'war' or you must define the '<warFile>' config to use this goal!");
}
return Files
.find(Paths.get(mavenProject.getBuild().getDirectory()), Integer.MAX_VALUE, (path, attributes) -> path.toString().endsWith(".war"))
.findFirst()
|
.flatMap(path -> PathUtils.normalize(projectDirectory, path));
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l020_dstream/l020_filesystem_text_dataframe_class/RowProcessor.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/JavaSparkSessionSingleton.java
// public class JavaSparkSessionSingleton {
// private static transient SparkSession instance = null;
//
// public static SparkSession getInstance(SparkConf sparkConf) {
// if (instance == null) {
// instance = SparkSession.builder().config(sparkConf).getOrCreate();
// }
// return instance;
// }
// }
|
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import net.jgp.labs.spark.x.utils.streaming.JavaSparkSessionSingleton;
|
package net.jgp.labs.spark.l020_dstream.l020_filesystem_text_dataframe_class;
public class RowProcessor implements VoidFunction<JavaRDD<String>> {
private static final long serialVersionUID = 1863623004244123190L;
@Override
public void call(JavaRDD<String> rdd) throws Exception {
JavaRDD<Row> rowRDD = rdd.map(new Function<String, Row>() {
private static final long serialVersionUID = 5167089361335095997L;
@Override
public Row call(String msg) {
Row row = RowFactory.create(msg);
return row;
}
});
// Create Schema
StructType schema = DataTypes.createStructType(
new StructField[] { DataTypes.createStructField("Message",
DataTypes.StringType, true) });
// Get Spark 2.0 session
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/JavaSparkSessionSingleton.java
// public class JavaSparkSessionSingleton {
// private static transient SparkSession instance = null;
//
// public static SparkSession getInstance(SparkConf sparkConf) {
// if (instance == null) {
// instance = SparkSession.builder().config(sparkConf).getOrCreate();
// }
// return instance;
// }
// }
// Path: src/main/java/net/jgp/labs/spark/l020_dstream/l020_filesystem_text_dataframe_class/RowProcessor.java
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import net.jgp.labs.spark.x.utils.streaming.JavaSparkSessionSingleton;
package net.jgp.labs.spark.l020_dstream.l020_filesystem_text_dataframe_class;
public class RowProcessor implements VoidFunction<JavaRDD<String>> {
private static final long serialVersionUID = 1863623004244123190L;
@Override
public void call(JavaRDD<String> rdd) throws Exception {
JavaRDD<Row> rowRDD = rdd.map(new Function<String, Row>() {
private static final long serialVersionUID = 5167089361335095997L;
@Override
public Row call(String msg) {
Row row = RowFactory.create(msg);
return row;
}
});
// Create Schema
StructType schema = DataTypes.createStructType(
new StructField[] { DataTypes.createStructField("Message",
DataTypes.StringType, true) });
// Get Spark 2.0 session
|
SparkSession spark = JavaSparkSessionSingleton.getInstance(rdd.context()
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l000_ingestion/l900_custom/CustomDataSourceToDataset.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/K.java
// public class K {
//
// public static final String PATH = "path";
// public static final String COUNT = "count";
//
// }
|
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.K;
|
package net.jgp.labs.spark.l000_ingestion.l900_custom;
/**
* Turning a custom data source to a Dataset/Dataframe.
*
* @author jgp
*/
public class CustomDataSourceToDataset {
private static transient Logger log = LoggerFactory.getLogger(
CustomDataSourceToDataset.class);
public static void main(String[] args) {
log.debug("Working directory: [{}].", System.getProperty("user.dir"));
CustomDataSourceToDataset app = new CustomDataSourceToDataset();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder().appName(
"Custom data set to Dataset")
.master("local").getOrCreate();
String filename = "data/array-complex.json";
long start = System.currentTimeMillis();
Dataset<Row> df = spark.read()
.format("net.jgp.labs.spark.x.datasource.SubStringCounterDataSource")
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/K.java
// public class K {
//
// public static final String PATH = "path";
// public static final String COUNT = "count";
//
// }
// Path: src/main/java/net/jgp/labs/spark/l000_ingestion/l900_custom/CustomDataSourceToDataset.java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.K;
package net.jgp.labs.spark.l000_ingestion.l900_custom;
/**
* Turning a custom data source to a Dataset/Dataframe.
*
* @author jgp
*/
public class CustomDataSourceToDataset {
private static transient Logger log = LoggerFactory.getLogger(
CustomDataSourceToDataset.class);
public static void main(String[] args) {
log.debug("Working directory: [{}].", System.getProperty("user.dir"));
CustomDataSourceToDataset app = new CustomDataSourceToDataset();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder().appName(
"Custom data set to Dataset")
.master("local").getOrCreate();
String filename = "data/array-complex.json";
long start = System.currentTimeMillis();
Dataset<Row> df = spark.read()
.format("net.jgp.labs.spark.x.datasource.SubStringCounterDataSource")
|
.option(K.COUNT + "0", "a") // count the number of 'a'
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/x/datasource/SubStringCounterDataSource.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/K.java
// public class K {
//
// public static final String PATH = "path";
// public static final String COUNT = "count";
//
// }
|
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.sources.BaseRelation;
import org.apache.spark.sql.sources.RelationProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.K;
import scala.collection.immutable.Map;
|
package net.jgp.labs.spark.x.datasource;
public class SubStringCounterDataSource implements RelationProvider {
private static transient Logger log = LoggerFactory.getLogger(
SubStringCounterDataSource.class);
@Override
public BaseRelation createRelation(SQLContext arg0, Map<String,
String> arg1) {
log.debug("-> createRelation()");
java.util.Map<String, String> javaMap = scala.collection.JavaConverters
.mapAsJavaMapConverter(arg1).asJava();
SubStringCounterRelation br = new SubStringCounterRelation();
br.setSqlContext(arg0);
for (java.util.Map.Entry<String, String> entry : javaMap.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
log.debug("[{}] --> [{}]", key, value);
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/K.java
// public class K {
//
// public static final String PATH = "path";
// public static final String COUNT = "count";
//
// }
// Path: src/main/java/net/jgp/labs/spark/x/datasource/SubStringCounterDataSource.java
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.sources.BaseRelation;
import org.apache.spark.sql.sources.RelationProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.K;
import scala.collection.immutable.Map;
package net.jgp.labs.spark.x.datasource;
public class SubStringCounterDataSource implements RelationProvider {
private static transient Logger log = LoggerFactory.getLogger(
SubStringCounterDataSource.class);
@Override
public BaseRelation createRelation(SQLContext arg0, Map<String,
String> arg1) {
log.debug("-> createRelation()");
java.util.Map<String, String> javaMap = scala.collection.JavaConverters
.mapAsJavaMapConverter(arg1).asJava();
SubStringCounterRelation br = new SubStringCounterRelation();
br.setSqlContext(arg0);
for (java.util.Map.Entry<String, String> entry : javaMap.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
log.debug("[{}] --> [{}]", key, value);
|
if (key.compareTo(K.PATH) == 0) {
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/x/utils/record_generator/RecordWriterUtils.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
package net.jgp.labs.spark.x.utils.record_generator;
public abstract class RecordWriterUtils {
public static void write(String filename, StringBuilder record) {
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/x/utils/record_generator/RecordWriterUtils.java
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
package net.jgp.labs.spark.x.utils.record_generator;
public abstract class RecordWriterUtils {
public static void write(String filename, StringBuilder record) {
|
String fullFilename = StreamingUtils.getInputDirectory() + filename;
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l250_map/l030_dataset_book_json/CsvToDatasetBookAsJson.java
|
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/BookJson.java
// public class BookJson {
// private Book b;
//
// /**
// * @return the b
// */
// public JSONObject getBook() {
// ObjectMapper mapper = new ObjectMapper();
// String bookJson;
// try {
// bookJson = mapper.writeValueAsString(this.b);
// } catch (JsonProcessingException e) {
// bookJson = "N/A";
// }
//
// JSONObject jo = new JSONObject(bookJson);
// return jo;
// }
//
// /**
// * @param b
// * the b to set
// */
// public void setBook(Book b) {
// this.b = b;
// }
//
// }
|
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.json.JSONObject;
import net.jgp.labs.spark.x.model.Book;
import net.jgp.labs.spark.x.model.BookJson;
|
package net.jgp.labs.spark.l250_map.l030_dataset_book_json;
public class CsvToDatasetBookAsJson implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
class BookMapper implements MapFunction<Row, String> {
private static final long serialVersionUID = -8940709795225426457L;
@Override
public String call(Row value) throws Exception {
|
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/BookJson.java
// public class BookJson {
// private Book b;
//
// /**
// * @return the b
// */
// public JSONObject getBook() {
// ObjectMapper mapper = new ObjectMapper();
// String bookJson;
// try {
// bookJson = mapper.writeValueAsString(this.b);
// } catch (JsonProcessingException e) {
// bookJson = "N/A";
// }
//
// JSONObject jo = new JSONObject(bookJson);
// return jo;
// }
//
// /**
// * @param b
// * the b to set
// */
// public void setBook(Book b) {
// this.b = b;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l250_map/l030_dataset_book_json/CsvToDatasetBookAsJson.java
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.json.JSONObject;
import net.jgp.labs.spark.x.model.Book;
import net.jgp.labs.spark.x.model.BookJson;
package net.jgp.labs.spark.l250_map.l030_dataset_book_json;
public class CsvToDatasetBookAsJson implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
class BookMapper implements MapFunction<Row, String> {
private static final long serialVersionUID = -8940709795225426457L;
@Override
public String call(Row value) throws Exception {
|
Book b = new Book();
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l250_map/l030_dataset_book_json/CsvToDatasetBookAsJson.java
|
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/BookJson.java
// public class BookJson {
// private Book b;
//
// /**
// * @return the b
// */
// public JSONObject getBook() {
// ObjectMapper mapper = new ObjectMapper();
// String bookJson;
// try {
// bookJson = mapper.writeValueAsString(this.b);
// } catch (JsonProcessingException e) {
// bookJson = "N/A";
// }
//
// JSONObject jo = new JSONObject(bookJson);
// return jo;
// }
//
// /**
// * @param b
// * the b to set
// */
// public void setBook(Book b) {
// this.b = b;
// }
//
// }
|
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.json.JSONObject;
import net.jgp.labs.spark.x.model.Book;
import net.jgp.labs.spark.x.model.BookJson;
|
package net.jgp.labs.spark.l250_map.l030_dataset_book_json;
public class CsvToDatasetBookAsJson implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
class BookMapper implements MapFunction<Row, String> {
private static final long serialVersionUID = -8940709795225426457L;
@Override
public String call(Row value) throws Exception {
Book b = new Book();
b.setId(value.getAs("id"));
b.setAuthorId(value.getAs("authorId"));
b.setLink(value.getAs("link"));
SimpleDateFormat parser = new SimpleDateFormat("M/d/yy");
String stringAsDate = value.getAs("releaseDate");
if (stringAsDate == null) {
b.setReleaseDate(null);
} else {
b.setReleaseDate(parser.parse(stringAsDate));
}
b.setTitle(value.getAs("title"));
|
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/BookJson.java
// public class BookJson {
// private Book b;
//
// /**
// * @return the b
// */
// public JSONObject getBook() {
// ObjectMapper mapper = new ObjectMapper();
// String bookJson;
// try {
// bookJson = mapper.writeValueAsString(this.b);
// } catch (JsonProcessingException e) {
// bookJson = "N/A";
// }
//
// JSONObject jo = new JSONObject(bookJson);
// return jo;
// }
//
// /**
// * @param b
// * the b to set
// */
// public void setBook(Book b) {
// this.b = b;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l250_map/l030_dataset_book_json/CsvToDatasetBookAsJson.java
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.json.JSONObject;
import net.jgp.labs.spark.x.model.Book;
import net.jgp.labs.spark.x.model.BookJson;
package net.jgp.labs.spark.l250_map.l030_dataset_book_json;
public class CsvToDatasetBookAsJson implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
class BookMapper implements MapFunction<Row, String> {
private static final long serialVersionUID = -8940709795225426457L;
@Override
public String call(Row value) throws Exception {
Book b = new Book();
b.setId(value.getAs("id"));
b.setAuthorId(value.getAs("authorId"));
b.setLink(value.getAs("link"));
SimpleDateFormat parser = new SimpleDateFormat("M/d/yy");
String stringAsDate = value.getAs("releaseDate");
if (stringAsDate == null) {
b.setReleaseDate(null);
} else {
b.setReleaseDate(parser.parse(stringAsDate));
}
b.setTitle(value.getAs("title"));
|
BookJson bj = new BookJson();
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookToDataframeApp.java
|
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookApp.java
// class BookMapper implements MapFunction<Row, Book> {
// private static final long serialVersionUID = -8940709795225426457L;
//
// @Override
// public Book call(Row value) throws Exception {
// Book b = new Book();
// b.setId(value.getAs("id"));
// b.setAuthorId(value.getAs("authorId"));
// b.setLink(value.getAs("link"));
// SimpleDateFormat parser = new SimpleDateFormat("M/d/yy");
// String stringAsDate = value.getAs("releaseDate");
// if (stringAsDate == null) {
// b.setReleaseDate(null);
// } else {
// b.setReleaseDate(parser.parse(stringAsDate));
// }
// b.setTitle(value.getAs("title"));
// return b;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
|
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import net.jgp.labs.spark.l250_map.l020_dataset_book.CsvToDatasetBookApp.BookMapper;
import net.jgp.labs.spark.x.model.Book;
|
package net.jgp.labs.spark.l250_map.l020_dataset_book;
public class CsvToDatasetBookToDataframeApp implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
|
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookApp.java
// class BookMapper implements MapFunction<Row, Book> {
// private static final long serialVersionUID = -8940709795225426457L;
//
// @Override
// public Book call(Row value) throws Exception {
// Book b = new Book();
// b.setId(value.getAs("id"));
// b.setAuthorId(value.getAs("authorId"));
// b.setLink(value.getAs("link"));
// SimpleDateFormat parser = new SimpleDateFormat("M/d/yy");
// String stringAsDate = value.getAs("releaseDate");
// if (stringAsDate == null) {
// b.setReleaseDate(null);
// } else {
// b.setReleaseDate(parser.parse(stringAsDate));
// }
// b.setTitle(value.getAs("title"));
// return b;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookToDataframeApp.java
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import net.jgp.labs.spark.l250_map.l020_dataset_book.CsvToDatasetBookApp.BookMapper;
import net.jgp.labs.spark.x.model.Book;
package net.jgp.labs.spark.l250_map.l020_dataset_book;
public class CsvToDatasetBookToDataframeApp implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
|
class BookMapper implements MapFunction<Row, Book> {
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookToDataframeApp.java
|
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookApp.java
// class BookMapper implements MapFunction<Row, Book> {
// private static final long serialVersionUID = -8940709795225426457L;
//
// @Override
// public Book call(Row value) throws Exception {
// Book b = new Book();
// b.setId(value.getAs("id"));
// b.setAuthorId(value.getAs("authorId"));
// b.setLink(value.getAs("link"));
// SimpleDateFormat parser = new SimpleDateFormat("M/d/yy");
// String stringAsDate = value.getAs("releaseDate");
// if (stringAsDate == null) {
// b.setReleaseDate(null);
// } else {
// b.setReleaseDate(parser.parse(stringAsDate));
// }
// b.setTitle(value.getAs("title"));
// return b;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
|
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import net.jgp.labs.spark.l250_map.l020_dataset_book.CsvToDatasetBookApp.BookMapper;
import net.jgp.labs.spark.x.model.Book;
|
package net.jgp.labs.spark.l250_map.l020_dataset_book;
public class CsvToDatasetBookToDataframeApp implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
|
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookApp.java
// class BookMapper implements MapFunction<Row, Book> {
// private static final long serialVersionUID = -8940709795225426457L;
//
// @Override
// public Book call(Row value) throws Exception {
// Book b = new Book();
// b.setId(value.getAs("id"));
// b.setAuthorId(value.getAs("authorId"));
// b.setLink(value.getAs("link"));
// SimpleDateFormat parser = new SimpleDateFormat("M/d/yy");
// String stringAsDate = value.getAs("releaseDate");
// if (stringAsDate == null) {
// b.setReleaseDate(null);
// } else {
// b.setReleaseDate(parser.parse(stringAsDate));
// }
// b.setTitle(value.getAs("title"));
// return b;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookToDataframeApp.java
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import net.jgp.labs.spark.l250_map.l020_dataset_book.CsvToDatasetBookApp.BookMapper;
import net.jgp.labs.spark.x.model.Book;
package net.jgp.labs.spark.l250_map.l020_dataset_book;
public class CsvToDatasetBookToDataframeApp implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
|
class BookMapper implements MapFunction<Row, Book> {
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l020_dstream/l000_filesystem_text/StreamingIngestionFileSystemTextFileApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
package net.jgp.labs.spark.l020_dstream.l000_filesystem_text;
public class StreamingIngestionFileSystemTextFileApp implements Serializable {
private static final long serialVersionUID = 6795623748995704732L;
public static void main(String[] args) {
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l020_dstream/l000_filesystem_text/StreamingIngestionFileSystemTextFileApp.java
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
package net.jgp.labs.spark.l020_dstream.l000_filesystem_text;
public class StreamingIngestionFileSystemTextFileApp implements Serializable {
private static final long serialVersionUID = 6795623748995704732L;
public static void main(String[] args) {
|
StreamingUtils.createInputDirectory();
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l020_dstream/l010_filesystem_text_dataframe/StreamingIngestionFileSystemTextFileToDataframeApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/JavaSparkSessionSingleton.java
// public class JavaSparkSessionSingleton {
// private static transient SparkSession instance = null;
//
// public static SparkSession getInstance(SparkConf sparkConf) {
// if (instance == null) {
// instance = SparkSession.builder().config(sparkConf).getOrCreate();
// }
// return instance;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.JavaSparkSessionSingleton;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
package net.jgp.labs.spark.l020_dstream.l010_filesystem_text_dataframe;
public class StreamingIngestionFileSystemTextFileToDataframeApp implements
Serializable {
private static final long serialVersionUID = 6795623748995704732L;
public static void main(String[] args) {
StreamingIngestionFileSystemTextFileToDataframeApp app =
new StreamingIngestionFileSystemTextFileToDataframeApp();
app.start();
}
private void start() {
// Create a local StreamingContext with two working thread and batch
// interval of
// 1 second
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName(
"Streaming Ingestion File System Text File to Dataframe");
JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations
.seconds(5));
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/JavaSparkSessionSingleton.java
// public class JavaSparkSessionSingleton {
// private static transient SparkSession instance = null;
//
// public static SparkSession getInstance(SparkConf sparkConf) {
// if (instance == null) {
// instance = SparkSession.builder().config(sparkConf).getOrCreate();
// }
// return instance;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l020_dstream/l010_filesystem_text_dataframe/StreamingIngestionFileSystemTextFileToDataframeApp.java
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.JavaSparkSessionSingleton;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
package net.jgp.labs.spark.l020_dstream.l010_filesystem_text_dataframe;
public class StreamingIngestionFileSystemTextFileToDataframeApp implements
Serializable {
private static final long serialVersionUID = 6795623748995704732L;
public static void main(String[] args) {
StreamingIngestionFileSystemTextFileToDataframeApp app =
new StreamingIngestionFileSystemTextFileToDataframeApp();
app.start();
}
private void start() {
// Create a local StreamingContext with two working thread and batch
// interval of
// 1 second
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName(
"Streaming Ingestion File System Text File to Dataframe");
JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations
.seconds(5));
|
JavaDStream<String> msgDataStream = jssc.textFileStream(StreamingUtils
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l020_dstream/l010_filesystem_text_dataframe/StreamingIngestionFileSystemTextFileToDataframeApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/JavaSparkSessionSingleton.java
// public class JavaSparkSessionSingleton {
// private static transient SparkSession instance = null;
//
// public static SparkSession getInstance(SparkConf sparkConf) {
// if (instance == null) {
// instance = SparkSession.builder().config(sparkConf).getOrCreate();
// }
// return instance;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.JavaSparkSessionSingleton;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName(
"Streaming Ingestion File System Text File to Dataframe");
JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations
.seconds(5));
JavaDStream<String> msgDataStream = jssc.textFileStream(StreamingUtils
.getInputDirectory());
msgDataStream.print();
// Create JavaRDD<Row>
msgDataStream.foreachRDD(new VoidFunction<JavaRDD<String>>() {
private static final long serialVersionUID = -590010339928376829L;
@Override
public void call(JavaRDD<String> rdd) {
JavaRDD<Row> rowRDD = rdd.map(new Function<String, Row>() {
private static final long serialVersionUID = 5167089361335095997L;
@Override
public Row call(String msg) {
Row row = RowFactory.create(msg);
return row;
}
});
// Create Schema
StructType schema = DataTypes.createStructType(
new StructField[] { DataTypes.createStructField("Message",
DataTypes.StringType, true) });
// Get Spark 2.0 session
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/JavaSparkSessionSingleton.java
// public class JavaSparkSessionSingleton {
// private static transient SparkSession instance = null;
//
// public static SparkSession getInstance(SparkConf sparkConf) {
// if (instance == null) {
// instance = SparkSession.builder().config(sparkConf).getOrCreate();
// }
// return instance;
// }
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l020_dstream/l010_filesystem_text_dataframe/StreamingIngestionFileSystemTextFileToDataframeApp.java
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.JavaSparkSessionSingleton;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName(
"Streaming Ingestion File System Text File to Dataframe");
JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations
.seconds(5));
JavaDStream<String> msgDataStream = jssc.textFileStream(StreamingUtils
.getInputDirectory());
msgDataStream.print();
// Create JavaRDD<Row>
msgDataStream.foreachRDD(new VoidFunction<JavaRDD<String>>() {
private static final long serialVersionUID = -590010339928376829L;
@Override
public void call(JavaRDD<String> rdd) {
JavaRDD<Row> rowRDD = rdd.map(new Function<String, Row>() {
private static final long serialVersionUID = 5167089361335095997L;
@Override
public Row call(String msg) {
Row row = RowFactory.create(msg);
return row;
}
});
// Create Schema
StructType schema = DataTypes.createStructType(
new StructField[] { DataTypes.createStructField("Message",
DataTypes.StringType, true) });
// Get Spark 2.0 session
|
SparkSession spark = JavaSparkSessionSingleton.getInstance(rdd.context()
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l020_dstream/l020_filesystem_text_dataframe_class/StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
package net.jgp.labs.spark.l020_dstream.l020_filesystem_text_dataframe_class;
public class StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp
implements Serializable {
private static final long serialVersionUID = 6795623748995704732L;
public static void main(String[] args) {
StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp app =
new StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp();
app.start();
}
private void start() {
// Create a local StreamingContext with two working thread and batch
// interval of
// 1 second
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName(
"Streaming Ingestion File System Text File to Dataframe");
JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations
.seconds(5));
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l020_dstream/l020_filesystem_text_dataframe_class/StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp.java
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
package net.jgp.labs.spark.l020_dstream.l020_filesystem_text_dataframe_class;
public class StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp
implements Serializable {
private static final long serialVersionUID = 6795623748995704732L;
public static void main(String[] args) {
StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp app =
new StreamingIngestionFileSystemTextFileToDataframeMultipleClassesApp();
app.start();
}
private void start() {
// Create a local StreamingContext with two working thread and batch
// interval of
// 1 second
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName(
"Streaming Ingestion File System Text File to Dataframe");
JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations
.seconds(5));
|
JavaDStream<String> msgDataStream = jssc.textFileStream(StreamingUtils
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l150_udf/l201_uuid_with_seed/AddSeededUuidInDataframeApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/UuidWithOneColumnSeedGenerator.java
// public class UuidWithOneColumnSeedGenerator implements UDF1<String, String> {
//
// private static final long serialVersionUID = -455996424L;
//
// @Override
// public String call(String seed) throws Exception {
// return UUID.nameUUIDFromBytes(seed.getBytes()).toString();
// }
//
// }
|
import static org.apache.spark.sql.functions.callUDF;
import static org.apache.spark.sql.functions.col;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import net.jgp.labs.spark.x.udf.UuidWithOneColumnSeedGenerator;
|
package net.jgp.labs.spark.l150_udf.l201_uuid_with_seed;
/**
* Reads a CSV file and generates a random UUID for each record, using the
* UDF built in <code>UuidOneColumnGenerator</code>, in the
* <code>net.jgp.labs.spark.x.udf</code> package.
*
* You can observe that:
*
* Author <code>Jean-Georges Perrin</code> with id 2 and id 16 have the same
* UUID: <code>f42ad295-0377-36d8-833d-907af04760c4</code>.
*
* Author <code>Jean Georges Perrin</code> with id 17 has a different UUID:
* <code>dd476960-cc6f-3e89-a792-ae144056cbef</code>.
*
* @author jgp
*/
public class AddSeededUuidInDataframeApp {
public static void main(String[] args) {
AddSeededUuidInDataframeApp app = new AddSeededUuidInDataframeApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Seeded UUID in dataframe app")
.master("local[*]")
.getOrCreate();
spark.udf().register(
"uuid1",
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/UuidWithOneColumnSeedGenerator.java
// public class UuidWithOneColumnSeedGenerator implements UDF1<String, String> {
//
// private static final long serialVersionUID = -455996424L;
//
// @Override
// public String call(String seed) throws Exception {
// return UUID.nameUUIDFromBytes(seed.getBytes()).toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l150_udf/l201_uuid_with_seed/AddSeededUuidInDataframeApp.java
import static org.apache.spark.sql.functions.callUDF;
import static org.apache.spark.sql.functions.col;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import net.jgp.labs.spark.x.udf.UuidWithOneColumnSeedGenerator;
package net.jgp.labs.spark.l150_udf.l201_uuid_with_seed;
/**
* Reads a CSV file and generates a random UUID for each record, using the
* UDF built in <code>UuidOneColumnGenerator</code>, in the
* <code>net.jgp.labs.spark.x.udf</code> package.
*
* You can observe that:
*
* Author <code>Jean-Georges Perrin</code> with id 2 and id 16 have the same
* UUID: <code>f42ad295-0377-36d8-833d-907af04760c4</code>.
*
* Author <code>Jean Georges Perrin</code> with id 17 has a different UUID:
* <code>dd476960-cc6f-3e89-a792-ae144056cbef</code>.
*
* @author jgp
*/
public class AddSeededUuidInDataframeApp {
public static void main(String[] args) {
AddSeededUuidInDataframeApp app = new AddSeededUuidInDataframeApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Seeded UUID in dataframe app")
.master("local[*]")
.getOrCreate();
spark.udf().register(
"uuid1",
|
new UuidWithOneColumnSeedGenerator(),
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l090_metadata/l000_add_metadata/AddMetadataApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/ColumnUtils.java
// public class ColumnUtils {
//
// public static String explain(Column col) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(col.toString());
//
// return sb.toString();
// }
//
// public static Metadata getMetadata(Dataset<Row> df, String colName) {
// StructType schema = df.schema();
// StructField[] fields = schema.fields();
// for (StructField field : fields) {
// // TODO check on case
// if (field.name().compareTo(colName) == 0) {
// return field.metadata();
// }
// }
// return null;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
|
import static org.apache.spark.sql.functions.col;
import java.util.Date;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.ColumnUtils;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
|
package net.jgp.labs.spark.l090_metadata.l000_add_metadata;
public class AddMetadataApp {
public static void main(String[] args) {
AddMetadataApp app = new AddMetadataApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Modifying metadata")
.master("local[*]")
.getOrCreate();
String format = "csv";
String filename = "data/books.csv";
Dataset<Row> df = spark.read().format(format)
.option("inferSchema", true)
.option("header", true)
.load(filename);
// Step 1 - Flat read out
System.out.println("-------");
System.out.println("Step #1 - Flat read out");
System.out.println("-------");
df.show();
df.printSchema();
System.out.println("Full read-out of metadata");
for (StructField field : df.schema().fields()) {
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/ColumnUtils.java
// public class ColumnUtils {
//
// public static String explain(Column col) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(col.toString());
//
// return sb.toString();
// }
//
// public static Metadata getMetadata(Dataset<Row> df, String colName) {
// StructType schema = df.schema();
// StructField[] fields = schema.fields();
// for (StructField field : fields) {
// // TODO check on case
// if (field.name().compareTo(colName) == 0) {
// return field.metadata();
// }
// }
// return null;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l090_metadata/l000_add_metadata/AddMetadataApp.java
import static org.apache.spark.sql.functions.col;
import java.util.Date;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.ColumnUtils;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
package net.jgp.labs.spark.l090_metadata.l000_add_metadata;
public class AddMetadataApp {
public static void main(String[] args) {
AddMetadataApp app = new AddMetadataApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Modifying metadata")
.master("local[*]")
.getOrCreate();
String format = "csv";
String filename = "data/books.csv";
Dataset<Row> df = spark.read().format(format)
.option("inferSchema", true)
.option("header", true)
.load(filename);
// Step 1 - Flat read out
System.out.println("-------");
System.out.println("Step #1 - Flat read out");
System.out.println("-------");
df.show();
df.printSchema();
System.out.println("Full read-out of metadata");
for (StructField field : df.schema().fields()) {
|
System.out.println(FieldUtils.explain(field));
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l090_metadata/l000_add_metadata/AddMetadataApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/ColumnUtils.java
// public class ColumnUtils {
//
// public static String explain(Column col) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(col.toString());
//
// return sb.toString();
// }
//
// public static Metadata getMetadata(Dataset<Row> df, String colName) {
// StructType schema = df.schema();
// StructField[] fields = schema.fields();
// for (StructField field : fields) {
// // TODO check on case
// if (field.name().compareTo(colName) == 0) {
// return field.metadata();
// }
// }
// return null;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
|
import static org.apache.spark.sql.functions.col;
import java.util.Date;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.ColumnUtils;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
|
System.out.println(FieldUtils.explain(field));
}
// Step 2 - Add custom metadata
System.out.println("-------");
System.out.println("Step #2 - Add custom metadata");
System.out.println("-------");
// Adding x-source, x-format, x-order
long i = 0;
for (String colName : df.columns()) {
Column col = col(colName);
Metadata metadata = new MetadataBuilder()
.putString("x-source", filename)
.putString("x-format", format)
.putLong("x-order", i++)
.build();
System.out.println("Metadata added to column: " + col);
df = df.withColumn(colName, col, metadata);
}
df.printSchema();
System.out.println("Full read-out of metadata");
for (StructField field : df.schema().fields()) {
System.out.println(FieldUtils.explain(field));
}
// Adding x-process-date
for (String colName : df.columns()) {
Column col = col(colName);
Metadata metadata = new MetadataBuilder()
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/ColumnUtils.java
// public class ColumnUtils {
//
// public static String explain(Column col) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(col.toString());
//
// return sb.toString();
// }
//
// public static Metadata getMetadata(Dataset<Row> df, String colName) {
// StructType schema = df.schema();
// StructField[] fields = schema.fields();
// for (StructField field : fields) {
// // TODO check on case
// if (field.name().compareTo(colName) == 0) {
// return field.metadata();
// }
// }
// return null;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l090_metadata/l000_add_metadata/AddMetadataApp.java
import static org.apache.spark.sql.functions.col;
import java.util.Date;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.ColumnUtils;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
System.out.println(FieldUtils.explain(field));
}
// Step 2 - Add custom metadata
System.out.println("-------");
System.out.println("Step #2 - Add custom metadata");
System.out.println("-------");
// Adding x-source, x-format, x-order
long i = 0;
for (String colName : df.columns()) {
Column col = col(colName);
Metadata metadata = new MetadataBuilder()
.putString("x-source", filename)
.putString("x-format", format)
.putLong("x-order", i++)
.build();
System.out.println("Metadata added to column: " + col);
df = df.withColumn(colName, col, metadata);
}
df.printSchema();
System.out.println("Full read-out of metadata");
for (StructField field : df.schema().fields()) {
System.out.println(FieldUtils.explain(field));
}
// Adding x-process-date
for (String colName : df.columns()) {
Column col = col(colName);
Metadata metadata = new MetadataBuilder()
|
.withMetadata(ColumnUtils.getMetadata(df, colName))
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l090_metadata/l000_add_metadata/AddMetadataApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/ColumnUtils.java
// public class ColumnUtils {
//
// public static String explain(Column col) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(col.toString());
//
// return sb.toString();
// }
//
// public static Metadata getMetadata(Dataset<Row> df, String colName) {
// StructType schema = df.schema();
// StructField[] fields = schema.fields();
// for (StructField field : fields) {
// // TODO check on case
// if (field.name().compareTo(colName) == 0) {
// return field.metadata();
// }
// }
// return null;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
|
import static org.apache.spark.sql.functions.col;
import java.util.Date;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.ColumnUtils;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
|
.build();
System.out.println("Metadata added to column: " + col);
df = df.withColumn(colName, col, metadata);
}
df.printSchema();
System.out.println("Full read-out of metadata");
for (StructField field : df.schema().fields()) {
System.out.println(FieldUtils.explain(field));
}
// Adding x-process-date
for (String colName : df.columns()) {
Column col = col(colName);
Metadata metadata = new MetadataBuilder()
.withMetadata(ColumnUtils.getMetadata(df, colName))
.putString("x-process-date", new Date().toString())
.build();
System.out.println("Metadata added to column: " + col);
df = df.withColumn(colName, col, metadata);
}
df.printSchema();
// Step #3 - Adding more metadata
System.out.println("-------");
System.out.println("Pass #3 - Adding more metadata");
System.out.println("-------");
// Adding x-user
for (String colName : df.columns()) {
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/ColumnUtils.java
// public class ColumnUtils {
//
// public static String explain(Column col) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(col.toString());
//
// return sb.toString();
// }
//
// public static Metadata getMetadata(Dataset<Row> df, String colName) {
// StructType schema = df.schema();
// StructField[] fields = schema.fields();
// for (StructField field : fields) {
// // TODO check on case
// if (field.name().compareTo(colName) == 0) {
// return field.metadata();
// }
// }
// return null;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l090_metadata/l000_add_metadata/AddMetadataApp.java
import static org.apache.spark.sql.functions.col;
import java.util.Date;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.ColumnUtils;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
.build();
System.out.println("Metadata added to column: " + col);
df = df.withColumn(colName, col, metadata);
}
df.printSchema();
System.out.println("Full read-out of metadata");
for (StructField field : df.schema().fields()) {
System.out.println(FieldUtils.explain(field));
}
// Adding x-process-date
for (String colName : df.columns()) {
Column col = col(colName);
Metadata metadata = new MetadataBuilder()
.withMetadata(ColumnUtils.getMetadata(df, colName))
.putString("x-process-date", new Date().toString())
.build();
System.out.println("Metadata added to column: " + col);
df = df.withColumn(colName, col, metadata);
}
df.printSchema();
// Step #3 - Adding more metadata
System.out.println("-------");
System.out.println("Pass #3 - Adding more metadata");
System.out.println("-------");
// Adding x-user
for (String colName : df.columns()) {
|
df = DataframeUtils.addMetadata(df, colName, "x-user", "jgp");
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
|
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import net.jgp.labs.spark.x.model.Book;
|
package net.jgp.labs.spark.l250_map.l020_dataset_book;
public class CsvToDatasetBookApp implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
|
// Path: src/main/java/net/jgp/labs/spark/x/model/Book.java
// public class Book {
// int id;
// int authorId;
// String title;
// Date releaseDate;
// String link;
//
// /**
// * @return the id
// */
// public int getId() {
// return id;
// }
//
// /**
// * @param id
// * the id to set
// */
// public void setId(int id) {
// this.id = id;
// }
//
// /**
// * @return the authorId
// */
// public int getAuthorId() {
// return authorId;
// }
//
// public void setAuthorId(int authorId) {
// this.authorId = authorId;
// }
//
// /**
// * @param authorId
// * the authorId to set
// */
// public void setAuthorId(Integer authorId) {
// if (authorId == null) {
// this.authorId = 0;
// } else {
// this.authorId = authorId;
// }
// }
//
// /**
// * @return the title
// */
// public String getTitle() {
// return title;
// }
//
// /**
// * @param title
// * the title to set
// */
// public void setTitle(String title) {
// this.title = title;
// }
//
// /**
// * @return the releaseDate
// */
// public Date getReleaseDate() {
// return releaseDate;
// }
//
// /**
// * @param releaseDate
// * the releaseDate to set
// */
// public void setReleaseDate(Date releaseDate) {
// this.releaseDate = releaseDate;
// }
//
// /**
// * @return the link
// */
// public String getLink() {
// return link;
// }
//
// /**
// * @param link
// * the link to set
// */
// public void setLink(String link) {
// this.link = link;
// }
// }
// Path: src/main/java/net/jgp/labs/spark/l250_map/l020_dataset_book/CsvToDatasetBookApp.java
import java.io.Serializable;
import java.text.SimpleDateFormat;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import net.jgp.labs.spark.x.model.Book;
package net.jgp.labs.spark.l250_map.l020_dataset_book;
public class CsvToDatasetBookApp implements Serializable {
private static final long serialVersionUID = 4262746489728980066L;
|
class BookMapper implements MapFunction<Row, Book> {
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l090_metadata/l100_join_metadata/AddAuthorsAndBooksMetadataApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
|
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
|
package net.jgp.labs.spark.l090_metadata.l100_join_metadata;
public class AddAuthorsAndBooksMetadataApp {
public static void main(String[] args) {
AddAuthorsAndBooksMetadataApp app = new AddAuthorsAndBooksMetadataApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Authors and Books metadata")
.master("local[*]").getOrCreate();
String filename = "data/authors.csv";
Dataset<Row> authorsDf = spark.read()
.format("csv")
.option("inferSchema", true)
.option("header", true)
.load(filename);
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l090_metadata/l100_join_metadata/AddAuthorsAndBooksMetadataApp.java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
package net.jgp.labs.spark.l090_metadata.l100_join_metadata;
public class AddAuthorsAndBooksMetadataApp {
public static void main(String[] args) {
AddAuthorsAndBooksMetadataApp app = new AddAuthorsAndBooksMetadataApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Authors and Books metadata")
.master("local[*]").getOrCreate();
String filename = "data/authors.csv";
Dataset<Row> authorsDf = spark.read()
.format("csv")
.option("inferSchema", true)
.option("header", true)
.load(filename);
|
authorsDf = DataframeUtils.addMetadata(authorsDf, "x-source", filename);
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l090_metadata/l100_join_metadata/AddAuthorsAndBooksMetadataApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
|
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
|
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Authors and Books metadata")
.master("local[*]").getOrCreate();
String filename = "data/authors.csv";
Dataset<Row> authorsDf = spark.read()
.format("csv")
.option("inferSchema", true)
.option("header", true)
.load(filename);
authorsDf = DataframeUtils.addMetadata(authorsDf, "x-source", filename);
filename = "data/books.csv";
Dataset<Row> booksDf = spark.read()
.format("csv")
.option("inferSchema", true)
.option("header", true)
.load(filename);
booksDf = DataframeUtils.addMetadata(booksDf, "x-source", filename);
Dataset<Row> libraryDf = authorsDf.join(booksDf, authorsDf.col("id")
.equalTo(booksDf.col("authorId")), "full_outer");
libraryDf.show();
libraryDf.printSchema();
System.out.println("Output of joining metadata");
System.out.println("--------------------------");
for (StructField field : libraryDf.schema().fields()) {
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/DataframeUtils.java
// public class DataframeUtils {
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String colName,
// String key, String value) {
// Metadata metadata = new MetadataBuilder()
// .withMetadata(ColumnUtils.getMetadata(df, colName))
// .putString(key, value)
// .build();
// Column col = col(colName);
// return df.withColumn(colName, col, metadata);
// }
//
// public static Dataset<Row> addMetadata(Dataset<Row> df, String key,
// String value) {
// for (String colName : df.columns()) {
// df = addMetadata(df, colName, key, value);
// }
// return df;
// }
//
// }
//
// Path: src/main/java/net/jgp/labs/spark/x/utils/FieldUtils.java
// public class FieldUtils {
//
// public static String explain(StructField field) {
// StringBuilder sb = new StringBuilder();
//
// sb.append("Name ....... ");
// sb.append(field.name());
// sb.append("\nMetadata ... ");
// sb.append(field.metadata());
// sb.append("\nType ....... ");
// sb.append(field.dataType());
//
// return sb.toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l090_metadata/l100_join_metadata/AddAuthorsAndBooksMetadataApp.java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;
import net.jgp.labs.spark.x.utils.DataframeUtils;
import net.jgp.labs.spark.x.utils.FieldUtils;
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Authors and Books metadata")
.master("local[*]").getOrCreate();
String filename = "data/authors.csv";
Dataset<Row> authorsDf = spark.read()
.format("csv")
.option("inferSchema", true)
.option("header", true)
.load(filename);
authorsDf = DataframeUtils.addMetadata(authorsDf, "x-source", filename);
filename = "data/books.csv";
Dataset<Row> booksDf = spark.read()
.format("csv")
.option("inferSchema", true)
.option("header", true)
.load(filename);
booksDf = DataframeUtils.addMetadata(booksDf, "x-source", filename);
Dataset<Row> libraryDf = authorsDf.join(booksDf, authorsDf.col("id")
.equalTo(booksDf.col("authorId")), "full_outer");
libraryDf.show();
libraryDf.printSchema();
System.out.println("Output of joining metadata");
System.out.println("--------------------------");
for (StructField field : libraryDf.schema().fields()) {
|
System.out.println(FieldUtils.explain(field));
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l150_udf/l200_uuid/AddRandomUuidInDataframeApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/UuidRandomGenerator.java
// public class UuidRandomGenerator implements UDF0<String> {
//
// private static final long serialVersionUID = -455996424L;
//
// @Override
// public String call() throws Exception {
// return UUID.randomUUID().toString();
// }
//
// }
|
import static org.apache.spark.sql.functions.callUDF;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import net.jgp.labs.spark.x.udf.UuidRandomGenerator;
|
package net.jgp.labs.spark.l150_udf.l200_uuid;
/**
* Reads a CSV file and generates a random UUID for each record, using the
* UDF built in <code>UuidRandomGenerator</code>, in the
* <code>net.jgp.labs.spark.x.udf</code> package.
*
* @author jgp
*
*/
public class AddRandomUuidInDataframeApp {
public static void main(String[] args) {
AddRandomUuidInDataframeApp app = new AddRandomUuidInDataframeApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Random UUID in dataframe app")
.master("local[*]")
.getOrCreate();
spark.udf().register(
"uuid_random",
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/UuidRandomGenerator.java
// public class UuidRandomGenerator implements UDF0<String> {
//
// private static final long serialVersionUID = -455996424L;
//
// @Override
// public String call() throws Exception {
// return UUID.randomUUID().toString();
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l150_udf/l200_uuid/AddRandomUuidInDataframeApp.java
import static org.apache.spark.sql.functions.callUDF;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import net.jgp.labs.spark.x.udf.UuidRandomGenerator;
package net.jgp.labs.spark.l150_udf.l200_uuid;
/**
* Reads a CSV file and generates a random UUID for each record, using the
* UDF built in <code>UuidRandomGenerator</code>, in the
* <code>net.jgp.labs.spark.x.udf</code> package.
*
* @author jgp
*
*/
public class AddRandomUuidInDataframeApp {
public static void main(String[] args) {
AddRandomUuidInDataframeApp app = new AddRandomUuidInDataframeApp();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder()
.appName("Random UUID in dataframe app")
.master("local[*]")
.getOrCreate();
spark.udf().register(
"uuid_random",
|
new UuidRandomGenerator(),
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l801_pi_in_progress/PiApp.java
|
// Path: src/main/java/net/jgp/labs/spark/x/datasource/SubStringCounterDataSource.java
// public class SubStringCounterDataSource implements RelationProvider {
// private static transient Logger log = LoggerFactory.getLogger(
// SubStringCounterDataSource.class);
//
// @Override
// public BaseRelation createRelation(SQLContext arg0, Map<String,
// String> arg1) {
// log.debug("-> createRelation()");
//
// java.util.Map<String, String> javaMap = scala.collection.JavaConverters
// .mapAsJavaMapConverter(arg1).asJava();
//
// SubStringCounterRelation br = new SubStringCounterRelation();
// br.setSqlContext(arg0);
//
// for (java.util.Map.Entry<String, String> entry : javaMap.entrySet()) {
// String key = entry.getKey();
// String value = entry.getValue();
// log.debug("[{}] --> [{}]", key, value);
// if (key.compareTo(K.PATH) == 0) {
// br.setFilename(value);
// } else if (key.startsWith(K.COUNT)) {
// br.addCriteria(value);
// }
// }
//
// return br;
// }
//
// }
|
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.datasource.SubStringCounterDataSource;
|
package net.jgp.labs.spark.l801_pi_in_progress;
public class PiApp {
private static transient Logger log = LoggerFactory.getLogger(
|
// Path: src/main/java/net/jgp/labs/spark/x/datasource/SubStringCounterDataSource.java
// public class SubStringCounterDataSource implements RelationProvider {
// private static transient Logger log = LoggerFactory.getLogger(
// SubStringCounterDataSource.class);
//
// @Override
// public BaseRelation createRelation(SQLContext arg0, Map<String,
// String> arg1) {
// log.debug("-> createRelation()");
//
// java.util.Map<String, String> javaMap = scala.collection.JavaConverters
// .mapAsJavaMapConverter(arg1).asJava();
//
// SubStringCounterRelation br = new SubStringCounterRelation();
// br.setSqlContext(arg0);
//
// for (java.util.Map.Entry<String, String> entry : javaMap.entrySet()) {
// String key = entry.getKey();
// String value = entry.getValue();
// log.debug("[{}] --> [{}]", key, value);
// if (key.compareTo(K.PATH) == 0) {
// br.setFilename(value);
// } else if (key.startsWith(K.COUNT)) {
// br.addCriteria(value);
// }
// }
//
// return br;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l801_pi_in_progress/PiApp.java
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.datasource.SubStringCounterDataSource;
package net.jgp.labs.spark.l801_pi_in_progress;
public class PiApp {
private static transient Logger log = LoggerFactory.getLogger(
|
SubStringCounterDataSource.class);
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l150_udf/l100_x2_multiplier/BasicExternalUdfFromTextFile.java
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/Multiplier2.java
// public class Multiplier2 implements UDF1<Integer, Integer> {
//
// private static final long serialVersionUID = -4519338105113996424L;
//
// @Override
// public Integer call(Integer t1) throws Exception {
// return t1 * 2;
// }
//
// }
|
import static org.apache.spark.sql.functions.callUDF;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import net.jgp.labs.spark.x.udf.Multiplier2;
|
package net.jgp.labs.spark.l150_udf.l100_x2_multiplier;
/**
* Simple UDF call to multiply a column by two. The UDF itself is defined in
* an external class in the
* <code>net.jgp.labs.spark.x.udf.Multiplier2</code> package.
*
* @author jgp
*
*/
public class BasicExternalUdfFromTextFile {
public static void main(String[] args) {
System.out
.println("Working directory = " + System.getProperty("user.dir"));
BasicExternalUdfFromTextFile app = new BasicExternalUdfFromTextFile();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder().appName("CSV to Dataset")
.master("local").getOrCreate();
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/Multiplier2.java
// public class Multiplier2 implements UDF1<Integer, Integer> {
//
// private static final long serialVersionUID = -4519338105113996424L;
//
// @Override
// public Integer call(Integer t1) throws Exception {
// return t1 * 2;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l150_udf/l100_x2_multiplier/BasicExternalUdfFromTextFile.java
import static org.apache.spark.sql.functions.callUDF;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import net.jgp.labs.spark.x.udf.Multiplier2;
package net.jgp.labs.spark.l150_udf.l100_x2_multiplier;
/**
* Simple UDF call to multiply a column by two. The UDF itself is defined in
* an external class in the
* <code>net.jgp.labs.spark.x.udf.Multiplier2</code> package.
*
* @author jgp
*
*/
public class BasicExternalUdfFromTextFile {
public static void main(String[] args) {
System.out
.println("Working directory = " + System.getProperty("user.dir"));
BasicExternalUdfFromTextFile app = new BasicExternalUdfFromTextFile();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder().appName("CSV to Dataset")
.master("local").getOrCreate();
|
spark.udf().register("x2Multiplier", new Multiplier2(),
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l600_ml/SimplePredictionFromTextFile.java
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/VectorBuilder.java
// public class VectorBuilder implements UDF1<Double, Vector> {
// private static final long serialVersionUID = -2991355883253063841L;
//
// @Override
// public Vector call(Double t1) throws Exception {
// return Vectors.dense(t1);
// }
//
// }
|
import static org.apache.spark.sql.functions.callUDF;
import org.apache.spark.ml.linalg.Vector;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.ml.regression.LinearRegression;
import org.apache.spark.ml.regression.LinearRegressionModel;
import org.apache.spark.ml.regression.LinearRegressionTrainingSummary;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import net.jgp.labs.spark.x.udf.VectorBuilder;
|
package net.jgp.labs.spark.l600_ml;
public class SimplePredictionFromTextFile {
public static void main(String[] args) {
System.out.println("Working directory = " + System.getProperty("user.dir"));
SimplePredictionFromTextFile app = new SimplePredictionFromTextFile();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder().appName(
"Simple prediction from Text File").master("local").getOrCreate();
|
// Path: src/main/java/net/jgp/labs/spark/x/udf/VectorBuilder.java
// public class VectorBuilder implements UDF1<Double, Vector> {
// private static final long serialVersionUID = -2991355883253063841L;
//
// @Override
// public Vector call(Double t1) throws Exception {
// return Vectors.dense(t1);
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l600_ml/SimplePredictionFromTextFile.java
import static org.apache.spark.sql.functions.callUDF;
import org.apache.spark.ml.linalg.Vector;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.ml.regression.LinearRegression;
import org.apache.spark.ml.regression.LinearRegressionModel;
import org.apache.spark.ml.regression.LinearRegressionTrainingSummary;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import net.jgp.labs.spark.x.udf.VectorBuilder;
package net.jgp.labs.spark.l600_ml;
public class SimplePredictionFromTextFile {
public static void main(String[] args) {
System.out.println("Working directory = " + System.getProperty("user.dir"));
SimplePredictionFromTextFile app = new SimplePredictionFromTextFile();
app.start();
}
private void start() {
SparkSession spark = SparkSession.builder().appName(
"Simple prediction from Text File").master("local").getOrCreate();
|
spark.udf().register("vectorBuilder", new VectorBuilder(), new VectorUDT());
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l040_structured_streaming/l021_read_lines_multiple_file_streams_in_progress/ReadLinesFromMultipleFileStreams.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.util.concurrent.TimeoutException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
package net.jgp.labs.spark.l040_structured_streaming.l021_read_lines_multiple_file_streams_in_progress;
/**
* Note working as it should be...
*
* @author jgp
*
*/
public class ReadLinesFromMultipleFileStreams {
private static transient Logger log = LoggerFactory.getLogger(
ReadLinesFromMultipleFileStreams.class);
public static void main(String[] args) {
ReadLinesFromMultipleFileStreams app = new ReadLinesFromMultipleFileStreams();
try {
app.start();
} catch (TimeoutException e) {
log.error("A timeout exception has occured: {}", e.getMessage());
}
}
private void start() throws TimeoutException {
log.debug("-> start()");
SparkSession spark = SparkSession.builder()
.appName("Read lines over a file stream").master("local")
.getOrCreate();
Dataset<Row> df = spark
.readStream()
.format("text")
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l040_structured_streaming/l021_read_lines_multiple_file_streams_in_progress/ReadLinesFromMultipleFileStreams.java
import java.util.concurrent.TimeoutException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
package net.jgp.labs.spark.l040_structured_streaming.l021_read_lines_multiple_file_streams_in_progress;
/**
* Note working as it should be...
*
* @author jgp
*
*/
public class ReadLinesFromMultipleFileStreams {
private static transient Logger log = LoggerFactory.getLogger(
ReadLinesFromMultipleFileStreams.class);
public static void main(String[] args) {
ReadLinesFromMultipleFileStreams app = new ReadLinesFromMultipleFileStreams();
try {
app.start();
} catch (TimeoutException e) {
log.error("A timeout exception has occured: {}", e.getMessage());
}
}
private void start() throws TimeoutException {
log.debug("-> start()");
SparkSession spark = SparkSession.builder()
.appName("Read lines over a file stream").master("local")
.getOrCreate();
Dataset<Row> df = spark
.readStream()
.format("text")
|
.load(StreamingUtils.getInputDirectory());
|
jgperrin/net.jgp.labs.spark
|
src/main/java/net/jgp/labs/spark/l040_structured_streaming/l000_read_lines_file_stream/ReadLinesFromFileStream.java
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
|
import java.util.concurrent.TimeoutException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
|
package net.jgp.labs.spark.l040_structured_streaming.l000_read_lines_file_stream;
public class ReadLinesFromFileStream {
private static transient Logger log = LoggerFactory.getLogger(
ReadLinesFromFileStream.class);
public static void main(String[] args) {
ReadLinesFromFileStream app = new ReadLinesFromFileStream();
try {
app.start();
} catch (TimeoutException e) {
log.error("A timeout exception has occured: {}", e.getMessage());
}
}
private void start() throws TimeoutException {
log.debug("-> start()");
SparkSession spark = SparkSession.builder()
.appName("Read lines over a file stream")
.master("local")
.getOrCreate();
Dataset<Row> df = spark
.readStream()
.format("text")
|
// Path: src/main/java/net/jgp/labs/spark/x/utils/streaming/StreamingUtils.java
// public class StreamingUtils {
//
// private String inputDirectory;
// private String inputSubDirectory1;
// private String inputSubDirectory2;
// private static StreamingUtils instance = null;
//
// private static StreamingUtils getInstance() {
// if (instance == null) {
// instance = new StreamingUtils();
// }
// return instance;
// }
//
// /**
// * @return the inputSubDirectory1
// */
// public static String getInputSubDirectory1() {
// return getInstance().inputSubDirectory1;
// }
//
// /**
// * @return the inputSubDirectory2
// */
// public static String getInputSubDirectory2() {
// return getInstance().inputSubDirectory2;
// }
//
// private StreamingUtils() {
// if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
// this.inputDirectory = "C:\\TEMP\\";
// } else {
// this.inputDirectory = System.getProperty("java.io.tmpdir");
// }
// this.inputDirectory += "streaming" + File.separator + "in" + File.separator;
// createInputDirectory(this.inputDirectory);
// this.inputSubDirectory1 += this.inputDirectory + File.separator + "s1"
// + File.separator;
// createInputDirectory(this.inputSubDirectory1);
// this.inputSubDirectory2 += this.inputDirectory + File.separator + "s2"
// + File.separator;
// createInputDirectory(this.inputSubDirectory2);
// }
//
// public static boolean createInputDirectory() {
// return createInputDirectory(getInputDirectory());
// }
//
// private static boolean createInputDirectory(String directory) {
// File d = new File(directory);
// return d.mkdirs();
// }
//
// public static String getInputDirectory() {
// return getInstance().inputDirectory;
// }
//
// }
// Path: src/main/java/net/jgp/labs/spark/l040_structured_streaming/l000_read_lines_file_stream/ReadLinesFromFileStream.java
import java.util.concurrent.TimeoutException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jgp.labs.spark.x.utils.streaming.StreamingUtils;
package net.jgp.labs.spark.l040_structured_streaming.l000_read_lines_file_stream;
public class ReadLinesFromFileStream {
private static transient Logger log = LoggerFactory.getLogger(
ReadLinesFromFileStream.class);
public static void main(String[] args) {
ReadLinesFromFileStream app = new ReadLinesFromFileStream();
try {
app.start();
} catch (TimeoutException e) {
log.error("A timeout exception has occured: {}", e.getMessage());
}
}
private void start() throws TimeoutException {
log.debug("-> start()");
SparkSession spark = SparkSession.builder()
.appName("Read lines over a file stream")
.master("local")
.getOrCreate();
Dataset<Row> df = spark
.readStream()
.format("text")
|
.load(StreamingUtils.getInputDirectory());
|
galan/verjson
|
src/test/java/de/galan/verjson/core/ProxyStepTest.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
|
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
|
package de.galan.verjson.core;
/**
* CUT ProxyStep
*
* @author daniel
*/
public class ProxyStepTest extends AbstractTestParent {
@Test
public void toStringTest() throws Exception {
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/ProxyStepTest.java
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
package de.galan.verjson.core;
/**
* CUT ProxyStep
*
* @author daniel
*/
public class ProxyStepTest extends AbstractTestParent {
@Test
public void toStringTest() throws Exception {
|
ProxyStep ps1 = new ProxyStep(1L, new NoopStep());
|
galan/verjson
|
src/test/java/de/galan/verjson/core/ProxyStepTest.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
|
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
|
package de.galan.verjson.core;
/**
* CUT ProxyStep
*
* @author daniel
*/
public class ProxyStepTest extends AbstractTestParent {
@Test
public void toStringTest() throws Exception {
ProxyStep ps1 = new ProxyStep(1L, new NoopStep());
assertThat(ps1.toString()).isEqualTo("ProxyStep 1/NoopStep");
ProxyStep ps2 = new ProxyStep(2L, new NoopStep());
assertThat(ps2.toString()).isEqualTo("ProxyStep 2/NoopStep");
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/ProxyStepTest.java
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
package de.galan.verjson.core;
/**
* CUT ProxyStep
*
* @author daniel
*/
public class ProxyStepTest extends AbstractTestParent {
@Test
public void toStringTest() throws Exception {
ProxyStep ps1 = new ProxyStep(1L, new NoopStep());
assertThat(ps1.toString()).isEqualTo("ProxyStep 1/NoopStep");
ProxyStep ps2 = new ProxyStep(2L, new NoopStep());
assertThat(ps2.toString()).isEqualTo("ProxyStep 2/NoopStep");
|
ProxyStep ps3 = new ProxyStep(1L, new IncrementVersionStep());
|
galan/verjson
|
src/test/java/de/galan/verjson/core/VerjsonTimestamp.java
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
|
import static de.galan.commons.test.Tests.*;
import net.javacrumbs.jsonunit.fluent.JsonFluentAssert;
import org.junit.Before;
import org.junit.Test;
import de.galan.commons.time.ApplicationClock;
import de.galan.verjson.test.TestBean;
|
package de.galan.verjson.core;
/**
* Test behaviour of timestamp in MetaWrapper,Version,Verjson
*
* @author daniel
*/
public class VerjsonTimestamp {
private Versions versions;
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/VerjsonTimestamp.java
import static de.galan.commons.test.Tests.*;
import net.javacrumbs.jsonunit.fluent.JsonFluentAssert;
import org.junit.Before;
import org.junit.Test;
import de.galan.commons.time.ApplicationClock;
import de.galan.verjson.test.TestBean;
package de.galan.verjson.core;
/**
* Test behaviour of timestamp in MetaWrapper,Version,Verjson
*
* @author daniel
*/
public class VerjsonTimestamp {
private Versions versions;
|
private TestBean bean;
|
galan/verjson
|
src/main/java/de/galan/verjson/core/ProxyStep.java
|
// Path: src/main/java/de/galan/verjson/step/ProcessStepException.java
// public class ProcessStepException extends ReadException {
//
// public ProcessStepException(String message) {
// super(message);
// }
//
//
// public ProcessStepException(String message, Throwable cause) {
// super(message, cause);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
|
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
import de.galan.commons.logging.Logr;
import de.galan.verjson.step.ProcessStepException;
import de.galan.verjson.step.Step;
|
package de.galan.verjson.core;
/**
* Wraps a {@link Step}, adds the assigned source-version and successor.
*
* @author daniel
*/
public class ProxyStep implements Step {
private static final Logger LOG = Logr.get();
Long sourceVersion;
Step step;
Step successor;
public ProxyStep(Long sourceVersion, Step step) {
Preconditions.checkNotNull(sourceVersion, "SourceVersion could not be null");
Preconditions.checkNotNull(step, "Step could not be null");
this.sourceVersion = sourceVersion;
this.step = step;
}
@Override
|
// Path: src/main/java/de/galan/verjson/step/ProcessStepException.java
// public class ProcessStepException extends ReadException {
//
// public ProcessStepException(String message) {
// super(message);
// }
//
//
// public ProcessStepException(String message, Throwable cause) {
// super(message, cause);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
// Path: src/main/java/de/galan/verjson/core/ProxyStep.java
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
import de.galan.commons.logging.Logr;
import de.galan.verjson.step.ProcessStepException;
import de.galan.verjson.step.Step;
package de.galan.verjson.core;
/**
* Wraps a {@link Step}, adds the assigned source-version and successor.
*
* @author daniel
*/
public class ProxyStep implements Step {
private static final Logger LOG = Logr.get();
Long sourceVersion;
Step step;
Step successor;
public ProxyStep(Long sourceVersion, Step step) {
Preconditions.checkNotNull(sourceVersion, "SourceVersion could not be null");
Preconditions.checkNotNull(step, "Step could not be null");
this.sourceVersion = sourceVersion;
this.step = step;
}
@Override
|
public void process(JsonNode node) throws ProcessStepException {
|
galan/verjson
|
src/test/java/de/galan/verjson/core/VerjsonSerializerTest.java
|
// Path: src/test/java/de/galan/verjson/test/MyContainer.java
// public class MyContainer {
//
// TestBean bean;
//
//
// public MyContainer(TestBean bean) {
// this.bean = bean;
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
|
import static de.galan.commons.test.Tests.*;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import de.galan.commons.test.AbstractTestParent;
import de.galan.commons.time.ApplicationClock;
import de.galan.verjson.test.MyContainer;
import de.galan.verjson.test.TestBean;
|
package de.galan.verjson.core;
/**
* CUT Verjson - plugging external serializer/deserializer into Verjson
*
* @author daniel
*/
public class VerjsonSerializerTest extends AbstractTestParent {
private Verjson<MyContainer> verjson;
@Before
public void before() throws Exception {
Versions versions = new Versions();
versions.registerSerializer(new TestBeanSerializer());
versions.registerDeserializer(new TestBeanDeserializer());
verjson = new Verjson<MyContainer>(MyContainer.class, versions);
ApplicationClock.setUtc("2014-05-06T06:42:28Z");
}
@Test
public void testName() throws Exception {
|
// Path: src/test/java/de/galan/verjson/test/MyContainer.java
// public class MyContainer {
//
// TestBean bean;
//
//
// public MyContainer(TestBean bean) {
// this.bean = bean;
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/VerjsonSerializerTest.java
import static de.galan.commons.test.Tests.*;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import de.galan.commons.test.AbstractTestParent;
import de.galan.commons.time.ApplicationClock;
import de.galan.verjson.test.MyContainer;
import de.galan.verjson.test.TestBean;
package de.galan.verjson.core;
/**
* CUT Verjson - plugging external serializer/deserializer into Verjson
*
* @author daniel
*/
public class VerjsonSerializerTest extends AbstractTestParent {
private Verjson<MyContainer> verjson;
@Before
public void before() throws Exception {
Versions versions = new Versions();
versions.registerSerializer(new TestBeanSerializer());
versions.registerDeserializer(new TestBeanDeserializer());
verjson = new Verjson<MyContainer>(MyContainer.class, versions);
ApplicationClock.setUtc("2014-05-06T06:42:28Z");
}
@Test
public void testName() throws Exception {
|
String written = verjson.write(new MyContainer(new TestBean().number(123L).content("abc")));
|
galan/verjson
|
src/test/java/de/galan/verjson/step/validation/ValidationTest.java
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
|
import static de.galan.commons.test.Tests.*;
import static org.apache.commons.lang3.StringUtils.*;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import org.junit.Test;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.test.TestBean;
import de.galan.verjson.util.MetaWrapper;
|
package de.galan.verjson.step.validation;
/**
* CUT Validation
*
* @author daniel
*/
public class ValidationTest extends AbstractTestParent {
public Validation create(String schemaFile, String description) throws IOException {
String schema = readFile(getClass(), schemaFile);
return isNotEmpty(description) ? new Validation(schema, description) : new Validation(schema);
}
protected JsonNode readNode(String jsonFilename) throws JsonProcessingException, IOException {
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(Include.NON_NULL);
return mapper.readTree(readFile(getClass(), jsonFilename));
}
protected JsonNode createNode(Object obj) {
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(Include.NON_NULL);
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
// Path: src/test/java/de/galan/verjson/step/validation/ValidationTest.java
import static de.galan.commons.test.Tests.*;
import static org.apache.commons.lang3.StringUtils.*;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import org.junit.Test;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.test.TestBean;
import de.galan.verjson.util.MetaWrapper;
package de.galan.verjson.step.validation;
/**
* CUT Validation
*
* @author daniel
*/
public class ValidationTest extends AbstractTestParent {
public Validation create(String schemaFile, String description) throws IOException {
String schema = readFile(getClass(), schemaFile);
return isNotEmpty(description) ? new Validation(schema, description) : new Validation(schema);
}
protected JsonNode readNode(String jsonFilename) throws JsonProcessingException, IOException {
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(Include.NON_NULL);
return mapper.readTree(readFile(getClass(), jsonFilename));
}
protected JsonNode createNode(Object obj) {
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(Include.NON_NULL);
|
MetaWrapper wrapper = new MetaWrapper(1L, null, obj, null);
|
galan/verjson
|
src/test/java/de/galan/verjson/step/validation/ValidationTest.java
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
|
import static de.galan.commons.test.Tests.*;
import static org.apache.commons.lang3.StringUtils.*;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import org.junit.Test;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.test.TestBean;
import de.galan.verjson.util.MetaWrapper;
|
@Test
public void nullJson() throws Exception {
Validation val = create("TestBean-schema-01.txt", "test");
try {
val.process(null);
fail("should be invalid");
}
catch (InvalidJsonException ex) {
assertThat(ex.getMessage()).isEqualTo("Could not validate JSON against schema (test)");
}
}
@Test
public void emptyJson() throws Exception {
Validation val = create("TestBean-schema-01.txt", null);
try {
val.process(readNode("TestBean-json-empty.txt"));
fail("should be invalid");
}
catch (InvalidJsonException ex) {
assertThat(ex.getMessage()).isEqualTo(readFile(getClass(), "emptyJson-result.txt"));
}
}
@Test
public void simpleJson() throws Exception {
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
// Path: src/test/java/de/galan/verjson/step/validation/ValidationTest.java
import static de.galan.commons.test.Tests.*;
import static org.apache.commons.lang3.StringUtils.*;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import org.junit.Test;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.test.TestBean;
import de.galan.verjson.util.MetaWrapper;
@Test
public void nullJson() throws Exception {
Validation val = create("TestBean-schema-01.txt", "test");
try {
val.process(null);
fail("should be invalid");
}
catch (InvalidJsonException ex) {
assertThat(ex.getMessage()).isEqualTo("Could not validate JSON against schema (test)");
}
}
@Test
public void emptyJson() throws Exception {
Validation val = create("TestBean-schema-01.txt", null);
try {
val.process(readNode("TestBean-json-empty.txt"));
fail("should be invalid");
}
catch (InvalidJsonException ex) {
assertThat(ex.getMessage()).isEqualTo(readFile(getClass(), "emptyJson-result.txt"));
}
}
@Test
public void simpleJson() throws Exception {
|
TestBean bean = new TestBean().content("aaa").number(3L);
|
galan/verjson
|
src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* StepSequencer used to arrange the Steps
*
* @author daniel
*/
public class DefaultStepSequencer implements StepSequencer {
@Override
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* StepSequencer used to arrange the Steps
*
* @author daniel
*/
public class DefaultStepSequencer implements StepSequencer {
@Override
|
public Map<Long, ProxyStep> sequence(ListMultimap<Long, Step> steps) {
|
galan/verjson
|
src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
|
protected void assignSuccessors(List<ProxyStep> proxies) {
ProxyStep precessor = null;
for (ProxyStep step: Lists.reverse(proxies)) {
if (precessor != null) {
step.setSuccessor(precessor);
}
precessor = step;
}
}
protected List<ProxyStep> createProxies(ListMultimap<Long, Step> steps) {
List<ProxyStep> list = Lists.newArrayList();
for (Long sourceVersion: steps.keySet()) {
for (Step step: steps.get(sourceVersion)) {
list.add(new ProxyStep(sourceVersion, step));
}
}
return list;
}
protected List<ProxyStep> fillIncrements(List<ProxyStep> proxies) {
List<ProxyStep> result = Lists.newArrayList();
Long lastSourceVersion = 1L;
if (!proxies.isEmpty()) {
boolean increment = false;
for (ProxyStep proxy: proxies) {
while(lastSourceVersion < proxy.getSourceVersion()) {
// add increments
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
protected void assignSuccessors(List<ProxyStep> proxies) {
ProxyStep precessor = null;
for (ProxyStep step: Lists.reverse(proxies)) {
if (precessor != null) {
step.setSuccessor(precessor);
}
precessor = step;
}
}
protected List<ProxyStep> createProxies(ListMultimap<Long, Step> steps) {
List<ProxyStep> list = Lists.newArrayList();
for (Long sourceVersion: steps.keySet()) {
for (Step step: steps.get(sourceVersion)) {
list.add(new ProxyStep(sourceVersion, step));
}
}
return list;
}
protected List<ProxyStep> fillIncrements(List<ProxyStep> proxies) {
List<ProxyStep> result = Lists.newArrayList();
Long lastSourceVersion = 1L;
if (!proxies.isEmpty()) {
boolean increment = false;
for (ProxyStep proxy: proxies) {
while(lastSourceVersion < proxy.getSourceVersion()) {
// add increments
|
ProxyStep incProxy = new ProxyStep(lastSourceVersion, new IncrementVersionStep());
|
galan/verjson
|
src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
|
}
precessor = step;
}
}
protected List<ProxyStep> createProxies(ListMultimap<Long, Step> steps) {
List<ProxyStep> list = Lists.newArrayList();
for (Long sourceVersion: steps.keySet()) {
for (Step step: steps.get(sourceVersion)) {
list.add(new ProxyStep(sourceVersion, step));
}
}
return list;
}
protected List<ProxyStep> fillIncrements(List<ProxyStep> proxies) {
List<ProxyStep> result = Lists.newArrayList();
Long lastSourceVersion = 1L;
if (!proxies.isEmpty()) {
boolean increment = false;
for (ProxyStep proxy: proxies) {
while(lastSourceVersion < proxy.getSourceVersion()) {
// add increments
ProxyStep incProxy = new ProxyStep(lastSourceVersion, new IncrementVersionStep());
result.add(incProxy);
lastSourceVersion++;
}
//lastSourceVersion = proxy.getSourceVersion();
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
}
precessor = step;
}
}
protected List<ProxyStep> createProxies(ListMultimap<Long, Step> steps) {
List<ProxyStep> list = Lists.newArrayList();
for (Long sourceVersion: steps.keySet()) {
for (Step step: steps.get(sourceVersion)) {
list.add(new ProxyStep(sourceVersion, step));
}
}
return list;
}
protected List<ProxyStep> fillIncrements(List<ProxyStep> proxies) {
List<ProxyStep> result = Lists.newArrayList();
Long lastSourceVersion = 1L;
if (!proxies.isEmpty()) {
boolean increment = false;
for (ProxyStep proxy: proxies) {
while(lastSourceVersion < proxy.getSourceVersion()) {
// add increments
ProxyStep incProxy = new ProxyStep(lastSourceVersion, new IncrementVersionStep());
result.add(incProxy);
lastSourceVersion++;
}
//lastSourceVersion = proxy.getSourceVersion();
|
increment = !Validation.class.isAssignableFrom(proxy.getStep().getClass());
|
galan/verjson
|
src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
|
protected List<ProxyStep> createProxies(ListMultimap<Long, Step> steps) {
List<ProxyStep> list = Lists.newArrayList();
for (Long sourceVersion: steps.keySet()) {
for (Step step: steps.get(sourceVersion)) {
list.add(new ProxyStep(sourceVersion, step));
}
}
return list;
}
// Bridges version gaps between the supplied (version-ordered) proxies by inserting
// IncrementVersionStep proxies, so every version number from 1 upward is covered.
// NOTE(review): this method is truncated in the current view; the tail of the
// if/for blocks is not visible here.
protected List<ProxyStep> fillIncrements(List<ProxyStep> proxies) {
List<ProxyStep> result = Lists.newArrayList();
// Versions start at 1; tracks the last version for which steps/increments were emitted.
Long lastSourceVersion = 1L;
if (!proxies.isEmpty()) {
// Whether the most recently processed step still needs a trailing version increment.
boolean increment = false;
for (ProxyStep proxy: proxies) {
while(lastSourceVersion < proxy.getSourceVersion()) {
// add increments
ProxyStep incProxy = new ProxyStep(lastSourceVersion, new IncrementVersionStep());
result.add(incProxy);
lastSourceVersion++;
}
//lastSourceVersion = proxy.getSourceVersion();
// Validation steps are read-only checks and therefore do not bump the version.
increment = !Validation.class.isAssignableFrom(proxy.getStep().getClass());
result.add(proxy);
}
if (increment) {
// add increment
result.add(new ProxyStep(lastSourceVersion++, new IncrementVersionStep()));
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/DefaultStepSequencer.java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.validation.Validation;
/**
 * Converts the multimap of registered steps into a flat list, wrapping each step
 * together with its source version in a {@code ProxyStep}.
 */
protected List<ProxyStep> createProxies(ListMultimap<Long, Step> steps) {
	List<ProxyStep> flattened = Lists.newArrayList();
	for (Long sourceVersion: steps.keySet()) {
		List<Step> stepsForVersion = steps.get(sourceVersion);
		for (int i = 0; i < stepsForVersion.size(); i++) {
			flattened.add(new ProxyStep(sourceVersion, stepsForVersion.get(i)));
		}
	}
	return flattened;
}
// Bridges version gaps between the supplied (version-ordered) proxies by inserting
// IncrementVersionStep proxies, then appends a terminal NoopStep.
// NOTE(review): this method is truncated in the current view; closing braces are
// not visible here.
protected List<ProxyStep> fillIncrements(List<ProxyStep> proxies) {
List<ProxyStep> result = Lists.newArrayList();
// Versions start at 1; tracks the last version for which steps/increments were emitted.
Long lastSourceVersion = 1L;
if (!proxies.isEmpty()) {
// Whether the most recently processed step still needs a trailing version increment.
boolean increment = false;
for (ProxyStep proxy: proxies) {
while(lastSourceVersion < proxy.getSourceVersion()) {
// add increments
ProxyStep incProxy = new ProxyStep(lastSourceVersion, new IncrementVersionStep());
result.add(incProxy);
lastSourceVersion++;
}
//lastSourceVersion = proxy.getSourceVersion();
// Validation steps are read-only checks and therefore do not bump the version.
increment = !Validation.class.isAssignableFrom(proxy.getStep().getClass());
result.add(proxy);
}
if (increment) {
// add increment
result.add(new ProxyStep(lastSourceVersion++, new IncrementVersionStep()));

// Terminal no-op marks the end of the step chain.
result.add(new ProxyStep(lastSourceVersion, new NoopStep()));
|
galan/verjson
|
src/main/java/de/galan/verjson/step/IncrementVersionStep.java
|
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
|
import com.fasterxml.jackson.databind.JsonNode;
import de.galan.verjson.util.MetaWrapper;
|
package de.galan.verjson.step;
/**
* Increments the version field by one.
*
* @author daniel
*/
// Step that bumps the wrapped document's version meta field by one.
// NOTE(review): class body is truncated in the current view.
public class IncrementVersionStep implements Step {
@Override
public void process(JsonNode node) {
|
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
import com.fasterxml.jackson.databind.JsonNode;
import de.galan.verjson.util.MetaWrapper;
package de.galan.verjson.step;
/**
* Increments the version field by one.
*
* @author daniel
*/
// Step that bumps the wrapped document's version meta field by one.
// NOTE(review): class body is truncated in the current view.
public class IncrementVersionStep implements Step {
@Override
public void process(JsonNode node) {

// Read the current source version from the wrapper's meta fields.
// NOTE(review): the companion call shown elsewhere in this file,
// MetaWrapper.setVersion(node, version++), writes back the OLD value
// (post-increment) — it should be ++version or version + 1. Confirm and fix.
Long version = MetaWrapper.getVersion(node);
|
galan/verjson
|
src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
|
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
|
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
/**
 * Builds the Jackson ObjectMapper used by verjson: field-based introspection,
 * non-null serialization, custom (de)serializers and polymorphic mix-ins taken
 * from the supplied {@code Versions} configuration.
 */
public ObjectMapper create(Versions versions) {
	ObjectMapper mapper = new ObjectMapper();
	mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
	mapper.setSerializationInclusion(Include.NON_NULL);
	SimpleModule verjsonModule = new SimpleModule("VerjsonModule");
	registerSerializer(mapper, verjsonModule, versions);
	mapper.registerModule(verjsonModule);
	// Generate a mix-in per registered parent class so subtypes deserialize polymorphically.
	for (Class<?> parent: versions.getRegisteredSubclasses().keySet()) {
		mapper.addMixIn(parent, generateMixIn(parent, versions.getRegisteredSubclasses().get(parent)));
	}
	return mapper;
}
// Registers verjson's built-in (de)serializers plus any supplied via the
// Versions configuration. NOTE(review): truncated in the current view.
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
/**
 * Constructs and configures the ObjectMapper for (de)serialization: direct field
 * access, NON_NULL inclusion, the verjson module, and mix-ins for every parent
 * class with registered subclasses.
 */
public ObjectMapper create(Versions versions) {
	ObjectMapper om = new ObjectMapper();
	om.setSerializationInclusion(Include.NON_NULL);
	om.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
	SimpleModule module = new SimpleModule("VerjsonModule");
	registerSerializer(om, module, versions);
	om.registerModule(module);
	for (Class<?> parentClass: versions.getRegisteredSubclasses().keySet()) {
		Class<?> generated = generateMixIn(parentClass, versions.getRegisteredSubclasses().get(parentClass));
		om.addMixIn(parentClass, generated);
	}
	return om;
}
// Registers verjson's built-in (de)serializers plus any supplied via the
// Versions configuration. NOTE(review): truncated in the current view.
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer

module.addSerializer(new DateSerializer());
|
galan/verjson
|
src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
|
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
|
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
/**
 * Factory method for the configured Jackson ObjectMapper: field introspection,
 * non-null inclusion, verjson's serializer module and generated polymorphic mix-ins.
 */
public ObjectMapper create(Versions versions) {
	ObjectMapper objectMapper = new ObjectMapper();
	objectMapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
	objectMapper.setSerializationInclusion(Include.NON_NULL);
	SimpleModule simpleModule = new SimpleModule("VerjsonModule");
	registerSerializer(objectMapper, simpleModule, versions);
	objectMapper.registerModule(simpleModule);
	versions.getRegisteredSubclasses().keySet().forEach(parentClass ->
		objectMapper.addMixIn(parentClass, generateMixIn(parentClass, versions.getRegisteredSubclasses().get(parentClass))));
	return objectMapper;
}
// Registers verjson's built-in (de)serializers plus any supplied via the
// Versions configuration. NOTE(review): truncated in the current view.
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
module.addSerializer(new DateSerializer());
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
/**
 * Creates the ObjectMapper used for versioned (de)serialization. Configures field
 * visibility, NON_NULL inclusion, registers the verjson module and installs a
 * generated mix-in for each parent class that has registered subclasses.
 */
public ObjectMapper create(Versions versions) {
	ObjectMapper result = new ObjectMapper();
	result.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
	result.setSerializationInclusion(Include.NON_NULL);
	SimpleModule module = new SimpleModule("VerjsonModule");
	registerSerializer(result, module, versions);
	result.registerModule(module);
	for (Class<?> parent: versions.getRegisteredSubclasses().keySet()) {
		// Mix-in carries the @JsonTypeInfo/@JsonSubTypes annotations for this parent.
		result.addMixIn(parent, generateMixIn(parent, versions.getRegisteredSubclasses().get(parent)));
	}
	return result;
}
// Registers verjson's built-in (de)serializers plus any supplied via the
// Versions configuration. NOTE(review): truncated in the current view.
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
module.addSerializer(new DateSerializer());

module.addDeserializer(Date.class, new DateDeserializer());
|
galan/verjson
|
src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
|
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
|
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
public ObjectMapper create(Versions versions) {
ObjectMapper result = new ObjectMapper();
result.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
result.setSerializationInclusion(Include.NON_NULL);
SimpleModule module = new SimpleModule("VerjsonModule");
registerSerializer(result, module, versions);
result.registerModule(module);
for (Class<?> parentClass: versions.getRegisteredSubclasses().keySet()) {
Class<?> mixin = generateMixIn(parentClass, versions.getRegisteredSubclasses().get(parentClass));
result.addMixIn(parentClass, mixin);
}
return result;
}
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
module.addSerializer(new DateSerializer());
module.addDeserializer(Date.class, new DateDeserializer());
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
public ObjectMapper create(Versions versions) {
ObjectMapper result = new ObjectMapper();
result.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
result.setSerializationInclusion(Include.NON_NULL);
SimpleModule module = new SimpleModule("VerjsonModule");
registerSerializer(result, module, versions);
result.registerModule(module);
for (Class<?> parentClass: versions.getRegisteredSubclasses().keySet()) {
Class<?> mixin = generateMixIn(parentClass, versions.getRegisteredSubclasses().get(parentClass));
result.addMixIn(parentClass, mixin);
}
return result;
}
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
module.addSerializer(new DateSerializer());
module.addDeserializer(Date.class, new DateDeserializer());
|
module.addSerializer(new ZonedDateTimeSerializer());
|
galan/verjson
|
src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
|
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
|
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
public ObjectMapper create(Versions versions) {
ObjectMapper result = new ObjectMapper();
result.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
result.setSerializationInclusion(Include.NON_NULL);
SimpleModule module = new SimpleModule("VerjsonModule");
registerSerializer(result, module, versions);
result.registerModule(module);
for (Class<?> parentClass: versions.getRegisteredSubclasses().keySet()) {
Class<?> mixin = generateMixIn(parentClass, versions.getRegisteredSubclasses().get(parentClass));
result.addMixIn(parentClass, mixin);
}
return result;
}
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
module.addSerializer(new DateSerializer());
module.addDeserializer(Date.class, new DateDeserializer());
module.addSerializer(new ZonedDateTimeSerializer());
|
// Path: src/main/java/de/galan/verjson/serializer/DateDeserializer.java
// public class DateDeserializer extends JsonDeserializer<Date> {
//
// @Override
// public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return from(instantUtc(jp.getText())).toDate();
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/DateSerializer.java
// public class DateSerializer extends JsonSerializer<Date> {
//
// @Override
// public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(from(value).toStringUtc());
// }
//
//
// @Override
// public Class<Date> handledType() {
// return Date.class;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeDeserializer.java
// public class ZonedDateTimeDeserializer extends JsonDeserializer<ZonedDateTime> {
//
// @Override
// public ZonedDateTime deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
// return ZonedDateTime.parse(jp.getText());
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/serializer/ZonedDateTimeSerializer.java
// public class ZonedDateTimeSerializer extends JsonSerializer<ZonedDateTime> {
//
// private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]'Z'").withZone(ZONE_UTC);
//
//
// @Override
// public void serialize(ZonedDateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
// jgen.writeString(DTF.format(value));
// }
//
//
// @Override
// public Class<ZonedDateTime> handledType() {
// return ZonedDateTime.class;
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/ObjectMapperFactory.java
import java.lang.reflect.Method;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.Lists;
import de.galan.commons.logging.Logr;
import de.galan.commons.util.Pair;
import de.galan.verjson.serializer.DateDeserializer;
import de.galan.verjson.serializer.DateSerializer;
import de.galan.verjson.serializer.ZonedDateTimeDeserializer;
import de.galan.verjson.serializer.ZonedDateTimeSerializer;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.ClassMemberValue;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
package de.galan.verjson.core;
/**
* Construction of the Jackson ObjectMapper. Configuring Fieldintrospection, Serializer/Deserializer, Polymorph class
* registration.
*
* @author daniel
*/
public class ObjectMapperFactory {
private static final Logger LOG = Logr.get();
public ObjectMapper create(Versions versions) {
ObjectMapper result = new ObjectMapper();
result.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
result.setSerializationInclusion(Include.NON_NULL);
SimpleModule module = new SimpleModule("VerjsonModule");
registerSerializer(result, module, versions);
result.registerModule(module);
for (Class<?> parentClass: versions.getRegisteredSubclasses().keySet()) {
Class<?> mixin = generateMixIn(parentClass, versions.getRegisteredSubclasses().get(parentClass));
result.addMixIn(parentClass, mixin);
}
return result;
}
@SuppressWarnings({"unchecked", "rawtypes"})
protected void registerSerializer(ObjectMapper result, SimpleModule module, Versions versions) {
// Default serializer
module.addSerializer(new DateSerializer());
module.addDeserializer(Date.class, new DateDeserializer());
module.addSerializer(new ZonedDateTimeSerializer());
|
module.addDeserializer(ZonedDateTime.class, new ZonedDateTimeDeserializer());
|
galan/verjson
|
src/main/java/de/galan/verjson/core/Verjson.java
|
// Path: src/main/java/de/galan/verjson/step/ProcessStepException.java
// public class ProcessStepException extends ReadException {
//
// public ProcessStepException(String message) {
// super(message);
// }
//
//
// public ProcessStepException(String message, Throwable cause) {
// super(message, cause);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/util/ReadException.java
// public abstract class ReadException extends Exception {
//
// public ReadException(String message) {
// super(message);
// }
//
//
// public ReadException(String message, Throwable cause) {
// super(message, cause);
// }
//
// }
|
import static de.galan.commons.time.Instants.*;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Preconditions;
import de.galan.verjson.step.ProcessStepException;
import de.galan.verjson.step.Step;
import de.galan.verjson.util.MetaWrapper;
import de.galan.verjson.util.ReadException;
|
return wrapper;
}
public T readPlain(JsonNode node, long version) throws VersionNotSupportedException, NamespaceMismatchException, ProcessStepException, IOReadException {
return read(wrapPlainNode(node, version));
}
public T read(String json) throws VersionNotSupportedException, NamespaceMismatchException, ProcessStepException, IOReadException {
T result = null;
try {
result = read(readTree(json));
}
catch (IOException ex) {
throw new IOReadException("Reading json failed: " + ex.getMessage(), ex);
}
return result;
}
public T read(JsonNode node) throws VersionNotSupportedException, NamespaceMismatchException, ProcessStepException, IOReadException {
T result = null;
try {
verifyNamespace(node);
Long jsonVersion = verifyVersion(node);
steps.get(jsonVersion).process(node);
JsonNode data = MetaWrapper.getData(node);
result = getMapper().treeToValue(data, getValueClass());
}
|
// Path: src/main/java/de/galan/verjson/step/ProcessStepException.java
// public class ProcessStepException extends ReadException {
//
// public ProcessStepException(String message) {
// super(message);
// }
//
//
// public ProcessStepException(String message, Throwable cause) {
// super(message, cause);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/util/MetaWrapper.java
// public class MetaWrapper {
//
// public static final String ID_VERSION = "$v";
// public static final String ID_NAMESPACE = "$ns";
// public static final String ID_DATA = "$d";
// public static final String ID_TIMESTAMP = "$ts";
//
// /** Incremental version */
// @JsonProperty(ID_VERSION)
// private long version;
//
// /** Namespace for the data object */
// @JsonProperty(ID_NAMESPACE)
// private String namespace;
//
// /** Timestamp when the object was serialized */
// @JsonProperty(ID_TIMESTAMP)
// private Date timestamp;
//
// /** Actual payload */
// @JsonProperty(ID_DATA)
// private Object data;
//
//
// public MetaWrapper(long version, String namespace, Object data, Date timestamp) {
// this.version = version;
// this.namespace = namespace;
// this.data = data;
// this.timestamp = timestamp;
// }
//
//
// /** Returns the data node from a wrapped JsonNode */
// public static JsonNode getData(JsonNode node) {
// return getObj(obj(node), MetaWrapper.ID_DATA);
// }
//
//
// /** Returns the namespace from a wrapped JsonNode */
// public static String getNamespace(JsonNode node) {
// JsonNode nodeNs = obj(node).get(ID_NAMESPACE);
// return (nodeNs != null) ? nodeNs.asText() : null;
// }
//
//
// /** Returns the source version from a wrapped JsonNode */
// public static Long getVersion(JsonNode node) {
// return obj(node).get(ID_VERSION).asLong();
// }
//
//
// /** Sets the version on a wrapped JsonNode */
// public static void setVersion(JsonNode node, Long version) {
// obj(node).put(ID_VERSION, version);
// }
//
//
// /** Returns the timestamp from a wrapped JsonNode */
// public static Date getTimestamp(JsonNode node) {
// String text = obj(node).get(ID_TIMESTAMP).asText();
// return isNotBlank(text) ? from(instantUtc(text)).toDate() : null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/util/ReadException.java
// public abstract class ReadException extends Exception {
//
// public ReadException(String message) {
// super(message);
// }
//
//
// public ReadException(String message, Throwable cause) {
// super(message, cause);
// }
//
// }
// Path: src/main/java/de/galan/verjson/core/Verjson.java
import static de.galan.commons.time.Instants.*;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Preconditions;
import de.galan.verjson.step.ProcessStepException;
import de.galan.verjson.step.Step;
import de.galan.verjson.util.MetaWrapper;
import de.galan.verjson.util.ReadException;
return wrapper;
}
public T readPlain(JsonNode node, long version) throws VersionNotSupportedException, NamespaceMismatchException, ProcessStepException, IOReadException {
return read(wrapPlainNode(node, version));
}
public T read(String json) throws VersionNotSupportedException, NamespaceMismatchException, ProcessStepException, IOReadException {
T result = null;
try {
result = read(readTree(json));
}
catch (IOException ex) {
throw new IOReadException("Reading json failed: " + ex.getMessage(), ex);
}
return result;
}
public T read(JsonNode node) throws VersionNotSupportedException, NamespaceMismatchException, ProcessStepException, IOReadException {
T result = null;
try {
verifyNamespace(node);
Long jsonVersion = verifyVersion(node);
steps.get(jsonVersion).process(node);
JsonNode data = MetaWrapper.getData(node);
result = getMapper().treeToValue(data, getValueClass());
}
|
catch (ReadException ex) {
|
galan/verjson
|
src/test/java/de/galan/verjson/core/StepSequencerAttachTest.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import static org.assertj.core.api.Assertions.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.Lists;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* CUT StepSequencer attachVersions()
*
* @author daniel
*/
public class StepSequencerAttachTest extends AbstractStepSequencerParent {
@Test
public void attach() throws Exception {
List<ProxyStep> proxies = Lists.newArrayList(v(1), i(1), i(2), t(3), i(3));
Map<Long, ProxyStep> map = ss.attachVersions(proxies);
ProxyStep proxy1 = map.get(1L);
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/StepSequencerAttachTest.java
import static org.assertj.core.api.Assertions.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.Lists;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* CUT StepSequencer attachVersions()
*
* @author daniel
*/
public class StepSequencerAttachTest extends AbstractStepSequencerParent {
@Test
public void attach() throws Exception {
List<ProxyStep> proxies = Lists.newArrayList(v(1), i(1), i(2), t(3), i(3));
Map<Long, ProxyStep> map = ss.attachVersions(proxies);
ProxyStep proxy1 = map.get(1L);
|
assertThat(Validation.class).isAssignableFrom(proxy1.getStep().getClass());
|
galan/verjson
|
src/test/java/de/galan/verjson/core/StepSequencerAttachTest.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import static org.assertj.core.api.Assertions.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.Lists;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* CUT StepSequencer attachVersions()
*
* @author daniel
*/
public class StepSequencerAttachTest extends AbstractStepSequencerParent {
@Test
public void attach() throws Exception {
List<ProxyStep> proxies = Lists.newArrayList(v(1), i(1), i(2), t(3), i(3));
Map<Long, ProxyStep> map = ss.attachVersions(proxies);
ProxyStep proxy1 = map.get(1L);
assertThat(Validation.class).isAssignableFrom(proxy1.getStep().getClass());
ProxyStep proxy2 = map.get(2L);
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/StepSequencerAttachTest.java
import static org.assertj.core.api.Assertions.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.Lists;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* CUT StepSequencer attachVersions()
*
* @author daniel
*/
public class StepSequencerAttachTest extends AbstractStepSequencerParent {
@Test
public void attach() throws Exception {
List<ProxyStep> proxies = Lists.newArrayList(v(1), i(1), i(2), t(3), i(3));
Map<Long, ProxyStep> map = ss.attachVersions(proxies);
ProxyStep proxy1 = map.get(1L);
assertThat(Validation.class).isAssignableFrom(proxy1.getStep().getClass());
ProxyStep proxy2 = map.get(2L);
|
assertThat(IncrementVersionStep.class).isAssignableFrom(proxy2.getStep().getClass());
|
galan/verjson
|
src/test/java/de/galan/verjson/core/StepSequencerAttachTest.java
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import static org.assertj.core.api.Assertions.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.Lists;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* CUT StepSequencer attachVersions()
*
* @author daniel
*/
public class StepSequencerAttachTest extends AbstractStepSequencerParent {
@Test
public void attach() throws Exception {
List<ProxyStep> proxies = Lists.newArrayList(v(1), i(1), i(2), t(3), i(3));
Map<Long, ProxyStep> map = ss.attachVersions(proxies);
ProxyStep proxy1 = map.get(1L);
assertThat(Validation.class).isAssignableFrom(proxy1.getStep().getClass());
ProxyStep proxy2 = map.get(2L);
assertThat(IncrementVersionStep.class).isAssignableFrom(proxy2.getStep().getClass());
ProxyStep proxy3 = map.get(3L);
|
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/StepSequencerAttachTest.java
import static org.assertj.core.api.Assertions.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.Lists;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* CUT StepSequencer attachVersions()
*
* @author daniel
*/
public class StepSequencerAttachTest extends AbstractStepSequencerParent {
@Test
public void attach() throws Exception {
List<ProxyStep> proxies = Lists.newArrayList(v(1), i(1), i(2), t(3), i(3));
Map<Long, ProxyStep> map = ss.attachVersions(proxies);
ProxyStep proxy1 = map.get(1L);
assertThat(Validation.class).isAssignableFrom(proxy1.getStep().getClass());
ProxyStep proxy2 = map.get(2L);
assertThat(IncrementVersionStep.class).isAssignableFrom(proxy2.getStep().getClass());
ProxyStep proxy3 = map.get(3L);
|
assertThat(Transformation.class).isAssignableFrom(proxy3.getStep().getClass());
|
galan/verjson
|
src/test/java/de/galan/verjson/core/VerjsonTest.java
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
|
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.test.TestBean;
|
package de.galan.verjson.core;
/**
* CUT Verjson
*
* @author daniel
*/
public class VerjsonTest extends AbstractTestParent {
@Test
public void highestSourceVersionEmpty() throws Exception {
|
// Path: src/test/java/de/galan/verjson/test/TestBean.java
// public class TestBean {
//
// public String content;
// public Long number;
// public Object unrecognized;
//
//
// public TestBean content(String value) {
// content = value;
// return this;
// }
//
//
// public TestBean number(Long value) {
// number = value;
// return this;
// }
//
//
// public TestBean unrecognized(Object obj) {
// unrecognized = obj;
// return this;
// }
//
//
// @Override
// public int hashCode() {
// return Objects.hash(content, number);
// }
//
//
// @Override
// public boolean equals(Object obj) {
// return EqualsBuilder.reflectionEquals(this, obj, false);
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/VerjsonTest.java
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.test.TestBean;
package de.galan.verjson.core;
/**
* CUT Verjson
*
* @author daniel
*/
public class VerjsonTest extends AbstractTestParent {
@Test
public void highestSourceVersionEmpty() throws Exception {
|
Verjson<TestBean> verjson = Verjson.create(TestBean.class, null);
|
galan/verjson
|
src/test/java/de/galan/verjson/core/AbstractStepSequencerParent.java
|
// Path: src/test/java/de/galan/verjson/DummyTransformation.java
// public class DummyTransformation extends Transformation {
//
// @Override
// protected void transform(JsonNode node) {
// //nada
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/DummyValidation.java
// public class DummyValidation extends Validation {
//
// public DummyValidation(String schema, String description) {
// super(schema, description);
// }
//
//
// public DummyValidation(String schema) {
// super(schema);
// }
//
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
//
// @Override
// public JsonSchema create(String schemaString) {
// return null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import org.junit.Before;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.DummyTransformation;
import de.galan.verjson.DummyValidation;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* Abstract parent for StepSequencer tests
*
* @author daniel
*/
public class AbstractStepSequencerParent extends AbstractTestParent {
DefaultStepSequencer ss;
|
// Path: src/test/java/de/galan/verjson/DummyTransformation.java
// public class DummyTransformation extends Transformation {
//
// @Override
// protected void transform(JsonNode node) {
// //nada
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/DummyValidation.java
// public class DummyValidation extends Validation {
//
// public DummyValidation(String schema, String description) {
// super(schema, description);
// }
//
//
// public DummyValidation(String schema) {
// super(schema);
// }
//
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
//
// @Override
// public JsonSchema create(String schemaString) {
// return null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/AbstractStepSequencerParent.java
import org.junit.Before;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.DummyTransformation;
import de.galan.verjson.DummyValidation;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* Abstract parent for StepSequencer tests
*
* @author daniel
*/
public class AbstractStepSequencerParent extends AbstractTestParent {
DefaultStepSequencer ss;
|
Validation validate;
|
galan/verjson
|
src/test/java/de/galan/verjson/core/AbstractStepSequencerParent.java
|
// Path: src/test/java/de/galan/verjson/DummyTransformation.java
// public class DummyTransformation extends Transformation {
//
// @Override
// protected void transform(JsonNode node) {
// //nada
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/DummyValidation.java
// public class DummyValidation extends Validation {
//
// public DummyValidation(String schema, String description) {
// super(schema, description);
// }
//
//
// public DummyValidation(String schema) {
// super(schema);
// }
//
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
//
// @Override
// public JsonSchema create(String schemaString) {
// return null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import org.junit.Before;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.DummyTransformation;
import de.galan.verjson.DummyValidation;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* Abstract parent for StepSequencer tests
*
* @author daniel
*/
public class AbstractStepSequencerParent extends AbstractTestParent {
DefaultStepSequencer ss;
Validation validate;
|
// Path: src/test/java/de/galan/verjson/DummyTransformation.java
// public class DummyTransformation extends Transformation {
//
// @Override
// protected void transform(JsonNode node) {
// //nada
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/DummyValidation.java
// public class DummyValidation extends Validation {
//
// public DummyValidation(String schema, String description) {
// super(schema, description);
// }
//
//
// public DummyValidation(String schema) {
// super(schema);
// }
//
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
//
// @Override
// public JsonSchema create(String schemaString) {
// return null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/AbstractStepSequencerParent.java
import org.junit.Before;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.DummyTransformation;
import de.galan.verjson.DummyValidation;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* Abstract parent for StepSequencer tests
*
* @author daniel
*/
public class AbstractStepSequencerParent extends AbstractTestParent {
DefaultStepSequencer ss;
Validation validate;
|
Transformation transform;
|
galan/verjson
|
src/test/java/de/galan/verjson/core/AbstractStepSequencerParent.java
|
// Path: src/test/java/de/galan/verjson/DummyTransformation.java
// public class DummyTransformation extends Transformation {
//
// @Override
// protected void transform(JsonNode node) {
// //nada
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/DummyValidation.java
// public class DummyValidation extends Validation {
//
// public DummyValidation(String schema, String description) {
// super(schema, description);
// }
//
//
// public DummyValidation(String schema) {
// super(schema);
// }
//
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
//
// @Override
// public JsonSchema create(String schemaString) {
// return null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
|
import org.junit.Before;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.DummyTransformation;
import de.galan.verjson.DummyValidation;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
|
package de.galan.verjson.core;
/**
* Abstract parent for StepSequencer tests
*
* @author daniel
*/
public class AbstractStepSequencerParent extends AbstractTestParent {
DefaultStepSequencer ss;
Validation validate;
Transformation transform;
|
// Path: src/test/java/de/galan/verjson/DummyTransformation.java
// public class DummyTransformation extends Transformation {
//
// @Override
// protected void transform(JsonNode node) {
// //nada
// }
//
// }
//
// Path: src/test/java/de/galan/verjson/DummyValidation.java
// public class DummyValidation extends Validation {
//
// public DummyValidation(String schema, String description) {
// super(schema, description);
// }
//
//
// public DummyValidation(String schema) {
// super(schema);
// }
//
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
//
// @Override
// public JsonSchema create(String schemaString) {
// return null;
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/IncrementVersionStep.java
// public class IncrementVersionStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// Long version = MetaWrapper.getVersion(node);
// MetaWrapper.setVersion(node, version++);
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/NoopStep.java
// public class NoopStep implements Step {
//
// @Override
// public void process(JsonNode node) {
// // nothing
// }
//
// }
//
// Path: src/main/java/de/galan/verjson/step/Step.java
// public interface Step {
//
// /** Perform action on wrapped root node */
// public void process(JsonNode node) throws ProcessStepException;
//
// }
//
// Path: src/main/java/de/galan/verjson/step/transformation/Transformation.java
// public abstract class Transformation implements Step {
//
// @Override
// public void process(JsonNode node) {
// transform(MetaWrapper.getData(node));
// }
//
//
// /** Transformation instructions to migrate to the next version */
// protected abstract void transform(JsonNode node);
//
// }
//
// Path: src/main/java/de/galan/verjson/step/validation/Validation.java
// public class Validation implements Step {
//
// protected final static String LS = StandardSystemProperty.LINE_SEPARATOR.value();
//
// String description;
// JsonSchema schema; // thread-safe
// static JsonSchemaFactory factory; // cached
//
//
// public Validation(String schema) {
// this(schema, null);
// }
//
//
// public Validation(String schema, String description) {
// this.description = description;
// this.schema = create(schema);
// }
//
//
// public String getDescription() {
// return description;
// }
//
//
// protected String getDescriptionAppendable() {
// return isBlank(getDescription()) ? EMPTY : (" (" + getDescription() + ")");
// }
//
//
// protected static synchronized JsonSchemaFactory getFactory() {
// if (factory == null) {
// factory = JsonSchemaFactory.byDefault();
// }
// return factory;
// }
//
//
// protected JsonSchemaFactory getJsonSchemaFactory() {
// return getFactory();
// }
//
//
// @Override
// public void process(JsonNode node) throws ProcessStepException {
// validate(MetaWrapper.getData(node));
// }
//
//
// protected JsonSchema getSchema() {
// return schema;
// }
//
//
// public void validate(JsonNode node) throws InvalidJsonException {
// ProcessingReport report = null;
// try {
// report = getSchema().validate(node);
// }
// catch (Throwable ex) {
// throw new InvalidJsonException("Could not validate JSON against schema" + getDescriptionAppendable(), ex);
// }
// if (!report.isSuccess()) {
//
// StringBuilder builder = new StringBuilder();
// builder.append("Could not validate JSON against schema");
// builder.append(getDescriptionAppendable());
// builder.append(":");
// builder.append(LS);
// List<ProcessingMessage> messages = Lists.newArrayList(report);
// for (int i = 0; i < messages.size(); i++) {
// builder.append("- ");
// builder.append(messages.get(i).getMessage());
// builder.append(i == (messages.size() - 1) ? EMPTY : LS);
// }
// throw new InvalidJsonException(builder.toString());
// }
// }
//
//
// public JsonSchema create(String schemaString) {
// JsonSchema jsonSchema = null;
// try {
// JsonNode schemaNode = JsonLoader.fromString(schemaString);
// if (!getJsonSchemaFactory().getSyntaxValidator().schemaIsValid(schemaNode)) {
// throw new InvalidSchemaException("JSON Schema is invalid" + getDescriptionAppendable());
// }
// jsonSchema = getJsonSchemaFactory().getJsonSchema(schemaNode);
// }
// catch (NullPointerException | IOException | ProcessingException ex) {
// throw new InvalidSchemaException("JSON Schema could not be loaded" + getDescriptionAppendable(), ex);
// }
// return jsonSchema;
// }
//
// }
// Path: src/test/java/de/galan/verjson/core/AbstractStepSequencerParent.java
import org.junit.Before;
import de.galan.commons.test.AbstractTestParent;
import de.galan.verjson.DummyTransformation;
import de.galan.verjson.DummyValidation;
import de.galan.verjson.step.IncrementVersionStep;
import de.galan.verjson.step.NoopStep;
import de.galan.verjson.step.Step;
import de.galan.verjson.step.transformation.Transformation;
import de.galan.verjson.step.validation.Validation;
package de.galan.verjson.core;
/**
* Abstract parent for StepSequencer tests
*
* @author daniel
*/
public class AbstractStepSequencerParent extends AbstractTestParent {
DefaultStepSequencer ss;
Validation validate;
Transformation transform;
|
IncrementVersionStep increment;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.