index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/GCEventType.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import org.eclipse.jifa.gclog.event.evnetInfo.GCEventLevel;
import org.eclipse.jifa.gclog.event.evnetInfo.GCPause;
import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.eclipse.jifa.gclog.event.evnetInfo.GCEventLevel.EVENT;
import static org.eclipse.jifa.gclog.event.evnetInfo.GCEventLevel.PHASE;
import static org.eclipse.jifa.gclog.event.evnetInfo.GCPause.*;

/*
 * store all info that may affect the way model is organized and explained
 *
 * GCEventType is a closed registry of singleton descriptors, one per kind of
 * event/phase/subphase that can appear in a GC log. Every instance constructed
 * here registers itself in the static allEventTypes list (see the constructor),
 * and comparisons elsewhere in this class rely on reference identity (==), so
 * the set of instances is expected to be fixed at class-initialization time.
 * Declaration order below matters: the shared parent arrays (PARENT_*) must be
 * initialized after the "external" event types they reference and before the
 * phase types that reference them.
 */
public class GCEventType {
    // Registry of every GCEventType ever constructed; populated by the constructor.
    // NOTE(review): mutable and handed out as-is by getAllEventTypes() — callers
    // could corrupt the registry; presumably callers only read it — verify.
    private static List<GCEventType> allEventTypes = new ArrayList<>();
    // Human-readable name as shown to users (also used as map keys by callers of getName()).
    private String name;
    // Whether this event fully pauses the application, is concurrent, or partially pauses it.
    private GCPause pause;
    // The possible parent event types when this type occurs as a phase/subphase;
    // null for top-level events. Compared by array identity in isMainPauseEventType().
    private GCEventType[] phaseParentEventType;
    // Granularity of this type: EVENT (top level), PHASE, or SUBPHASE.
    private GCEventLevel level;
    private List<GCCollectorType> gcs; // which gcs do this event type may occur?

    // Fallback type for events the parser could not classify.
    public static final GCEventType UNDEFINED = new GCEventType("Undefined", GCPause.PARTIAL, new GCCollectorType[]{});

    // shared gc arrays
    private static GCCollectorType[] SERIAL = new GCCollectorType[]{GCCollectorType.SERIAL};
    private static GCCollectorType[] PARALLEL = new GCCollectorType[]{GCCollectorType.PARALLEL};
    private static GCCollectorType[] CMS = new GCCollectorType[]{GCCollectorType.CMS};
    private static GCCollectorType[] G1 = new GCCollectorType[]{GCCollectorType.G1};
    private static GCCollectorType[] SHENANDOAH = new GCCollectorType[]{GCCollectorType.SHENANDOAH};
    private static GCCollectorType[] ZGC = new GCCollectorType[]{GCCollectorType.ZGC};
    private static GCCollectorType[] SERIAL_AND_CMS = new GCCollectorType[]{GCCollectorType.SERIAL, GCCollectorType.CMS};
    private static GCCollectorType[] ALL_GCS = new GCCollectorType[]{GCCollectorType.SERIAL, GCCollectorType.PARALLEL,
            GCCollectorType.G1, GCCollectorType.SHENANDOAH, GCCollectorType.ZGC, GCCollectorType.CMS, GCCollectorType.UNKNOWN};
    private static GCCollectorType[] GENERATIONAL_GCS = new GCCollectorType[]{GCCollectorType.SERIAL, GCCollectorType.PARALLEL,
            GCCollectorType.G1, GCCollectorType.CMS, GCCollectorType.UNKNOWN};

    // external event types
    // Top-level collections reported by generational collectors.
    public static final GCEventType YOUNG_GC = new GCEventType("Young GC", PAUSE, GENERATIONAL_GCS);
    public static final GCEventType G1_MIXED_GC = new GCEventType("Mixed GC", PAUSE, G1);
    public static final GCEventType FULL_GC = new GCEventType("Full GC", PAUSE, GENERATIONAL_GCS);
    // Concurrent "old" cycles; these act as parents for the phase types below.
    public static final GCEventType G1_CONCURRENT_CYCLE = new GCEventType("Concurrent Mark Cycle", GCPause.PARTIAL, G1);
    public static final GCEventType G1_CONCURRENT_UNDO_CYCLE = new GCEventType("Concurrent Undo Cycle", GCPause.PARTIAL, G1);
    public static final GCEventType CMS_CONCURRENT_MARK_SWEPT = new GCEventType("CMS", GCPause.PARTIAL, CMS);
    public static final GCEventType ZGC_GARBAGE_COLLECTION = new GCEventType("Garbage Collection", PARTIAL, ZGC);

    // shared parent
    // Identity of these arrays is significant: isMainPauseEventType() compares
    // phaseParentEventType against PARENT_ZGC / PARENT_CONCURRENT_MARK_CYCLE with ==.
    private static final GCEventType[] PARENT_CONCURRENT_MARK_CYCLE = {G1_CONCURRENT_CYCLE, CMS_CONCURRENT_MARK_SWEPT, ZGC_GARBAGE_COLLECTION};
    private static final GCEventType[] PARENT_YOUNG_OLD_FULL_GC = {YOUNG_GC, FULL_GC, G1_MIXED_GC};
    private static final GCEventType[] PARENT_ZGC = {ZGC_GARBAGE_COLLECTION};

    // internal phase types
    // shared by serial and cms
    public static final GCEventType SERIAL_MARK_LIFE_OBJECTS = new GCEventType("Mark live objects", PAUSE, PARENT_YOUNG_OLD_FULL_GC, SERIAL_AND_CMS);
    public static final GCEventType SERIAL_COMPUTE_NEW_OBJECT_ADDRESSES = new GCEventType("Compute new object addresses", PAUSE, PARENT_YOUNG_OLD_FULL_GC, SERIAL_AND_CMS);
    public static final GCEventType SERIAL_ADJUST_POINTERS = new GCEventType("Adjust pointers", PAUSE, PARENT_YOUNG_OLD_FULL_GC, SERIAL_AND_CMS);
    public static final GCEventType SERIAL_MOVE_OBJECTS = new GCEventType("Move objects", PAUSE, PARENT_YOUNG_OLD_FULL_GC, SERIAL_AND_CMS);
    public static final GCEventType WEAK_REFS_PROCESSING = new GCEventType("Reference Processing", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, CMS);

    // Parallel
    public static final GCEventType PARALLEL_PHASE_MARKING = new GCEventType("Marking Phase", PAUSE, PARENT_YOUNG_OLD_FULL_GC, PARALLEL);
    public static final GCEventType PARALLEL_PHASE_SUMMARY = new GCEventType("Summary Phase", PAUSE, PARENT_YOUNG_OLD_FULL_GC, PARALLEL);
    public static final GCEventType PARALLEL_PHASE_ADJUST_ROOTS = new GCEventType("Adjust Roots", PAUSE, PARENT_YOUNG_OLD_FULL_GC, PARALLEL);
    public static final GCEventType PARALLEL_PHASE_COMPACTION = new GCEventType("Compaction Phase", PAUSE, PARENT_YOUNG_OLD_FULL_GC, PARALLEL);
    public static final GCEventType PARALLEL_PHASE_POST_COMPACT = new GCEventType("Post Compact", PAUSE, PARENT_YOUNG_OLD_FULL_GC, PARALLEL);

    // G1
    public static final GCEventType G1_COLLECT_PRE_EVACUATION = new GCEventType("Pre Evacuate Collection Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_MERGE_HEAP_ROOTS = new GCEventType("Merge Heap Roots", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_COLLECT_EVACUATION = new GCEventType("Evacuate Collection Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_COLLECT_POST_EVACUATION = new GCEventType("Post Evacuate Collection Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_COLLECT_OTHER = new GCEventType("Other", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_CONCURRENT_CLEAR_CLAIMED_MARKS = new GCEventType("Concurrent Clear Claimed Marks", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_SCAN_ROOT_REGIONS = new GCEventType("Root Region Scanning", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_MARK_FROM_ROOTS = new GCEventType("Concurrent Mark From Roots", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_PRECLEAN = new GCEventType("Concurrent Preclean", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_MARK = new GCEventType("Concurrent Mark", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_MARK_RESET_FOR_OVERFLOW = new GCEventType("Concurrent Mark Reset For Overflow", CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_REMARK = new GCEventType("Pause Remark", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_REBUILD_REMEMBERED_SETS = new GCEventType("Concurrent Rebuild Remembered Sets", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_PAUSE_CLEANUP = new GCEventType("Pause Cleanup", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, G1);
    public static final GCEventType G1_CONCURRENT_CLEANUP_FOR_NEXT_MARK = new GCEventType("Concurrent Cleanup", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    // Subphases of the G1 remark pause.
    public static final GCEventType G1_FINALIZE_MARKING = new GCEventType("Finalize Marking", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, G1);
    public static final GCEventType G1_UNLOADING = new GCEventType("Class Unloading", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, G1);
    public static final GCEventType G1_GC_REFPROC = new GCEventType("Reference Processing", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, G1);
    public static final GCEventType G1_CONCURRENT_MARK_ABORT = new GCEventType("Concurrent Mark Abort", CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, G1);
    // G1 full-gc phases.
    public static final GCEventType G1_MARK_LIVE_OBJECTS = new GCEventType("Mark Live Objects", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_PREPARE_FOR_COMPACTION = new GCEventType("Prepare for Compaction", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_ADJUST_POINTERS = new GCEventType("Adjust Pointers", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_COMPACT_HEAP = new GCEventType("Compact Heap", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    // G1 young/mixed evacuation subtasks.
    public static final GCEventType G1_EXT_ROOT_SCANNING = new GCEventType("Ext Root Scanning", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_UPDATE_RS = new GCEventType("Update Remember Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_SCAN_RS = new GCEventType("Scan Remember Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_CODE_ROOT_SCANNING = new GCEventType("Code Root Scanning", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_OBJECT_COPY = new GCEventType("Object Copy", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_TERMINATION = new GCEventType("Termination", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_CODE_ROOT_FIXUP = new GCEventType("Code Root Fixup", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_CODE_ROOT_PURGE = new GCEventType("Code Root Purge", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_CLEAR_CT = new GCEventType("Clear Card Table", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_CHOOSE_CSET = new GCEventType("Choose Collection Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_EVACUATION_FAILURE = new GCEventType("Evacuation Failure", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_REF_ENQ = new GCEventType("Ref Enq", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_REDIRTY_CARDS = new GCEventType("Redirty Cards", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_HUMONGOUS_REGISTER = new GCEventType("Humongous Register", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_HUMONGOUS_RECLAIM = new GCEventType("Humongous Reclaim", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);
    public static final GCEventType G1_FREE_CSET = new GCEventType("Free Collection Set", PAUSE, PARENT_YOUNG_OLD_FULL_GC, G1);

    // CMS
    public static final GCEventType CMS_INITIAL_MARK = new GCEventType("Initial Mark", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_PRECLEAN = new GCEventType("Concurrent Preclean", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_ABORTABLE_PRECLEAN = new GCEventType("Concurrent Abortable preclean", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_MARK = new GCEventType("Concurrent Mark", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_FINAL_REMARK = new GCEventType("Final Remark", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_SWEEP = new GCEventType("Concurrent Sweep", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_RESET = new GCEventType("Concurrent Reset", GCPause.CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_INTERRUPTED = new GCEventType("Concurrent Mode Interrupted", CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    public static final GCEventType CMS_CONCURRENT_FAILURE = new GCEventType("Concurrent Mode Failure", CONCURRENT, PARENT_CONCURRENT_MARK_CYCLE, CMS);
    // Subphases of the CMS final remark pause.
    public static final GCEventType CMS_RESCAN = new GCEventType("Rescan", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, CMS);
    public static final GCEventType CMS_CLASS_UNLOADING = new GCEventType("Class unloading", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, CMS);
    public static final GCEventType CMS_SCRUB_SYMBOL_TABLE = new GCEventType("Scrub Symbol Table", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, CMS);
    public static final GCEventType CMS_SCRUB_STRING_TABLE = new GCEventType("Scrub String Table", PAUSE, PARENT_CONCURRENT_MARK_CYCLE, GCEventLevel.SUBPHASE, CMS);

    // ZGC
    public static final GCEventType ZGC_PAUSE_MARK_START = new GCEventType("Pause Mark Start", PAUSE, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_MARK = new GCEventType("Concurrent Mark", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_PAUSE_MARK_END = new GCEventType("Pause Mark End", PAUSE, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_NONREF = new GCEventType("Concurrent Process Non-Strong References", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_RESET_RELOC_SET = new GCEventType("Concurrent Reset Relocation Set", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_DETATCHED_PAGES = new GCEventType("Concurrent Destroy Detached Pages", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_SELECT_RELOC_SET = new GCEventType("Concurrent Select Relocation Set", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_PREPARE_RELOC_SET = new GCEventType("Concurrent Prepare Relocation Set", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_PAUSE_RELOCATE_START = new GCEventType("Pause Relocate Start", PAUSE, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_CONCURRENT_RELOCATE = new GCEventType("Concurrent Relocate", CONCURRENT, PARENT_ZGC, ZGC);
    public static final GCEventType ZGC_ALLOCATION_STALL = new GCEventType("Allocation Stall", PAUSE, ZGC);

    // other
    public static final GCEventType SAFEPOINT = new GCEventType("Safepoint", PAUSE, ALL_GCS);
    public static final GCEventType OUT_OF_MEMORY = new GCEventType("Out Of Memory", PAUSE, ALL_GCS);

    // True for collections that may promote objects from young to old generation.
    public boolean hasObjectPromotion() {
        return this == YOUNG_GC || this == G1_MIXED_GC;
    }

    public GCPause getPause() {
        return pause;
    }

    public String getName() {
        return name;
    }

    // NOTE(review): returns the internal array without copying; callers must not mutate it.
    public GCEventType[] getPhaseParentEventType() {
        return phaseParentEventType;
    }

    // construction from outside not allowed, all instances are created in advance
    // Top-level event: no parent, level EVENT.
    private GCEventType(String name, GCPause pause, GCCollectorType[] gcs) {
        this(name, pause, null, EVENT, gcs);
    }

    // Phase with known parent(s); level derived from whether a parent is given.
    private GCEventType(String name, GCPause pause, GCEventType[] phaseParentEventType, GCCollectorType[] gcs) {
        this(name, pause, phaseParentEventType, phaseParentEventType == null ? EVENT : GCEventLevel.PHASE, gcs);
    }

    // NOTE(review): declared public although the comment above states construction
    // from outside is not allowed — confirm whether any external caller depends on
    // this before narrowing it to private.
    // Side effect: registers the new instance in the global allEventTypes list.
    public GCEventType(String name, GCPause pause, GCEventType[] phaseParentEventType, GCEventLevel level, GCCollectorType[] gcs) {
        this.name = name;
        this.pause = pause;
        this.phaseParentEventType = phaseParentEventType;
        this.level = level;
        this.gcs = Arrays.asList(gcs);
        allEventTypes.add(this);
    }

    @Override
    public String toString() {
        return name;
    }

    public GCEventLevel getLevel() {
        return level;
    }

    public List<GCCollectorType> getGcs() {
        return gcs;
    }

    // NOTE(review): exposes the mutable registry directly; callers are expected to treat it as read-only.
    public static List<GCEventType> getAllEventTypes() {
        return allEventTypes;
    }

    // A "main" pause: either a top-level pausing event, or a pausing phase of a
    // concurrent cycle / ZGC collection (identified by parent-array identity).
    public boolean isMainPauseEventType() {
        if (getPause() != PAUSE) {
            return false;
        }
        if (level == EVENT) {
            return true;
        }
        return level == PHASE && (phaseParentEventType == PARENT_ZGC || phaseParentEventType == PARENT_CONCURRENT_MARK_CYCLE);
    }

    public boolean isYoungGC() {
        return this == GCEventType.YOUNG_GC || this == GCEventType.G1_MIXED_GC;
    }

    public boolean isOldGC() {
        return this == GCEventType.G1_CONCURRENT_CYCLE || this == GCEventType.CMS_CONCURRENT_MARK_SWEPT;
    }

    public boolean isFullGC() {
        return this == GCEventType.FULL_GC || this == ZGC_GARBAGE_COLLECTION;
    }

    // Event types that usually indicate a GC problem (failures, stalls, OOM, full GCs).
    public static List<GCEventType> badEventTypes = List.of(
            FULL_GC,
            G1_CONCURRENT_MARK_ABORT,
            G1_EVACUATION_FAILURE,
            CMS_CONCURRENT_FAILURE,
            CMS_CONCURRENT_INTERRUPTED,
            OUT_OF_MEMORY,
            ZGC_ALLOCATION_STALL,
            G1_CONCURRENT_MARK_RESET_FOR_OVERFLOW);

    public boolean isBad() {
        return badEventTypes.contains(this);
    }
}
3,100
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/GCModel.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.model; import io.vertx.ext.web.impl.ConcurrentLRUCache; import lombok.Data; import lombok.NoArgsConstructor; import lombok.ToString; import org.eclipse.jifa.common.listener.ProgressListener; import org.eclipse.jifa.common.request.PagingRequest; import org.eclipse.jifa.common.vo.PageView; import org.eclipse.jifa.gclog.diagnoser.AnalysisConfig; import org.eclipse.jifa.gclog.diagnoser.EventAbnormalDetector; import org.eclipse.jifa.gclog.diagnoser.GlobalDiagnoseInfo; import org.eclipse.jifa.gclog.diagnoser.GlobalDiagnoser; import org.eclipse.jifa.gclog.event.*; import org.eclipse.jifa.gclog.event.evnetInfo.*; import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType; import org.eclipse.jifa.gclog.model.modeInfo.GCLogMetadata; import org.eclipse.jifa.gclog.model.modeInfo.GCLogStyle; import org.eclipse.jifa.gclog.model.modeInfo.VmOptions; import org.eclipse.jifa.gclog.util.Constant; import org.eclipse.jifa.gclog.util.DoubleData; import org.eclipse.jifa.gclog.util.LongData; import org.eclipse.jifa.gclog.vo.*; import org.eclipse.jifa.gclog.vo.MemoryStatistics.MemoryStatisticsItem; import org.eclipse.jifa.gclog.vo.PhaseStatistics.ParentStatisticsInfo; import org.eclipse.jifa.gclog.vo.PhaseStatistics.PhaseStatisticItem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Consumer; import java.util.function.Predicate; import 
java.util.stream.Collectors; import java.util.stream.Stream; import static org.eclipse.jifa.gclog.event.evnetInfo.MemoryArea.*; import static org.eclipse.jifa.gclog.model.GCEventType.*; import static org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType.*; /** * GCModel contains all direct information from log and analysed data for query */ public abstract class GCModel { protected static final Logger LOGGER = LoggerFactory.getLogger(GCModel.class); // These 3 event lists below are used to support events like young/mixed/old/full. Other events. like // safepoint, allocation stall will be save in other lists. gcEvents and allEvents may be used in parsing. // When calculating derived info, gcEvents may be transformed, and allEvents and gcCollectionEvents will be // rebuilt. private List<GCEvent> gcEvents = new ArrayList<>(); // store parent events only private List<GCEvent> allEvents = new ArrayList<>(); // store all events, order by their appearance in log private List<GCEvent> gcCollectionEvents = new ArrayList<>(); // store events that contain collection info private List<Safepoint> safepoints = new ArrayList<>(); private List<ThreadEvent> ooms = new ArrayList<>(); // time from beginning of program private double startTime = Constant.UNKNOWN_DOUBLE; private double endTime = Constant.UNKNOWN_DOUBLE; private int parallelThread = Constant.UNKNOWN_INT; private int concurrentThread = Constant.UNKNOWN_INT; // in ms. referenceTimestamp + uptime of events is the true timestamp of events. 
// notice that uptime may not begin from 0 private double referenceTimestamp = Constant.UNKNOWN_DOUBLE; //shared basic info among different collectors private VmOptions vmOptions; private GCCollectorType collectorType; private GCLogStyle logStyle; private GCLogMetadata metadata; private ConcurrentLRUCache<AnalysisConfig, GlobalDiagnoseInfo> globalDiagnoseInfoCache = new ConcurrentLRUCache<>(3); private boolean metaspaceCapacityReliable = false; public GCModel() { } public GCModel(GCCollectorType collectorType) { this.collectorType = collectorType; } public void setCollectorType(GCCollectorType collectorType) { this.collectorType = collectorType; } public GCCollectorType getCollectorType() { return collectorType; } public GCLogStyle getLogStyle() { return logStyle; } public void setLogStyle(GCLogStyle logStyle) { this.logStyle = logStyle; } public List<GCEvent> getGcCollectionEvents() { return gcCollectionEvents; } public double getStartTime() { return startTime; } public double getEndTime() { return endTime; } private static final double START_TIME_ZERO_THRESHOLD = 60000; public void setStartTime(double startTime) { if (startTime < START_TIME_ZERO_THRESHOLD) { startTime = 0; } this.startTime = startTime; } public boolean isGenerational() { return collectorType != ZGC; } public boolean isPauseless() { return collectorType == ZGC; } public List<GCEvent> getAllEvents() { return allEvents; } public void setEndTime(double endTime) { this.endTime = endTime; } public double getDuration() { return getEndTime() - getStartTime(); } public boolean isEmpty() { return gcEvents.isEmpty(); } public List<GCEvent> getGcEvents() { return gcEvents; } public void setGcEvents(List<GCEvent> gcEvents) { this.gcEvents = gcEvents; } public GCEvent createAndGetEvent() { GCEvent event = new GCEvent(); gcEvents.add(event); return event; } public boolean hasHumongousArea() { return collectorType == G1 && logStyle == GCLogStyle.UNIFIED; } public boolean hasOldGC() { return collectorType == G1 
|| collectorType == CMS; } public <T extends TimedEvent> void iterateEventsWithinTimeRange(List<T> eventList, TimeRange range, Consumer<T> consumer) { int indexLow = binarySearchEventIndex(eventList, range.getStart(), true); int indexHigh = binarySearchEventIndex(eventList, range.getEnd(), false); for (int i = indexLow; i < indexHigh; i++) { consumer.accept(eventList.get(i)); } } // Return index of the first event after time if searchLow, first event after time if !searchLow . // eventList must be ordered by startTime. private int binarySearchEventIndex(List<? extends TimedEvent> eventList, double time, boolean searchLow) { if (searchLow && time <= getStartTime()) { return 0; } else if (!searchLow && time >= getEndTime()) { return eventList.size(); } TimedEvent eventForSearching = new TimedEvent(time); int result = Collections.binarySearch(eventList, eventForSearching, Comparator.comparingDouble(TimedEvent::getStartTime)); if (result < 0) { return -(result + 1); } else { if (searchLow) { while (result >= 0 && eventList.get(result).getStartTime() >= time) { result--; } return result + 1; } else { while (result < eventList.size() && eventList.get(result).getStartTime() <= time) { result++; } return result; } } } public List<Safepoint> getSafepoints() { return safepoints; } public void addSafepoint(Safepoint safepoint) { safepoints.add(safepoint); } public List<ThreadEvent> getOoms() { return ooms; } public void addOom(ThreadEvent oom) { ooms.add(oom); } private TimeRange makeValidTimeRange(TimeRange range) { if (range == null) { return new TimeRange(getStartTime(), getEndTime()); } double start = Math.max(range.getStart(), getStartTime()); double end = Math.min(range.getEnd(), getEndTime()); return new TimeRange(start, end); } private void putPhaseStatisticData(GCEvent event, String name, Map<String, DoubleData[]> map, boolean phase) { DoubleData[] data = map.getOrDefault(name, null); if (data == null) { data = new DoubleData[2]; data[0] = new DoubleData(); data[1] = 
new DoubleData(); map.put(name, data); } data[0].add(phase ? event.getInterval() : event.getCauseInterval()); data[1].add(event.getDuration()); } private PhaseStatisticItem makePhaseStatisticItem(String name, DoubleData[] data) { return new PhaseStatisticItem(name, data[1].getN(), data[0].average(), data[0].getMin() , data[1].average(), data[1].getMax(), data[1].getSum()); } public PhaseStatistics getPhaseStatistics(TimeRange range) { range = makeValidTimeRange(range); List<GCEventType> parents = getParentEventTypes(); // DoubleData[] is an array of interval and duration Map<String, DoubleData[]> parentData = new HashMap<>(); List<Map<String, DoubleData[]>> phaseData = new ArrayList<>(); List<Map<String, DoubleData[]>> causeData = new ArrayList<>(); for (int i = 0; i < parents.size(); i++) { phaseData.add(new HashMap<>()); causeData.add(new HashMap<>()); } iterateEventsWithinTimeRange(gcEvents, range, event -> { int index = parents.indexOf(event.getEventType()); if (index < 0) { return; } putPhaseStatisticData(event, event.getEventType().getName(), parentData, true); if (event.getCause() != null) { putPhaseStatisticData(event, event.getCause().getName(), causeData.get(index), false); } event.phasesDoDFS(phase -> putPhaseStatisticData(phase, phase.getEventType().getName(), phaseData.get(index), true)); }); List<ParentStatisticsInfo> result = new ArrayList<>(); for (int i = 0; i < parents.size(); i++) { String name = parents.get(i).getName(); if (parentData.containsKey(name)) { result.add(new ParentStatisticsInfo( makePhaseStatisticItem(parents.get(i).getName(), parentData.get(name)), phaseData.get(i).entrySet().stream().map(entry -> makePhaseStatisticItem(entry.getKey(), entry.getValue())).collect(Collectors.toList()), causeData.get(i).entrySet().stream().map(entry -> makePhaseStatisticItem(entry.getKey(), entry.getValue())).collect(Collectors.toList()) )); } } return new PhaseStatistics(result); } public PauseStatistics getPauseStatistics(TimeRange range) { range = 
makeValidTimeRange(range); DoubleData pause = new DoubleData(true); iterateEventsWithinTimeRange(gcEvents, range, e -> { e.pauseEventOrPhasesDo(event -> pause.add(event.getPause())); }); return new PauseStatistics( pause.getN() == 0 ? Constant.UNKNOWN_DOUBLE : 1 - pause.getSum() / range.length(), pause.average(), pause.getMedian(), pause.getPercentile(0.99), pause.getPercentile(0.999), pause.getMax()); } public Map<String, int[]> getPauseDistribution(TimeRange range, int[] partitions) { range = makeValidTimeRange(range); Map<String, int[]> distribution = new HashMap<>(); iterateEventsWithinTimeRange(gcEvents, range, e -> { e.pauseEventOrPhasesDo(event -> { if (event.getPause() >= 0) { String eventType = event.getEventType().getName(); int pause = (int) event.getPause(); int index = Arrays.binarySearch(partitions, pause); if (index < 0) { index = -index - 2; } if (index < 0) { return; } int[] nums = distribution.getOrDefault(eventType, null); if (nums == null) { nums = new int[partitions.length]; distribution.put(eventType, nums); } nums[index]++; } }); }); return distribution; } public MemoryStatistics getMemoryStatistics(TimeRange range) { range = makeValidTimeRange(range); // 1st dimension is generation, see definition of MemoryStatistics // 2nd dimension is capacityAvg, usedMax, usedAvgAfterFullGC,usedAvgAfterOldGC see definition of MemoryStatisticsItem // usedAvgAfterOldGC is more complicated, will deal with it afterwards LongData[][] data = new LongData[5][4]; MemoryArea[] generations = {YOUNG, OLD, HUMONGOUS, HEAP, METASPACE}; for (int i = 0; i < 5; i++) { for (int j = 0; j < 4; j++) { data[i][j] = new LongData(); } } iterateEventsWithinTimeRange(gcCollectionEvents, range, event -> { for (int genIndex = 0; genIndex < generations.length; genIndex++) { MemoryArea generation = generations[genIndex]; GCMemoryItem memory = event.getMemoryItem(generation); if (memory != null) { data[genIndex][0].add(memory.getPostCapacity()); 
data[genIndex][1].add(Math.max(memory.getPreUsed(), memory.getPostUsed())); if (event.isFullGC() && generation != YOUNG) { data[genIndex][2].add(memory.getPostUsed()); } } } }); calculateUsedAvgAfterOldGC(range, data); // generate result MemoryStatistics statistics = new MemoryStatistics(); statistics.setYoung(new MemoryStatisticsItem((long) data[0][0].average(), data[0][1].getMax(), Constant.UNKNOWN_LONG, Constant.UNKNOWN_LONG)); statistics.setOld(new MemoryStatisticsItem((long) data[1][0].average(), data[1][1].getMax(), (long) data[1][2].average(), (long) data[1][3].average())); statistics.setHumongous(new MemoryStatisticsItem((long) data[2][0].average(), data[2][1].getMax(), (long) data[2][2].average(), (long) data[2][3].average())); statistics.setHeap(new MemoryStatisticsItem((long) data[3][0].average(), data[3][1].getMax(), (long) data[3][2].average(), (long) data[3][3].average())); statistics.setMetaspace(new MemoryStatisticsItem(Constant.UNKNOWN_LONG, data[4][1].getMax(), (long) data[4][2].average(), (long) data[4][3].average())); // Metaspace capacity printed in gclog may be reserve space rather than commit size, so we // try to read it from vm option if (isMetaspaceCapacityReliable()) { statistics.getMetaspace().setCapacityAvg((long) data[4][0].average()); } else if (vmOptions != null) { statistics.getMetaspace().setCapacityAvg(vmOptions.getMetaspaceSize()); } return statistics; } protected void calculateUsedAvgAfterOldGC(TimeRange range, LongData[][] data) { // for overriding } public ObjectStatistics getObjectStatistics(TimeRange range) { range = makeValidTimeRange(range); LongData allocation = new LongData(); LongData promotion = new LongData(); iterateEventsWithinTimeRange(gcCollectionEvents, range, event -> { allocation.add(event.getAllocation()); promotion.add(event.getPromotion()); }); return new ObjectStatistics( allocation.getSum() != Constant.UNKNOWN_DOUBLE ? 
allocation.getSum() / range.length() : Constant.UNKNOWN_DOUBLE,
        promotion.getSum() != Constant.UNKNOWN_DOUBLE ? promotion.getSum() / range.length() : Constant.UNKNOWN_DOUBLE,
        (long) promotion.average(),
        promotion.getMax()
); }

// decide start and end time using events
// Sorts events by start time, then extends this model's end time to cover the
// last event (including its last phase, if any).
public void autoDecideStartEndTime() {
    gcEvents.sort(Comparator.comparingDouble(GCEvent::getStartTime));
    if (gcEvents.size() == 0) {
        return;
    }
    GCEvent event = gcEvents.get(gcEvents.size() - 1);
    double endTime = event.getEndTime();
    if (event.hasPhases()) {
        // a phase may end later than its parent event's recorded end time
        endTime = Math.max(endTime, event.getPhases().get(event.getPhases().size() - 1).getEndTime());
    }
    setEndTime(Math.max(this.endTime, endTime));
}

/**
 * Builds time-series data for the frontend graph. Each requested dataType is
 * dispatched by naming convention: "...Used"/"...Capacity" -> memory series,
 * "promotion"/"reclamation" -> dedicated series, anything else is treated as
 * an event-type name and mapped to a duration series.
 */
public Map<String, List<Object[]>> getTimeGraphData(String[] dataTypes) {
    Map<String, List<Object[]>> result = new LinkedHashMap<>();
    for (String dataType : dataTypes) {
        if (dataType.endsWith("Used") || dataType.endsWith("Capacity")) {
            result.put(dataType, getTimeGraphMemoryData(dataType));
        } else if (dataType.equals("promotion")) {
            result.put(dataType, getTimeGraphPromotionData());
        } else if (dataType.equals("reclamation")) {
            result.put(dataType, getTimeGraphReclamationData());
        } else {
            result.put(dataType, getTimeGraphDurationData(dataType));
        }
    }
    return result;
}

// Produces (timestamp, bytes) pairs for one memory area. For "Used" both the
// pre-GC and post-GC values are emitted (two points per event); for
// "Capacity" only the post-GC capacity is emitted.
private List<Object[]> getTimeGraphMemoryData(String dataType) {
    boolean used = dataType.endsWith("Used");
    // strip the "Used"/"Capacity" suffix to recover the area name
    String areString = dataType.substring(0, dataType.length() - (used ? "Used" : "Capacity").length());
    MemoryArea area = MemoryArea.getMemoryArea(areString);
    List<Object[]> result = new ArrayList<>();
    for (GCEvent event : this.gcCollectionEvents) {
        GCMemoryItem memory = event.getMemoryItem(area);
        if (memory == null) {
            continue;
        }
        if (used) {
            if (memory.getPreUsed() != Constant.UNKNOWN_LONG) {
                result.add(new Object[]{(long) event.getStartTime(), memory.getPreUsed()});
            }
            if (memory.getPostUsed() != Constant.UNKNOWN_LONG) {
                result.add(new Object[]{(long) event.getEndTime(), memory.getPostUsed()});
            }
        } else {
            if (memory.getPostCapacity() != Constant.UNKNOWN_LONG) {
                result.add(new Object[]{(long) event.getEndTime(), memory.getPostCapacity()});
            }
        }
    }
    // events were iterated per-event; re-sort points by timestamp
    result.sort(Comparator.comparingLong(d -> (long) d[0]));
    return result;
}

// (startTime, promotion) pairs for all events with a known promotion value.
private List<Object[]> getTimeGraphPromotionData() {
    return allEvents.stream()
            .filter(event -> event.getPromotion() >= 0)
            .map(event -> new Object[]{(long) event.getStartTime(), event.getPromotion()})
            .collect(Collectors.toList());
}

// (startTime, reclamation) pairs for all collection events with a known value.
private List<Object[]> getTimeGraphReclamationData() {
    return gcCollectionEvents.stream()
            .filter(event -> event.getReclamation() != Constant.UNKNOWN_LONG)
            .map(event -> new Object[]{(long) event.getStartTime(), event.getReclamation()})
            .collect(Collectors.toList());
}

// (startTime, duration) pairs for events whose type name matches phaseName.
private List<Object[]> getTimeGraphDurationData(String phaseName) {
    return allEvents.stream()
            .filter(event -> event.getEventType().getName().equals(phaseName) && event.getDuration() != Constant.UNKNOWN_DOUBLE)
            .map(event -> new Object[]{(long) event.getStartTime(), event.getDuration()})
            .collect(Collectors.toList());
}

// Runs the global diagnoser over this model after clamping the configured
// time range to this model's valid range.
public GlobalDiagnoser.GlobalAbnormalInfo getGlobalAbnormalInfo(AnalysisConfig config) {
    config.setTimeRange(makeValidTimeRange(config.getTimeRange()));
    return new GlobalDiagnoser(this, config).diagnose();
}

public long getRecommendMaxHeapSize() {
    // not supported
    return Constant.UNKNOWN_INT;
}

// Registers a top-level event in both the event list and the flat all-events list.
public void putEvent(GCEvent event) {
    gcEvents.add(event);
    allEvents.add(event);
}

// Attaches a phase to its parent event and records it in the flat list.
public void addPhase(GCEvent parent, GCEvent phase) {
    allEvents.add(phase);
    parent.addPhase(phase);
}

/**
 * Post-parsing pipeline: fixes up raw events, rebuilds the flat event list,
 * then derives per-event data (intervals, memory info) and model metadata.
 * NOTE(review): the progressListener parameter is currently unused here —
 * presumably kept for interface compatibility; confirm before removing.
 */
public void calculateDerivedInfo(ProgressListener progressListener) {
    allEvents = null;
    // must be done before other steps
    filterInvalidEvents();
    autoDecideStartEndTime();
    decideAndFixEventInfo();
    // let subclass do something
    doBeforeCalculatingDerivedInfo();
    rebuildEventLists();
    // the structure of gcEvents and allEvents should not change after this line
    // calculate derived data for events themselves
    calculateEventsInterval();
    calculateEventsMemoryInfo();
    // let subclass do something
    doAfterCalculatingDerivedInfo();
    // data in events should not change after this line
    // calculate specific data prepared for route api, order of these calls doesn't matter
    calculateGcModelMetadata();
}

// for implementation
protected void doBeforeCalculatingDerivedInfo() {
}

// for implementation
protected void doAfterCalculatingDerivedInfo() {
}

// Flattens gcEvents plus their phases into allEvents, sorted by start time,
// and assigns each event its index as id.
private void rebuildEventLists() {
    allEvents = new ArrayList<>();
    for (GCEvent event : gcEvents) {
        allEvents.add(event);
        if (event.hasPhases()) {
            allEvents.addAll(event.getPhases());
        }
    }
    allEvents.sort(Comparator.comparingDouble(GCEvent::getStartTime));
    for (int i = 0; i < allEvents.size(); i++) {
        allEvents.get(i).setId(i);
    }
}

// Fills in missing durations of phases and parent events using the start/end
// times of the last phase as a best-effort estimate.
private void decideAndFixEventInfo() {
    for (GCEvent event : gcEvents) {
        List<GCEvent> phases = event.getPhases();
        if (phases == null) {
            continue;
        }
        for (int i = phases.size() - 1; i >= 0; i--) {
            GCEvent phase = phases.get(i);
            if (phase.getDuration() == Constant.UNKNOWN_DOUBLE) {
                //this is unlikely to happen, just give a reasonable value
                phase.setDuration(phases.get(phases.size() - 1).getStartTime() - phase.getStartTime());
            }
        }
        if (event.getDuration() == Constant.UNKNOWN_DOUBLE && getStartTime() != Constant.UNKNOWN_DOUBLE) {
            event.setDuration(phases.get(phases.size() - 1).getEndTime() - event.getStartTime());
        }
    }
}

/**
 * calculate heap size(young, humongous, old, metaspace,total),
 * object allocation, reclamation and promotion
 */
private void calculateEventsMemoryInfo() {
    for (GCEvent event : gcEvents) {
        calculateEventMemoryItems(event);
    }
    gcCollectionEvents.sort(Comparator.comparingDouble(GCEvent::getStartTime));
    // tracks post-GC heap usage of the previous collection, used to derive
    // allocation between adjacent events
    long lastTotalMemory = 0;
    for (GCEvent event : gcCollectionEvents) {
        GCMemoryItem young = event.getMemoryItem(YOUNG);
        GCMemoryItem total = event.getMemoryItem(HEAP);
        GCMemoryItem humongous = event.getMemoryItem(HUMONGOUS);
        // reclamation
        // sometimes it may have been calculated during parsing log
        if (event.getReclamation() == Constant.UNKNOWN_INT &&
                total != null && total.getPreUsed() != Constant.UNKNOWN_INT && total.getPostUsed() != Constant.UNKNOWN_INT) {
            event.setReclamation(zeroIfNegative(total.getPreUsed() - total.getPostUsed()));
        }
        // promotion
        if (event.getPromotion() == Constant.UNKNOWN_INT &&
                event.hasPromotion() && event.getEventType() != G1_MIXED_GC &&
                young != null && total != null) {
            // notice: g1 young mixed gc should have promotion, but we have no way to know it exactly
            long youngReduction = young.getMemoryReduction();
            long totalReduction = total.getMemoryReduction();
            if (youngReduction != Constant.UNKNOWN_INT && totalReduction != Constant.UNKNOWN_INT) {
                // promotion = bytes that left young but did not leave the heap
                long promotion = youngReduction - totalReduction;
                if (humongous != null && humongous.getMemoryReduction() != Constant.UNKNOWN_INT) {
                    promotion -= humongous.getMemoryReduction();
                }
                event.setPromotion(zeroIfNegative(promotion));
            }
        }
        // allocation
        if (event.getAllocation() == Constant.UNKNOWN_INT && total != null && total.getPreUsed() != Constant.UNKNOWN_INT) {
            // As to concurrent event, allocation is composed of two parts: allocation between two adjacent events
            // and during event. If original allocation is not unknown, that value is allocation during event.
            event.setAllocation(zeroIfNegative(
                    zeroIfUnknownInt(event.getAllocation()) + total.getPreUsed() - lastTotalMemory));
            lastTotalMemory = total.getPostUsed();
        }
    }
}

// Maps the UNKNOWN_INT sentinel to 0 so it can participate in sums.
private long zeroIfUnknownInt(long x) {
    return x == Constant.UNKNOWN_INT ? 0 : x;
}

// Clamps derived sizes at 0; negative values can arise from inconsistent logs.
private long zeroIfNegative(long x) {
    return x < 0 ? 0 : x;
}

// Derives missing memory areas (young/heap/old) from the areas that were
// actually parsed, recursing into phases first. Events with any memory info
// are registered in gcCollectionEvents.
private void calculateEventMemoryItems(GCEvent event) {
    event.phasesDoDFS(this::calculateEventMemoryItems);
    if (event.getMemoryItems() == null) {
        return;
    }
    gcCollectionEvents.add(event);
    // hack: Survivor capacity of g1 is not printed in jdk8. Make it equal to pre used so that
    // we can calculate young and old capacity
    if (event.getMemoryItem(SURVIVOR) != null && event.getMemoryItem(SURVIVOR).getPostCapacity() == Constant.UNKNOWN_INT) {
        event.getMemoryItem(SURVIVOR).setPostCapacity(event.getMemoryItem(SURVIVOR).getPreUsed());
    }
    //case 1: know eden and survivor, calculate young
    GCMemoryItem young = event.getMemoryItemOrEmptyObject(EDEN)
            .merge(event.getMemoryItem(SURVIVOR));
    young.setArea(YOUNG);
    event.setMemoryItem(event.getMemoryItemOrEmptyObject(YOUNG)
            .updateIfAbsent(young), true);
    //case 2: know young and old, calculate heap
    GCMemoryItem heap = event.getMemoryItemOrEmptyObject(YOUNG)
            .merge(event.getMemoryItem(OLD))
            .mergeIfPresent(event.getMemoryItem(HUMONGOUS))
            .mergeIfPresent(event.getMemoryItem(ARCHIVE));
    heap.setArea(HEAP);
    event.setMemoryItem(event.getMemoryItemOrEmptyObject(HEAP)
            .updateIfAbsent(heap), true);
    //case 3: know old and heap, calculate young
    young = event.getMemoryItemOrEmptyObject(HEAP)
            .subtract(event.getMemoryItem(OLD))
            .subtractIfPresent(event.getMemoryItem(HUMONGOUS))
            .subtractIfPresent(event.getMemoryItem(ARCHIVE));
    young.setArea(YOUNG);
    event.setMemoryItem(event.getMemoryItemOrEmptyObject(YOUNG)
            .updateIfAbsent(young), true);
    //case 4: know young and heap, calculate old
    GCMemoryItem old = event.getMemoryItemOrEmptyObject(HEAP)
            .subtract(event.getMemoryItem(YOUNG))
            .subtractIfPresent(event.getMemoryItem(HUMONGOUS))
            .subtractIfPresent(event.getMemoryItem(ARCHIVE));
    old.setArea(OLD);
    event.setMemoryItem(event.getMemoryItemOrEmptyObject(OLD)
            .updateIfAbsent(old), true);
    // Although we can calculate metaspace = class + non class, there is no need to do
    // so because when class and non class are known, metaspace must have been known
}

private void filterInvalidEvents() {
    // Sometimes the given log is just a part of the complete log. This may lead to some incomplete events at
    // beginning or end of this log. Such event at beginning is likely to have been dealt by parser, so here we try
    // to deal with the last event
    if (gcEvents.get(gcEvents.size() - 1).getEndTime() == Constant.UNKNOWN_DOUBLE) {
        gcEvents.remove(gcEvents.size() - 1);
    }
}

// All event types that can occur for the given collector.
protected static List<GCEventType> calcAllEventTypes(GCCollectorType collector) {
    return GCEventType.getAllEventTypes().stream()
            .filter(e -> e.getGcs().contains(collector))
            .collect(Collectors.toList());
}

// Event types of the given collector that are stop-the-world pauses.
protected static List<GCEventType> calcPauseEventTypes(GCCollectorType collector) {
    return GCEventType.getAllEventTypes().stream()
            .filter(e -> e.getGcs().contains(collector) && e.getPause() == GCPause.PAUSE)
            .collect(Collectors.toList());
}

// Event types of the given collector considered "main" pause types.
protected static List<GCEventType> calcMainPauseEventTypes(GCCollectorType collector) {
    return GCEventType.getAllEventTypes().stream()
            .filter(e -> e.getGcs().contains(collector) && e.isMainPauseEventType())
            .collect(Collectors.toList());
}

// Parent (top-level) event types supported by the given collector.
protected static List<GCEventType> calcParentEventTypes(GCCollectorType collector) {
    return Stream.of(YOUNG_GC, G1_MIXED_GC, CMS_CONCURRENT_MARK_SWEPT, G1_CONCURRENT_CYCLE, FULL_GC, ZGC_GARBAGE_COLLECTION)
            .filter(e -> e.getGcs().contains(collector))
            .collect(Collectors.toList());
}

// Subclasses expose their collector-specific (usually precomputed) type lists.
protected abstract List<GCEventType> getAllEventTypes();

protected abstract List<GCEventType> getPauseEventTypes();

protected abstract List<GCEventType> getMainPauseEventTypes();

protected abstract List<GCEventType> getParentEventTypes();

protected abstract List<GCEventType> getImportantEventTypes();

// Scans allEvents backwards and returns the latest event matching the condition,
// or null if none matches.
public GCEvent getLastEventWithCondition(Predicate<GCEvent> condition) {
    for (int i = allEvents.size() - 1; i >= 0; i--) {
        GCEvent event = allEvents.get(i);
        if (condition.test(event)) {
            return event;
        }
    }
    return null;
}

// mainly used in jdk8, where gcid may be missing
public GCEvent getLastEventOfType(GCEventType... types) {
    List<GCEventType> typeList = Arrays.asList(types);
    return getLastEventWithCondition(event -> typeList.contains(event.getEventType()));
}

// mainly used in parser of jdk11, where gcid is always logged if tag includes gc
public GCEvent getLastEventOfGCID(int gcid) {
    return getLastEventWithCondition(event -> event.getEventLevel() == GCEventLevel.EVENT && event.getGcid() == gcid);
}

public double getReferenceTimestamp() {
    return referenceTimestamp;
}

public void setReferenceTimestamp(double referenceTimestamp) {
    this.referenceTimestamp = referenceTimestamp;
}

public void setVmOptions(VmOptions vmOptions) {
    this.vmOptions = vmOptions;
}

public VmOptions getVmOptions() {
    return vmOptions;
}

// Computes, per event, the interval since the previous event of the same type,
// and additionally the interval since the previous event with the same cause.
private void calculateEventsInterval() {
    Map<GCEventType, Double> lastEndTime = new HashMap<>();
    Map<GCEventType, Map<GCCause, Double>> lastCauseEndTime = new HashMap<>();
    for (GCEvent event : allEvents) {
        GCEventType eventType = event.getEventType();
        // regard mixed gc as young gc
        if (event.isYoungGC()) {
            eventType = YOUNG_GC;
        }
        if (lastEndTime.containsKey(eventType)) {
            event.setInterval(Math.max(0, event.getStartTime() - lastEndTime.get(eventType)));
        }
        lastEndTime.put(eventType, event.getEndTime());
        GCCause cause = event.getCause();
        if (cause != null) {
            Map<GCCause, Double> map = lastCauseEndTime.getOrDefault(eventType, null);
            if (map == null) {
                map = new HashMap<>();
                lastCauseEndTime.put(eventType, map);
            }
            if (map.containsKey(cause)) {
                event.setCauseInterval(Math.max(0, event.getStartTime() - map.get(cause)));
            }
            map.put(cause, event.getEndTime());
        }
    }
}

// Debug dump of all events and their phases, one per line.
public String toDebugString() {
    StringBuilder sb = new StringBuilder();
    for (GCEvent event : gcEvents) {
        sb.append(event.toDebugString(this)).append("\n");
        event.phasesDoDFS(phase -> sb.append("    ").append(phase.toDebugString(this)).append("\n"));
    }
    return sb.toString();
}

// Full GC is considered avoidable except for Serial/Parallel/Unknown collectors.
public boolean shouldAvoidFullGC() {
    return collectorType != SERIAL && collectorType != PARALLEL && collectorType != UNKNOWN;
}

public List<GCEventVO> getEventsVO(List<GCEvent> events, AnalysisConfig config) {
    GlobalDiagnoseInfo diagnose = getGlobalDiagnoseInfo(config);
    return events.stream().map(event -> event.toEventVO(this, diagnose)).collect(Collectors.toList());
}

public GCEventVO getEventVO(GCEvent event, AnalysisConfig config) {
    GlobalDiagnoseInfo diagnose = getGlobalDiagnoseInfo(config);
    return event.toEventVO(this, diagnose);
}

// Returns one page of filtered events; total counts all matching events so
// the frontend can render pagination.
public PageView<GCEventVO> getGCDetails(PagingRequest pagingRequest, GCDetailFilter filter, AnalysisConfig config) {
    int firstIndex = (pagingRequest.getPage() - 1) * pagingRequest.getPageSize();
    int total = 0;
    List<GCEvent> resultEvents = new ArrayList<>();
    for (GCEvent event : gcEvents) {
        if (!filter.isFiltered(event)) {
            if (total >= firstIndex && resultEvents.size() < pagingRequest.getPageSize()) {
                resultEvents.add(event);
            }
            total++;
        }
    }
    List<GCEventVO> result = getEventsVO(resultEvents, config);
    return new PageView<>(pagingRequest, total, result);
}

public boolean shouldTryToAvoidMemoryFullGC() {
    return collectorType != SERIAL && collectorType != PARALLEL;
}

public GCLogMetadata getGcModelMetadata() {
    return metadata;
}

// FIXME: need better implementation
private static final List<GCEventType> EVENT_TYPES_SHOULD_NOT_BE_REPORTED_IF_NOT_PRESENT = List.of(
        G1_CONCURRENT_UNDO_CYCLE, G1_MERGE_HEAP_ROOTS, G1_CONCURRENT_REBUILD_REMEMBERED_SETS, ZGC_CONCURRENT_DETATCHED_PAGES);

// Converts expected event types to names, dropping "optional" types that never
// actually appeared in this log.
private List<String> dealEventTypeForMetadata(List<GCEventType> eventTypesExpected, Set<GCEventType> eventTypesActuallyShowUp) {
    return eventTypesExpected.stream()
            .filter(eventType -> !EVENT_TYPES_SHOULD_NOT_BE_REPORTED_IF_NOT_PRESENT.contains(eventType) ||
                    eventTypesActuallyShowUp.contains(eventType))
            .map(GCEventType::getName)
            .collect(Collectors.toList());
}

// Assembles the GCLogMetadata object served to the frontend.
private void calculateGcModelMetadata() {
    metadata = new GCLogMetadata();
    metadata.setCauses(gcEvents.stream()
            .map(GCEvent::getCause)
            .filter(Objects::nonNull)
            .map(GCCause::getName)
            .distinct()
            .collect(Collectors.toList()));
    metadata.setCollector(getCollectorType().toString());
    metadata.setLogStyle(getLogStyle().toString());
    metadata.setPauseless(isPauseless());
    metadata.setGenerational(isGenerational());
    metadata.setMetaspaceCapacityReliable(isMetaspaceCapacityReliable());
    metadata.setTimestamp(getReferenceTimestamp());
    metadata.setStartTime(getStartTime());
    metadata.setEndTime(getEndTime());
    Set<GCEventType> eventTypesActuallyShowUp = this.allEvents.stream()
            .map(GCEvent::getEventType)
            .collect(Collectors.toSet());
    metadata.setParentEventTypes(dealEventTypeForMetadata(getParentEventTypes(), eventTypesActuallyShowUp));
    metadata.setImportantEventTypes(dealEventTypeForMetadata(getImportantEventTypes(), eventTypesActuallyShowUp));
    metadata.setPauseEventTypes(dealEventTypeForMetadata(getPauseEventTypes(), eventTypesActuallyShowUp));
    metadata.setAllEventTypes(dealEventTypeForMetadata(getAllEventTypes(), eventTypesActuallyShowUp));
    metadata.setMainPauseEventTypes(dealEventTypeForMetadata(getMainPauseEventTypes(), eventTypesActuallyShowUp));
    metadata.setAnalysisConfig(AnalysisConfig.defaultConfig(this));
    metadata.setParallelGCThreads(getParallelThread());
    metadata.setConcurrentGCThreads(getConcurrentThread());
}

protected boolean isMetaspaceCapacityReliable() {
    return metaspaceCapacityReliable;
}

public void setMetaspaceCapacityReliable(boolean metaspaceCapacityReliable) {
    this.metaspaceCapacityReliable = metaspaceCapacityReliable;
}

public void setParallelThread(int parallelThread) {
    this.parallelThread = parallelThread;
}

public void setConcurrentThread(int concurrentThread) {
    this.concurrentThread = concurrentThread;
}

// Falls back to the -XX:ParallelGCThreads VM option if the log itself did not
// reveal the thread count.
public int getParallelThread() {
    if (parallelThread == Constant.UNKNOWN_INT && vmOptions != null) {
        return vmOptions.<Long>getOptionValue("ParallelGCThreads", Constant.UNKNOWN_LONG).intValue();
    }
    return parallelThread;
}

// Falls back to the -XX:ConcGCThreads VM option if unknown from the log.
public int getConcurrentThread() {
    if (concurrentThread == Constant.UNKNOWN_INT && vmOptions != null) {
        return vmOptions.<Long>getOptionValue("ConcGCThreads", Constant.UNKNOWN_LONG).intValue();
    }
    return concurrentThread;
}

// Per-config cache around calculateGlobalDiagnoseInfo.
public GlobalDiagnoseInfo getGlobalDiagnoseInfo(AnalysisConfig config) {
    GlobalDiagnoseInfo result = globalDiagnoseInfoCache.get(config);
    if (result == null) {
        result = calculateGlobalDiagnoseInfo(config);
        globalDiagnoseInfoCache.put(config, result);
    }
    return result;
}

private GlobalDiagnoseInfo calculateGlobalDiagnoseInfo(AnalysisConfig config) {
    GlobalDiagnoseInfo info = new GlobalDiagnoseInfo(this, config);
    EventAbnormalDetector abDetector = new EventAbnormalDetector(this, config, info);
    abDetector.diagnose();
    return info;
}

// Filter used by getGCDetails; null/absent criteria match everything.
@Data
@NoArgsConstructor
@ToString
public static class GCDetailFilter {
    private String eventType;
    private GCCause gcCause;
    //in ms
    private double logTimeLow;
    private double logTimeHigh;
    private double pauseTimeLow;

    public GCDetailFilter(String eventType, String gcCause, Double logTimeLow, Double logTimeHigh, Double pauseTimeLow) {
        this.eventType = eventType;
        this.gcCause = GCCause.getCause(gcCause);
        this.logTimeLow = logTimeLow == null ? -Double.MAX_VALUE : logTimeLow;
        this.logTimeHigh = logTimeHigh == null ? Double.MAX_VALUE : logTimeHigh;
        this.pauseTimeLow = pauseTimeLow == null ? -Double.MAX_VALUE : pauseTimeLow;
    }

    // Returns true when the event should be excluded; safepoint events are
    // always excluded.
    public boolean isFiltered(GCEvent event) {
        return event.getEventType() == SAFEPOINT ||
                !((eventType == null || eventType.equals(event.getEventType().getName()))
                        && (gcCause == null || gcCause == event.getCause())
                        && (logTimeLow <= event.getEndTime() && event.getEndTime() <= logTimeHigh)
                        && (pauseTimeLow <= event.getPause()));
    }
}
}
3,101
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/ParallelGCModel.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;

import java.util.List;

import static org.eclipse.jifa.gclog.model.GCEventType.FULL_GC;
import static org.eclipse.jifa.gclog.model.GCEventType.YOUNG_GC;

/**
 * GC model for the Parallel (throughput) collector. The event-type lists are
 * derived once from the shared GCEventType registry and shared by all instances.
 */
public class ParallelGCModel extends GenerationalGCModel {
    // Collector this model represents; must be initialized before the lists below
    // (static initializers run in textual order).
    private static final GCCollectorType collector = GCCollectorType.PARALLEL;

    public ParallelGCModel() {
        super(collector);
    }

    // Precomputed, immutable views of the event types relevant to Parallel GC.
    private static final List<GCEventType> allEventTypes = GCModel.calcAllEventTypes(collector);
    private static final List<GCEventType> pauseEventTypes = GCModel.calcPauseEventTypes(collector);
    private static final List<GCEventType> mainPauseEventTypes = GCModel.calcMainPauseEventTypes(collector);
    private static final List<GCEventType> parentEventTypes = GCModel.calcParentEventTypes(collector);
    private static final List<GCEventType> importantEventTypes = List.of(YOUNG_GC, FULL_GC);

    @Override
    protected List<GCEventType> getAllEventTypes() {
        return allEventTypes;
    }

    @Override
    protected List<GCEventType> getPauseEventTypes() {
        return pauseEventTypes;
    }

    @Override
    protected List<GCEventType> getMainPauseEventTypes() {
        return mainPauseEventTypes;
    }

    @Override
    protected List<GCEventType> getImportantEventTypes() {
        return importantEventTypes;
    }

    @Override
    protected List<GCEventType> getParentEventTypes() {
        return parentEventTypes;
    }
}
3,102
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/GCModelFactory.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import org.eclipse.jifa.common.util.ErrorUtil;
import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;

/**
 * Factory that maps a detected collector type to its concrete GCModel subclass.
 */
public class GCModelFactory {
    /**
     * Creates the GCModel implementation matching the given collector type.
     * An unsupported type (e.g. Shenandoah) hits the unreachable-code guard.
     */
    public static GCModel getModel(GCCollectorType collectorType) {
        GCModel model = null;
        switch (collectorType) {
            case G1:
                model = new G1GCModel();
                break;
            case CMS:
                model = new CMSGCModel();
                break;
            case SERIAL:
                model = new SerialGCModel();
                break;
            case PARALLEL:
                model = new ParallelGCModel();
                break;
            case ZGC:
                model = new ZGCModel();
                break;
            case UNKNOWN:
                model = new UnknownGCModel();
                break;
            default:
                ErrorUtil.shouldNotReachHere();
        }
        return model;
    }
}
3,103
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/GenerationalGCModel.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import org.eclipse.jifa.gclog.event.GCEvent;
import org.eclipse.jifa.gclog.event.evnetInfo.GCCause;
import org.eclipse.jifa.gclog.event.evnetInfo.MemoryArea;
import org.eclipse.jifa.gclog.event.evnetInfo.GCEventBooleanType;
import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;
import org.eclipse.jifa.gclog.model.modeInfo.GCLogStyle;

import java.util.ArrayList;
import java.util.List;

import static org.eclipse.jifa.gclog.model.GCEventType.FULL_GC;
import static org.eclipse.jifa.gclog.model.GCEventType.YOUNG_GC;

/**
 * Base model for generational collectors (Serial/Parallel/CMS/...). Normalizes
 * young GCs that actually degenerated into full GCs before derived info is
 * calculated.
 */
public abstract class GenerationalGCModel extends GCModel {
    public GenerationalGCModel(GCCollectorType type) {
        super(type);
    }

    /**
     * Unified-log case: a young GC whose time range fully covers the immediately
     * following full GC is the same collection that degenerated; merge the pair
     * into one FULL_GC event carrying the full GC's phases.
     */
    private void dealYoungGCThatBecomeFullGCUnified() {
        List<GCEvent> oldEvents = getGcEvents();
        // FIX: guard against an empty event list; the trailing
        // oldEvents.get(oldEvents.size() - 1) below would otherwise throw
        // IndexOutOfBoundsException for a log with no parsed events.
        if (oldEvents.isEmpty()) {
            return;
        }
        List<GCEvent> newEvents = new ArrayList<>();
        boolean remove = false;
        for (int i = 0; i < oldEvents.size() - 1; i++) {
            GCEvent event = oldEvents.get(i);
            GCEvent nextEvent = oldEvents.get(i + 1);
            remove = event.getEventType() == YOUNG_GC && nextEvent.getEventType() == FULL_GC &&
                    event.getStartTime() <= nextEvent.getStartTime() && event.getEndTime() >= nextEvent.getEndTime();
            if (remove) {
                event.setEventType(FULL_GC);
                event.setTrue(GCEventBooleanType.YOUNG_GC_BECOME_FULL_GC);
                event.setPhases(nextEvent.getPhases());
                i++; // remove the full gc
            }
            if (event.getEventType() == FULL_GC && event.isTrue(GCEventBooleanType.PROMOTION_FAILED)) {
                event.setCause(GCCause.PROMOTION_FAILED);
            }
            newEvents.add(event);
        }
        // the last event was consumed by a merge iff remove is still true
        if (!remove) {
            newEvents.add(oldEvents.get(oldEvents.size() - 1));
        }
        setGcEvents(newEvents);
    }

    /**
     * Pre-unified-log case: a "young GC" that printed metaspace info must in
     * fact have been a full GC; retag it accordingly.
     */
    private void dealYoungGCThatBecomeFullGCPreUnified() {
        for (GCEvent event : getGcEvents()) {
            // if metaspace is printed, it must be a full gc
            if (event.getEventType() == YOUNG_GC && event.getMemoryItem(MemoryArea.METASPACE) != null) {
                event.setEventType(FULL_GC);
                event.setTrue(GCEventBooleanType.YOUNG_GC_BECOME_FULL_GC);
            }
            if (event.getEventType() == FULL_GC && event.isTrue(GCEventBooleanType.PROMOTION_FAILED)) {
                event.setCause(GCCause.PROMOTION_FAILED);
            }
        }
    }

    // Pre-unified logs don't print young post-usage for full GC; it is zero by
    // definition after a full collection, so set it explicitly.
    private void youngGenUsedShouldBeZeroAfterFullGC() {
        if (getLogStyle() != GCLogStyle.PRE_UNIFIED) {
            return;
        }
        for (GCEvent event : getGcEvents()) {
            if (event.getEventType() == FULL_GC && event.getMemoryItem(MemoryArea.YOUNG) != null) {
                event.getMemoryItem(MemoryArea.YOUNG).setPostUsed(0);
            }
        }
    }

    @Override
    protected void doBeforeCalculatingDerivedInfo() {
        if (getLogStyle() == GCLogStyle.UNIFIED) {
            dealYoungGCThatBecomeFullGCUnified();
        } else if (getLogStyle() == GCLogStyle.PRE_UNIFIED) {
            dealYoungGCThatBecomeFullGCPreUnified();
        }
        youngGenUsedShouldBeZeroAfterFullGC();
    }
}
3,104
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/SerialGCModel.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;

import java.util.List;

import static org.eclipse.jifa.gclog.model.GCEventType.FULL_GC;
import static org.eclipse.jifa.gclog.model.GCEventType.YOUNG_GC;

/**
 * GC model for the Serial collector. The event-type lists are derived once
 * from the shared GCEventType registry and shared by all instances.
 */
public class SerialGCModel extends GenerationalGCModel {
    // Collector this model represents; must be initialized before the lists below
    // (static initializers run in textual order).
    private static final GCCollectorType collector = GCCollectorType.SERIAL;

    public SerialGCModel() {
        super(collector);
    }

    // Precomputed, immutable views of the event types relevant to Serial GC.
    private static final List<GCEventType> allEventTypes = GCModel.calcAllEventTypes(collector);
    private static final List<GCEventType> pauseEventTypes = GCModel.calcPauseEventTypes(collector);
    private static final List<GCEventType> mainPauseEventTypes = GCModel.calcMainPauseEventTypes(collector);
    private static final List<GCEventType> parentEventTypes = GCModel.calcParentEventTypes(collector);
    private static final List<GCEventType> importantEventTypes = List.of(YOUNG_GC, FULL_GC);

    @Override
    protected List<GCEventType> getAllEventTypes() {
        return allEventTypes;
    }

    @Override
    protected List<GCEventType> getPauseEventTypes() {
        return pauseEventTypes;
    }

    @Override
    protected List<GCEventType> getMainPauseEventTypes() {
        return mainPauseEventTypes;
    }

    @Override
    protected List<GCEventType> getImportantEventTypes() {
        return importantEventTypes;
    }

    @Override
    protected List<GCEventType> getParentEventTypes() {
        return parentEventTypes;
    }
}
3,105
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/ZGCModel.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.eclipse.jifa.gclog.event.GCEvent;
import org.eclipse.jifa.gclog.event.TimedEvent;
import org.eclipse.jifa.gclog.event.evnetInfo.MemoryArea;
import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;
import org.eclipse.jifa.gclog.util.Constant;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_INT;
import static org.eclipse.jifa.gclog.model.GCEventType.*;

/**
 * GC model for ZGC. Besides the usual events it keeps the periodic
 * "ZGC Statistics" table rows and allocation-stall events parsed from the log.
 */
public class ZGCModel extends GCModel {
    // key of maps here should include unit like
    // "Memory: Allocation Rate MB/s" to deduplicate
    private List<ZStatistics> statistics = new ArrayList<>();
    // allocation-stall events recorded by the parser
    private List<GCEvent> allocationStalls = new ArrayList<>();
    // lazily computed by getRecommendMaxHeapSize(); UNKNOWN_INT until then
    private long recommendMaxHeapSize = UNKNOWN_INT;

    private static GCCollectorType collector = GCCollectorType.ZGC;

    public ZGCModel() {
        super(collector);
        this.setMetaspaceCapacityReliable(true);
    }

    // Precomputed event-type lists for ZGC.
    private static List<GCEventType> allEventTypes = GCModel.calcAllEventTypes(collector);
    private static List<GCEventType> pauseEventTypes = GCModel.calcPauseEventTypes(collector);
    private static List<GCEventType> mainPauseEventTypes = GCModel.calcMainPauseEventTypes(collector);
    private static List<GCEventType> parentEventTypes = GCModel.calcParentEventTypes(collector);
    private static List<GCEventType> importantEventTypes = List.of(ZGC_GARBAGE_COLLECTION, ZGC_PAUSE_MARK_START,
            ZGC_PAUSE_MARK_END, ZGC_PAUSE_RELOCATE_START, ZGC_CONCURRENT_MARK, ZGC_CONCURRENT_NONREF,
            ZGC_CONCURRENT_SELECT_RELOC_SET, ZGC_CONCURRENT_PREPARE_RELOC_SET, ZGC_CONCURRENT_RELOCATE);

    @Override
    protected List<GCEventType> getAllEventTypes() {
        return allEventTypes;
    }

    @Override
    protected List<GCEventType> getPauseEventTypes() {
        return pauseEventTypes;
    }

    @Override
    protected List<GCEventType> getMainPauseEventTypes() {
        return mainPauseEventTypes;
    }

    @Override
    protected List<GCEventType> getImportantEventTypes() {
        return importantEventTypes;
    }

    @Override
    protected List<GCEventType> getParentEventTypes() {
        return parentEventTypes;
    }

    public List<GCEvent> getAllocationStalls() {
        return allocationStalls;
    }

    public void addAllocationStalls(GCEvent allocationStall) {
        this.allocationStalls.add(allocationStall);
    }

    public List<ZStatistics> getStatistics() {
        return statistics;
    }

    /**
     * Heuristic heap-size recommendation: for each garbage collection cycle,
     * estimate (heap used at mark start) + (cycle duration * allocation rate),
     * pairing each collection with the first statistics snapshot at or after
     * its end time, and return the maximum over all cycles. Result is cached.
     * NOTE(review): assumes both statistics keys are always present in a
     * snapshot — a missing key would NPE; confirm against the parser.
     */
    @Override
    public long getRecommendMaxHeapSize() {
        if (recommendMaxHeapSize == UNKNOWN_INT && !statistics.isEmpty()) {
            // used at marking start + garbage collection cycle * allocation rate
            int statisticIndex = 0;
            for (GCEvent collection : getGcCollectionEvents()) {
                if (collection.getEventType() != ZGC_GARBAGE_COLLECTION) {
                    continue;
                }
                if (collection.getMemoryItem(MemoryArea.HEAP).getPreUsed() == UNKNOWN_INT) {
                    continue;
                }
                // advance to the first snapshot taken after this collection ended
                while (statisticIndex < statistics.size() &&
                        statistics.get(statisticIndex).getStartTime() < collection.getEndTime()) {
                    statisticIndex++;
                }
                if (statisticIndex >= statistics.size()) {
                    break;
                }
                double collectionCycleMs = statistics.get(statisticIndex)
                        .get("Collector: Garbage Collection Cycle ms").getMax10s();
                double allocationRateMBps = statistics.get(statisticIndex)
                        .get("Memory: Allocation Rate MB/s").getMax10s();
                double size = collection.getMemoryItem(MemoryArea.HEAP).getPreUsed() +
                        (collectionCycleMs / Constant.MS2S) * (allocationRateMBps * Constant.KB2MB);
                recommendMaxHeapSize = Math.max(recommendMaxHeapSize, (long) size);
            }
        }
        return recommendMaxHeapSize;
    }

    // One row-set of the periodic "ZGC Statistics" table; keyed by
    // "category: name unit" strings (see class comment).
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ZStatistics extends TimedEvent {
        private Map<String, ZStatisticsItem> items = new HashMap<>();

        public ZStatisticsItem get(String key) {
            return items.getOrDefault(key, null);
        }

        public void put(String key, ZStatisticsItem item) {
            items.put(key, item);
        }

        public Map<String, ZStatisticsItem> getStatisticItems() {
            return items;
        }
    }

    // avg/max over the trailing 10s/10m/10h/total windows, as printed by ZGC.
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ZStatisticsItem {
        private double avg10s;
        private double max10s;
        private double avg10m;
        private double max10m;
        private double avg10h;
        private double max10h;
        private double avgTotal;
        private double maxTotal;
    }
}
3,106
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/CMSGCModel.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.model;

import com.google.common.util.concurrent.AtomicDouble;
import org.eclipse.jifa.gclog.event.GCEvent;
import org.eclipse.jifa.gclog.event.evnetInfo.GCEventBooleanType;
import org.eclipse.jifa.gclog.model.modeInfo.GCCollectorType;
import org.eclipse.jifa.gclog.util.LongData;
import org.eclipse.jifa.gclog.vo.TimeRange;

import java.util.List;

import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_LONG;
import static org.eclipse.jifa.gclog.event.evnetInfo.MemoryArea.METASPACE;
import static org.eclipse.jifa.gclog.event.evnetInfo.MemoryArea.OLD;
import static org.eclipse.jifa.gclog.model.GCEventType.*;

/**
 * GC model for the CMS collector. Adds post-processing that tags the first
 * young/full GC after a CMS cycle or final remark, since those GCs carry the
 * memory readings that reflect the old-cycle result.
 */
public class CMSGCModel extends GenerationalGCModel {
    private static GCCollectorType collector = GCCollectorType.CMS;

    public CMSGCModel() {
        super(collector);
    }

    // Precomputed event-type lists for CMS.
    private static List<GCEventType> allEventTypes = GCModel.calcAllEventTypes(collector);
    private static List<GCEventType> pauseEventTypes = GCModel.calcPauseEventTypes(collector);
    private static List<GCEventType> mainPauseEventTypes = GCModel.calcMainPauseEventTypes(collector);
    private static List<GCEventType> parentEventTypes = GCModel.calcParentEventTypes(collector);
    private static List<GCEventType> importantEventTypes = List.of(YOUNG_GC, FULL_GC, CMS_CONCURRENT_MARK_SWEPT,
            CMS_INITIAL_MARK, CMS_CONCURRENT_ABORTABLE_PRECLEAN, CMS_FINAL_REMARK, CMS_CONCURRENT_SWEEP);

    @Override
    protected List<GCEventType> getAllEventTypes() {
        return allEventTypes;
    }

    @Override
    protected List<GCEventType> getPauseEventTypes() {
        return pauseEventTypes;
    }

    @Override
    protected List<GCEventType> getMainPauseEventTypes() {
        return mainPauseEventTypes;
    }

    @Override
    protected List<GCEventType> getImportantEventTypes() {
        return importantEventTypes;
    }

    @Override
    protected List<GCEventType> getParentEventTypes() {
        return parentEventTypes;
    }

    @Override
    protected void doAfterCalculatingDerivedInfo() {
        decideGCsAfterOldGC();
    }

    /**
     * Tags the first young/full GC after each completed CMS cycle
     * (GC_AT_END_OF_OLD_CYCLE) and after each final remark (GC_AFTER_REMARK).
     * The sentinel value Double.MAX_VALUE means "no pending cycle/remark";
     * each tag is consumed once by resetting the sentinel.
     */
    private void decideGCsAfterOldGC() {
        double lastCMSEndTime = Double.MAX_VALUE;
        double lastRemarkEndTime = Double.MAX_VALUE;
        for (GCEvent event : getGcEvents()) {
            if (event.getEventType() == CMS_CONCURRENT_MARK_SWEPT) {
                // only cycles that finished normally count
                if (!event.containPhase(CMS_CONCURRENT_FAILURE) && !event.containPhase(CMS_CONCURRENT_INTERRUPTED)) {
                    lastCMSEndTime = event.getEndTime();
                }
                GCEvent remark = event.getLastPhaseOfType(CMS_FINAL_REMARK);
                if (remark != null) {
                    lastRemarkEndTime = remark.getEndTime();
                }
            } else if ((event.getEventType() == YOUNG_GC || event.getEventType() == FULL_GC)) {
                if (event.getStartTime() > lastRemarkEndTime) {
                    event.setTrue(GCEventBooleanType.GC_AFTER_REMARK);
                    lastRemarkEndTime = Double.MAX_VALUE;
                }
                if (event.getStartTime() > lastCMSEndTime) {
                    // Although jdk11 prints old gen usage after old gc, still use
                    // the next gc result for convenience
                    event.setTrue(GCEventBooleanType.GC_AT_END_OF_OLD_CYCLE);
                    lastCMSEndTime = Double.MAX_VALUE;
                }
            }
        }
    }

    /**
     * Feeds old-gen usage (from GCs at end of an old cycle) and metaspace usage
     * (from GCs after remark) into the stats matrix.
     * NOTE(review): indices data[1][3]/data[4][3] presumably correspond to the
     * old-gen and metaspace "used after old GC" slots defined in the GCModel
     * base class — confirm against the base implementation.
     */
    @Override
    protected void calculateUsedAvgAfterOldGC(TimeRange range, LongData[][] data) {
        iterateEventsWithinTimeRange(getGcEvents(), range, event -> {
            if (event.isTrue(GCEventBooleanType.GC_AT_END_OF_OLD_CYCLE) && event.getMemoryItem(OLD) != null) {
                data[1][3].add(event.getMemoryItem(OLD).getPreUsed());
            }
            if (event.isTrue(GCEventBooleanType.GC_AFTER_REMARK) && event.getMemoryItem(METASPACE) != null) {
                data[4][3].add(event.getMemoryItem(METASPACE).getPreUsed());
            }
        });
    }
}
3,107
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/modeInfo/GCCollectorType.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.model.modeInfo; public enum GCCollectorType { SERIAL("Serial GC"), PARALLEL("Parallel GC"), ZGC("ZGC"), SHENANDOAH("Shenandoah GC"), G1("G1 GC"), CMS("CMS GC"), UNKNOWN("Unknown GC"); private String name; GCCollectorType(String name) { this.name = name; } public String getName() { return name; } @Override public String toString() { return name; } }
3,108
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/modeInfo/GCLogMetadata.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.model.modeInfo; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import org.eclipse.jifa.gclog.diagnoser.AnalysisConfig; import java.util.List; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_DOUBLE; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_INT; /** * This class provides some necessary information to the frontend. */ @Data @NoArgsConstructor @AllArgsConstructor public class GCLogMetadata { private String collector; private String logStyle; private double startTime = UNKNOWN_DOUBLE; private double endTime = UNKNOWN_DOUBLE; private double timestamp = UNKNOWN_DOUBLE; private boolean generational = true; private boolean pauseless = false; private boolean metaspaceCapacityReliable = false; private int parallelGCThreads = UNKNOWN_INT; private int concurrentGCThreads = UNKNOWN_INT; private List<String> parentEventTypes; private List<String> importantEventTypes; private List<String> pauseEventTypes; private List<String> mainPauseEventTypes; private List<String> allEventTypes; private List<String> causes; private AnalysisConfig analysisConfig; }
3,109
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/modeInfo/GCLogStyle.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.model.modeInfo; public enum GCLogStyle { PRE_UNIFIED("preunified"), UNIFIED("unified"), UNKNOWN("unknown"); private String name; GCLogStyle(String name) { this.name = name; } public String getName() { return name; } @Override public String toString() { return name; } }
3,110
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/model/modeInfo/VmOptions.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.model.modeInfo; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import org.eclipse.jifa.gclog.util.GCLogUtil; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Pattern; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_INT; import static org.eclipse.jifa.gclog.util.Constant.KB2MB; public class VmOptions { private Map<String, VmOption> options = new LinkedHashMap<>(); // use LinkedHashMap to preserve option order private String originalOptionString; private static Map<String, Integer> optionPriority = new ConcurrentHashMap<>(); static { saveOptionsPriority(); } private static void saveOptionsPriority() { List.of("Xms", "Xmx", "Xmn", "InitialHeapSize", "MaxHeapSize", "NewSize", "NewRatio", "MaxNewSize", "MetaspaceSize", "MaxMetaspaceSize", "MaxDirectMemorySize").forEach(s -> optionPriority.put(s, 9)); List.of("UseCMSInitiatingOccupancyOnly", "UseCMSCompactAtFullCollection", "MaxGCPauseMillis", "InitiatingHeapOccupancyPercent").forEach(s -> optionPriority.put(s, 7)); List.of("MinHeapFreeRatio", "MaxHeapFreeRatio", "MaxMetaspaceFreeRatio", "MinMetaspaceFreeRatio", "UseCompressedOops", "UseCompressedClassPointers", "SurvivorRatio", "ExplicitGCInvokesConcurrent", "DisableExplicitGC", "ParallelRefProcEnabled", 
"MaxTenuringThreshold", "PrintPromotionFailure", "TargetSurvivorRatio", "ParGCCardsPerStrideChunk", "UseGCOverheadLimit", "ScavengeBeforeFullGC", "PretenureSizeThreshold", "InitialTenuringThreshold", "GCTimeRatio", "ExplicitGCInvokesConcurrentAndUnloadsClasses", "SoftRefLRUPolicyMSPerMB", "GCLockerRetryAllocationCount", "UseCountedLoopSafepoints", "ReduceInitialCardMarks", "UseAdaptiveSizePolicy", "ClassUnloading", "ClassUnloadingWithConcurrentMark").forEach(s -> optionPriority.put(s, 6)); List.of("verbose", "PrintHeapAtGC", "PrintTenuringDistribution", "PrintAdaptiveSizePolicy", "UseAsyncGCLog", "AsyncGCLogBufferSize", "AsyncGCLogBufferFlushThreshold", "UseGCLogFileRotation", "NumberOfGCLogFiles", "GCLogFileSize", "PrintStringDeduplicationStatistics", "PrintStringTableStatistics", "PrintSafepointStatistics", "PrintSafepointStatisticsCount", "PrintFLSStatistics", "PrintJNIGCStalls").forEach(s -> optionPriority.put(s, 5)); } /* priority rule: * 10: UseXXGC * 9: Heap. generation and metaspace size * 8: GC Threads related * 7: GC specific tuning options * 6: GC tuning options shared by gc collectors * 5: GC log related * 0: other */ private int getOptionPriority(String optionName) { if (optionPriority.containsKey(optionName)) { return optionPriority.get(optionName); } int priority; if (optionName.startsWith("Use") && optionName.endsWith("GC")) { priority = 10; } else if (optionName.endsWith("GCThreads")) { priority = 8; } else if (optionName.startsWith("Z") || optionName.startsWith("G1") || optionName.startsWith("CMS")) { priority = 7; } else if (optionName.contains("PLAB") || optionName.contains("TLAB")) { priority = 6; } else if (optionName.startsWith("PrintGC") || optionName.startsWith("Xlog")) { priority = 5; } else { priority = 0; } optionPriority.put(optionName, priority); return priority; } // notice that integers are long type and options indicating size are in Byte @SuppressWarnings("unchecked") public <T> T getOptionValue(String key) { if 
(options.containsKey(key)) { return (T) options.get(key).getValue(); } else { return null; } } @SuppressWarnings("unchecked") public <T> T getOptionValue(String key, T defaultValue) { if (options.containsKey(key)) { return (T) options.get(key).getValue(); } else { return defaultValue; } } public boolean containOption(String key) { return options.containsKey(key); } public VmOptions(String vmOptionsString) { originalOptionString = vmOptionsString; if (vmOptionsString == null) { return; } for (String option : vmOptionsString.split(" +")) { addVmOption(option); } } private void addVmOption(String optionString) { if (optionString.startsWith("-XX:")) { parseSingleOption(optionString, optionString.substring(4)); } else if (optionString.startsWith("-D")) { parseSingleOption(optionString, optionString.substring(2)); } else if (optionString.startsWith("-X")) { parseSingleOptionWithX(optionString, optionString.substring(2)); } else if (optionString.startsWith("-")) { parseSingleOption(optionString, optionString.substring(1)); } } private void parseSingleOptionWithX(String optionString, String content) { if (content == null) { return; } if (content.startsWith("mn") || content.startsWith("ms") || content.startsWith("mx") || content.startsWith("ss")) { // add 'X' for convention String optionName = "X" + content.substring(0, 2); options.put(optionName, new VmOption(optionString, optionName, GCLogUtil.toByte(content.substring(2)) * (long) KB2MB, getOptionPriority(optionName))); } int mid = content.indexOf('='); if (mid < 0) { mid = content.indexOf(':'); } if (mid >= 0) { String optionName = 'X' + content.substring(0, mid); options.put(optionName, new VmOption(optionString, optionName, decideTypeAndParse(content.substring(mid + 1)), getOptionPriority(optionName))); return; } options.put(content, new VmOption(optionString, content, true, getOptionPriority(content))); } private void parseSingleOption(String optionString, String content) { if (content == null || content.isEmpty()) { 
return; } if (content.charAt(0) == '+') { String optionName = content.substring(1); options.put(optionName, new VmOption(optionString, optionName, true, getOptionPriority(optionName))); return; } if (content.charAt(0) == '-') { String optionName = content.substring(1); options.put(optionName, new VmOption(optionString, optionName, false, getOptionPriority(optionName))); return; } int mid = content.indexOf('='); if (mid < 0) { mid = content.indexOf(':'); } if (mid >= 0) { String optionName = content.substring(0, mid); options.put(optionName, new VmOption(optionString, optionName, decideTypeAndParse(content.substring(mid + 1)), getOptionPriority(optionName))); return; } options.put(content, new VmOption(optionString, content, true, getOptionPriority(content))); } private static final Pattern NUMBER_PATTERN = Pattern.compile("\\d+"); private static final Pattern SIZE_PATTERN = Pattern.compile("\\d+[kmgt]]?[b]?"); private Object decideTypeAndParse(String s) { s = s.toLowerCase(); if (NUMBER_PATTERN.matcher(s).matches()) { return Long.parseLong(s); } else if (SIZE_PATTERN.matcher(s).matches()) { return GCLogUtil.toByte(s); } else { return s; } } public String getOriginalOptionString() { return originalOptionString; } public long getMetaspaceSize() { Long metaspaceSize = getOptionValue("MetaspaceSize"); Long maxMetaspaceSize = getOptionValue("MaxMetaspaceSize"); if (metaspaceSize != null && metaspaceSize.equals(maxMetaspaceSize)) { return metaspaceSize; } else { return UNKNOWN_INT; } } public VmOptionResult getVmOptionResult() { VmOptionResult optionResult = new VmOptionResult(); options.values().stream() .sorted((o1, o2) -> o2.priority - o1.priority) .forEach(o -> (o.isGCRelated() ? 
optionResult.gcRelated : optionResult.other).add(new VmOptionVo(o.originalText))); return optionResult; } @Override public String toString() { return options.toString(); } @Data @NoArgsConstructor @AllArgsConstructor public static class VmOption { private String originalText; private String optionName; private Object value; private int priority; public boolean isGCRelated() { return priority != 0; } } @Data @NoArgsConstructor @AllArgsConstructor public static class VmOptionVo { private String text; } @Data @NoArgsConstructor public static class VmOptionResult { private List<VmOptionVo> gcRelated = new ArrayList<>(); private List<VmOptionVo> other = new ArrayList<>(); } }
3,111
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/ThreadEvent.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event; import org.eclipse.jifa.gclog.diagnoser.GlobalDiagnoseInfo; import org.eclipse.jifa.gclog.model.GCModel; import org.eclipse.jifa.gclog.vo.GCEventVO; public class ThreadEvent extends GCEvent { private String threadName; public ThreadEvent() { } public String getThreadName() { return threadName; } public void setThreadName(String threadName) { this.threadName = threadName; } @Override protected void appendClassSpecificInfo(StringBuilder sb) { sb.append(threadName); } @Override protected void fillInfoToVO(GCModel model, GCEventVO vo, GlobalDiagnoseInfo diagnose) { super.fillInfoToVO(model, vo, diagnose); vo.saveInfo("threadName", threadName); } }
3,112
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/TimedEvent.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event; import org.eclipse.jifa.gclog.diagnoser.GlobalDiagnoseInfo; import org.eclipse.jifa.gclog.model.GCModel; import org.eclipse.jifa.gclog.vo.GCEventVO; import org.eclipse.jifa.gclog.vo.TimeRange; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_DOUBLE; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_INT; public class TimedEvent { // We assume that start time always exists. We will refuse to analyze logs that does not print any time, // and will add a suitable start time to events that does not have a start time in log. // Unit of all time variables is ms. protected double startTime = UNKNOWN_DOUBLE; protected int id = UNKNOWN_INT; // id is used to identify events, should not be showed to user // Real time duration of event. The duration may not exist, and we should always check its existence when using. 
private double duration = UNKNOWN_DOUBLE; public double getStartTime() { return startTime; } public double getDuration() { return duration; } public double getEndTime() { if (getStartTime() != UNKNOWN_DOUBLE && getDuration() != UNKNOWN_DOUBLE) { return getStartTime() + getDuration(); } else { return UNKNOWN_DOUBLE; } } public void setStartTime(double startTime) { this.startTime = startTime; } public void setDuration(double duration) { this.duration = duration; } public TimedEvent(double startTime, double duration) { this.startTime = startTime; this.duration = duration; } public TimedEvent(double startTime) { this.startTime = startTime; } public TimedEvent() { } public static TimedEvent fromTimeRange(TimeRange range) { return new TimedEvent(range.getStart(), range.length()); } public TimeRange toTimeRange() { if (duration != UNKNOWN_DOUBLE) { return new TimeRange(getStartTime(), getEndTime()); } else { return new TimeRange(getStartTime(), getStartTime()); } } public static TimedEvent newByStartEnd(double start, double end) { return new TimedEvent(start, end - start); } protected void fillInfoToVO(GCModel model, GCEventVO vo, GlobalDiagnoseInfo diagnose) { vo.saveInfo("id", id); vo.saveInfo("startTime", getStartTime()); vo.saveInfo("duration", getDuration()); } // notice: should call GCModel.transformEventsToVo to create a vo because diagnose info is filled there public GCEventVO toEventVO(GCModel model, GlobalDiagnoseInfo diagnose) { GCEventVO vo = new GCEventVO(); fillInfoToVO(model, vo, diagnose); return vo; } }
3,113
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/GCEvent.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.event;

import org.eclipse.jifa.common.util.ErrorUtil;
import org.eclipse.jifa.gclog.diagnoser.GlobalDiagnoseInfo;
import org.eclipse.jifa.gclog.event.evnetInfo.*;
import org.eclipse.jifa.gclog.model.GCEventType;
import org.eclipse.jifa.gclog.model.GCModel;
import org.eclipse.jifa.gclog.util.Constant;
import org.eclipse.jifa.gclog.vo.GCEventVO;

import java.text.SimpleDateFormat;
import java.util.*;
import java.util.function.Consumer;

import static org.eclipse.jifa.gclog.event.evnetInfo.MemoryArea.*;
import static org.eclipse.jifa.gclog.event.evnetInfo.GCEventLevel.EVENT;
import static org.eclipse.jifa.gclog.event.evnetInfo.GCEventLevel.PHASE;
import static org.eclipse.jifa.gclog.util.Constant.KB2MB;

/**
 * A single GC event parsed from the log: a collection, a concurrent cycle, or
 * a phase of one. Carries memory usage per area, cpu time, cause, reference
 * processing info, boolean flags, and derived statistics (pause, interval,
 * promotion, allocation, reclamation). Times are in ms, sizes in bytes unless
 * noted otherwise.
 */
public class GCEvent extends TimedEvent {
    /* All info saved here should not be relevant to AnalysisConfig.
       Anything related to the config should be saved in EventDiagnoseInfo */
    // GC id printed in the log; UNKNOWN_INT if the log has none.
    private int gcid = Constant.UNKNOWN_INT;
    private CpuTime cpuTime;
    private ReferenceGC referenceGC;
    private GCEventType eventType = GCEventType.UNDEFINED;
    private GCCause cause;
    // Indexed by MemoryArea.ordinal(); lazily allocated by setMemoryItem.
    private GCMemoryItem[] memory;
    // phases may contain more detailed info about an event. For simplicity, we put subphase of an event as
    // a direct child in phases field rather than child of child. Use level field of EventType to check
    // if an event type should be a subphase.
    private List<GCEvent> phases;
    // One bit per GCEventBooleanType; null until the first flag is set.
    private BitSet booleans;
    // Lazily computed by getPause(); UNKNOWN_DOUBLE means "not computed yet".
    private double pause = Constant.UNKNOWN_DOUBLE;
    private double interval = Constant.UNKNOWN_DOUBLE; // interval from last event with same type
    private double causeInterval = Constant.UNKNOWN_DOUBLE; // interval from last event with same type and cause
    private long promotion = Constant.UNKNOWN_INT;
    private long allocation = Constant.UNKNOWN_INT;
    private long reclamation = Constant.UNKNOWN_INT;

    public GCMemoryItem[] getMemoryItems() {
        return memory;
    }

    /** @return the item for {@code area}, or null when absent. */
    public GCMemoryItem getMemoryItem(MemoryArea area) {
        if (memory == null) {
            return null;
        }
        return memory[area.ordinal()];
    }

    /** Like getMemoryItem but never null: returns an empty placeholder instead. */
    public GCMemoryItem getMemoryItemOrEmptyObject(MemoryArea area) {
        GCMemoryItem result = getMemoryItem(area);
        if (result == null) {
            return new GCMemoryItem(area);
        } else {
            return result;
        }
    }

    public void setMemoryItem(GCMemoryItem item) {
        setMemoryItem(item, false);
    }

    /**
     * Stores {@code item} in its area's slot. Empty items are ignored; an
     * existing item is only replaced when {@code force} is true.
     */
    public void setMemoryItem(GCMemoryItem item, boolean force) {
        if (item == null || item.isEmpty()) {
            return;
        }
        if (memory == null) {
            // values() is MemoryArea.values() via static import.
            memory = new GCMemoryItem[values().length];
        }
        if (force || getMemoryItem(item.getArea()) == null) {
            memory[item.getArea().ordinal()] = item;
        }
    }

    public void setMemoryItems(GCMemoryItem[] memory) {
        this.memory = memory;
    }

    public List<GCEvent> getPhases() {
        return phases;
    }

    public GCEvent() {
    }

    public void setGcid(int gcid) {
        this.gcid = gcid;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    /** @return the last phase of {@code type} in log order, or null. */
    public GCEvent getLastPhaseOfType(GCEventType type) {
        if (phases == null) {
            return null;
        }
        for (int i = phases.size() - 1; i >= 0; i--) {
            GCEvent phase = phases.get(i);
            if (phase.getEventType().equals(type)) {
                return phase;
            }
        }
        return null;
    }

    public boolean containPhase(GCEventType type) {
        return getLastPhaseOfType(type) != null;
    }

    public void setBoolean(GCEventBooleanType type, boolean value) {
        if (booleans == null) {
            booleans = new BitSet();
        }
        booleans.set(type.ordinal(), value);
    }

    public void setTrue(GCEventBooleanType type) {
        setBoolean(type, true);
    }

    /** @return the flag value; false when no flag was ever set. */
    public boolean isTrue(GCEventBooleanType type) {
        return booleans != null && booleans.get(type.ordinal());
    }

    public ReferenceGC getReferenceGC() {
        return referenceGC;
    }

    public void setReferenceGC(ReferenceGC referenceGC) {
        this.referenceGC = referenceGC;
    }

    public int getGcid() {
        return gcid;
    }

    public GCEventType getEventType() {
        return eventType;
    }

    public boolean isYoungGC() {
        return this.eventType != null && this.eventType.isYoungGC();
    }

    public boolean isOldGC() {
        return this.eventType != null && this.eventType.isOldGC();
    }

    public boolean isFullGC() {
        return this.eventType != null && this.eventType.isFullGC();
    }

    public GCCause getCause() {
        return cause;
    }

    public CpuTime getCpuTime() {
        return cpuTime;
    }

    public long getAllocation() {
        return allocation;
    }

    public long getReclamation() {
        return reclamation;
    }

    // The setters below normalize negative inputs to UNKNOWN_INT so that
    // malformed or missing log values never masquerade as real statistics.
    public void setPromotion(long promotion) {
        if (promotion < 0) {
            promotion = Constant.UNKNOWN_INT;
        }
        this.promotion = promotion;
    }

    public void setAllocation(long allocation) {
        if (allocation < 0) {
            allocation = Constant.UNKNOWN_INT;
        }
        this.allocation = allocation;
    }

    public void setReclamation(long reclamation) {
        if (reclamation < 0) {
            reclamation = Constant.UNKNOWN_INT;
        }
        this.reclamation = reclamation;
    }

    public void setEventType(GCEventType eventType) {
        this.eventType = eventType;
    }

    public void setCause(String cause) {
        this.cause = GCCause.getCause(cause);
    }

    public void setCause(GCCause cause) {
        this.cause = cause;
    }

    public void setCpuTime(CpuTime cpuTime) {
        this.cpuTime = cpuTime;
    }

    public boolean hasPhases() {
        return phases != null;
    }

    public void addPhase(GCEvent phase) {
        if (phases == null) {
            // Most events have very few phases, so start small.
            phases = new ArrayList<>(2);
        }
        phases.add(phase);
    }

    public void setPhases(List<GCEvent> phases) {
        this.phases = phases;
    }

    public boolean hasPromotion() {
        return eventType != null && eventType.hasObjectPromotion();
    }

    public double getInterval() {
        return interval;
    }

    public void setInterval(double interval) {
        this.interval = interval;
    }

    public double getCauseInterval() {
        return causeInterval;
    }

    public void setCauseInterval(double causeInterval) {
        this.causeInterval = causeInterval;
    }

    public boolean isPause() {
        return getEventType().getPause() == GCPause.PAUSE;
    }

    public boolean isPartialConcurrent() {
        return getEventType().getPause() == GCPause.PARTIAL;
    }

    public boolean isConcurrent() {
        return getEventType().getPause() == GCPause.CONCURRENT;
    }

    public boolean isBadFullGC() {
        return isFullGC() && getCause().isBad();
    }

    // Max allowed gap (ms) between -XX:+PrintGCDetails duration and cputime
    // real time before cputime is considered the more trustworthy value.
    private static final double GCTRACETIME_TRACECPUTIME_CLOSE_THRESHOLD = 10.0;

    /**
     * Lazily computes and caches the pause time of this event in ms:
     * PAUSE events use duration (or cputime real when it diverges strongly),
     * CONCURRENT events pause 0, PARTIAL events sum their pausing phases.
     */
    public double getPause() {
        if (pause == Constant.UNKNOWN_DOUBLE) {
            switch (eventType.getPause()) {
                case PAUSE:
                    // In most cases, duration is more accurate than cputime because of rounding error.
                    // In very rare cases, cputime may be significantly larger than duration. In these cases
                    // cputime is more accurate value.
                    if (cpuTime != null) {
                        if (getDuration() != Constant.UNKNOWN_DOUBLE && Math.abs(cpuTime.getReal() - getDuration()) > GCTRACETIME_TRACECPUTIME_CLOSE_THRESHOLD) {
                            pause = getCpuTime().getReal();
                        } else {
                            pause = getDuration();
                        }
                    } else {
                        pause = getDuration();
                    }
                    break;
                case CONCURRENT:
                    pause = 0;
                    break;
                case PARTIAL:
                    pause = 0;
                    if (phases != null) {
                        for (GCEvent phase : phases) {
                            if (phase.getEventType().getPause() == GCPause.PAUSE && phase.getEventLevel() == PHASE) {
                                pause += phase.getPause();
                            }
                        }
                    }
                    break;
                default:
                    ErrorUtil.shouldNotReachHere();
            }
        }
        return pause;
    }

    public long getPromotion() {
        return promotion;
    }

    // NOTE(review): SimpleDateFormat is not thread-safe; this shared instance is
    // fine only as long as debug-string printing stays single-threaded — confirm.
    private static final SimpleDateFormat TIMESTAMP_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");

    // Appends an absolute wall-clock timestamp derived from the log's epoch
    // reference timestamp (ms) plus this event's relative start time.
    protected void appendStartTimestamp(StringBuilder sb, double referenceTimestamp) {
        if (referenceTimestamp != Constant.UNKNOWN_DOUBLE && startTime != Constant.UNKNOWN_DOUBLE) {
            sb.append(TIMESTAMP_FORMAT.format((long) (referenceTimestamp + startTime))).append(" ");
        }
    }

    // Appends the relative start time in seconds, matching the log's "12.345: " style.
    protected void appendStartTime(StringBuilder sb) {
        if (startTime != Constant.UNKNOWN_DOUBLE) {
            sb.append(String.format("%.3f: ", getStartTime() / 1000));
        }
    }

    // Appends flags such as "(Initial Mark, To-space Exhausted) " when set.
    private void appendGCSpecialSituation(StringBuilder sb) {
        List<String> parts = new ArrayList<>();
        if (isTrue(GCEventBooleanType.INITIAL_MARK)) {
            parts.add("Initial Mark");
        }
        if (isTrue(GCEventBooleanType.PREPARE_MIXED)) {
            parts.add("Prepare Mixed");
        }
        if (isTrue(GCEventBooleanType.TO_SPACE_EXHAUSTED)) {
            parts.add("To-space Exhausted");
        }
        if (parts.isEmpty()) {
            return;
        }
        sb.append("(");
        for (int i = 0; i < parts.size(); i++) {
            if (i != 0) {
                sb.append(", ");
            }
            sb.append(parts.get(i));
        }
        sb.append(") ");
    }

    private void appendEventType(StringBuilder sb) {
        sb.append(eventType).append(" ");
    }

    // Appends gcid, cause, flags, duration, memory items, promotion/interval
    // and cpu time — everything specific to a GC event's debug string.
    protected void appendClassSpecificInfo(StringBuilder sb) {
        if (gcid != Constant.UNKNOWN_INT) {
            sb.append('(').append(gcid).append(") ");
        }
        if (cause != null) {
            sb.append("(").append(cause).append(") ");
        }
        appendGCSpecialSituation(sb);
        if (getDuration() != Constant.UNKNOWN_DOUBLE) {
            sb.append(String.format("%.3f", getDuration() / 1000)).append("s ");
        }
        memoryItemDo(item -> sb.append("[").append(item).append("] "));
        boolean moreInfoAvailable = getEventLevel() == EVENT && (getPromotion() != Constant.UNKNOWN_INT || getInterval() != Constant.UNKNOWN_DOUBLE);
        if (moreInfoAvailable) {
            boolean first = true;
            sb.append("[");
            if (getPromotion() != Constant.UNKNOWN_INT) {
                sb.append("promotion ").append(getPromotion() / (long) KB2MB).append(" K");
                first = false;
            }
            // NOTE(review): compares the double interval against UNKNOWN_INT while
            // moreInfoAvailable above uses UNKNOWN_DOUBLE — presumably both sentinels
            // share the same numeric value; confirm against Constant.
            if (getInterval() != Constant.UNKNOWN_INT) {
                if (!first) {
                    sb.append(", ");
                }
                sb.append("interval ").append(String.format("%.3f", getInterval() / 1000)).append(" s");
            }
            sb.append("] ");
        }
        if (cpuTime != null) {
            sb.append("[").append(cpuTime).append("] ");
        }
    }

    // This function is used for printing while debugging. The result is never shown to the user.
    // reference format: 14.244: Full GC (1) (Ergonomics) 0.001s [Young: 2548K->0K(18944K)] [Old: 33595K->11813K(44032K)] [Total: 36143K->11813K(62976K)] [Metaspace: 19355K->19355K(1067008K)] [promotion 3000 K, interval 30 s]
    public String toDebugString(GCModel model) {
        StringBuilder sb = new StringBuilder();
        appendStartTimestamp(sb, model.getReferenceTimestamp());
        appendStartTime(sb);
        appendEventType(sb);
        appendClassSpecificInfo(sb);
        return sb.toString();
    }

    // This function is used for printing while debugging (mainly used in IDE). The result is never shown to the user.
    // toDebugString shows more info than this function.
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        appendStartTime(sb);
        appendEventType(sb);
        appendClassSpecificInfo(sb);
        return sb.toString();
    }

    public GCEventLevel getEventLevel() {
        return eventType.getLevel();
    }

    /**
     * Feeds every pausing unit of this top-level event to {@code consumer}:
     * the event itself when it is a full pause, or each pausing phase when it
     * is a partially-concurrent event. Skips non-EVENT levels and events
     * flagged IGNORE_PAUSE.
     */
    public void pauseEventOrPhasesDo(Consumer<GCEvent> consumer) {
        if (getEventLevel() != EVENT || isTrue(GCEventBooleanType.IGNORE_PAUSE)) {
            return;
        }
        switch (getEventType().getPause()) {
            case PAUSE:
                consumer.accept(this);
                break;
            case PARTIAL:
                phasesDoDFS(phase -> {
                    if (phase.getEventType().getPause() == GCPause.PAUSE && phase.getEventType().getLevel() == PHASE) {
                        consumer.accept(phase);
                    }
                });
        }
    }

    // Applies consumer to every non-empty memory item.
    public void memoryItemDo(Consumer<GCMemoryItem> consumer) {
        if (memory == null) {
            return;
        }
        for (GCMemoryItem item : memory) {
            if (item != null && !item.isEmpty()) {
                consumer.accept(item);
            }
        }
    }

    // Phases are stored flat (see the phases field comment), so a single
    // level of iteration visits all of them.
    public void phasesDoDFS(Consumer<GCEvent> consumer) {
        if (phases != null) {
            for (GCEvent phase : phases) {
                consumer.accept(phase);
            }
        }
    }

    // Memory items keyed by area name, for the frontend VO.
    private Map<String, GCMemoryItem> memoryToVO() {
        Map<String, GCMemoryItem> result = new HashMap<>();
        memoryItemDo(item -> result.put(item.getArea().getName(), item));
        return result;
    }

    @Override
    protected void fillInfoToVO(GCModel model, GCEventVO vo, GlobalDiagnoseInfo diagnose) {
        super.fillInfoToVO(model, vo, diagnose);
        vo.saveInfo("eventType", eventType.getName());
        vo.saveInfo("gcid", gcid);
        vo.saveInfo("cputime", cpuTime);
        vo.saveInfo("referenceGC", referenceGC);
        vo.saveInfo("memory", memoryToVO());
        vo.saveInfo("pause", pause);
        vo.saveInfo("interval", interval);
        vo.saveInfo("causeInterval", causeInterval);
        vo.saveInfo("promotion", promotion);
        vo.saveInfo("reclamation", reclamation);
        vo.saveInfo("allocation", allocation);
        phasesDoDFS(phase -> vo.addPhase(phase.toEventVO(model, diagnose)));
        vo.saveInfo("diagnose", diagnose.getEventDiagnoseVO(this));
        // Only flags that are set are serialized, keyed by the enum name.
        for (GCEventBooleanType type : GCEventBooleanType.values()) {
            if (isTrue(type)) {
                vo.saveInfo(type.name(), true);
            }
        }
        if (cause != null) {
            vo.saveInfo("cause", cause.getName());
        }
    }
}
3,114
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/Safepoint.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event; import org.eclipse.jifa.gclog.model.GCEventType; import org.eclipse.jifa.gclog.model.GCModel; import org.eclipse.jifa.gclog.util.Constant; public class Safepoint extends GCEvent { private double timeToEnter = Constant.UNKNOWN_DOUBLE; public Safepoint() { this.setEventType(GCEventType.SAFEPOINT); } public double getTimeToEnter() { return timeToEnter; } public void setTimeToEnter(double timeToEnter) { this.timeToEnter = timeToEnter; } @Override protected void appendClassSpecificInfo(StringBuilder sb) { sb.append(String.format("Total time for which application threads were stopped: " + "%.3f seconds, Stopping threads took: %.3f seconds", getDuration(), getTimeToEnter())); } @Override public String toDebugString(GCModel model) { StringBuilder sb = new StringBuilder(); appendStartTimestamp(sb, model.getStartTime()); appendStartTime(sb); appendClassSpecificInfo(sb); return sb.toString(); } @Override public String toString() { StringBuilder sb = new StringBuilder(); appendStartTime(sb); appendClassSpecificInfo(sb); return sb.toString(); } }
3,115
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/GCEventBooleanType.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event.evnetInfo; /* * This class records some boolean types related to a GCEvent. They will be saved in a BitSet of GCEvent, * Each boolean should have the default value false. */ public enum GCEventBooleanType { PROMOTION_FAILED, // this gc is just after a cms or g1 remark GC_AFTER_REMARK, // this event is just after a cms cycle, or last mixed gc of an old cycle in g1, or the Prepare Mixed gc // because no mixed gc will be scheduled GC_AT_END_OF_OLD_CYCLE, // pause of this event should not be included in statistics IGNORE_PAUSE, TO_SPACE_EXHAUSTED, YOUNG_GC_BECOME_FULL_GC, INITIAL_MARK, PREPARE_MIXED; }
3,116
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/ReferenceGC.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event.evnetInfo; import lombok.Data; import lombok.NoArgsConstructor; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_DOUBLE; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_INT; @Data @NoArgsConstructor public class ReferenceGC { private double softReferenceStartTime = UNKNOWN_DOUBLE; private int softReferenceCount = UNKNOWN_INT; private double softReferencePauseTime = UNKNOWN_DOUBLE; private double weakReferenceStartTime = UNKNOWN_DOUBLE; private int weakReferenceCount = UNKNOWN_INT; private double weakReferencePauseTime = UNKNOWN_DOUBLE; private double finalReferenceStartTime = UNKNOWN_DOUBLE; private int finalReferenceCount = UNKNOWN_INT; private double finalReferencePauseTime = UNKNOWN_DOUBLE; private double phantomReferenceStartTime = UNKNOWN_DOUBLE; private int phantomReferenceCount = UNKNOWN_INT; private int phantomReferenceFreedCount; private double phantomReferencePauseTime = UNKNOWN_DOUBLE; private double jniWeakReferenceStartTime = UNKNOWN_DOUBLE; private double jniWeakReferencePauseTime = UNKNOWN_DOUBLE; }
3,117
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/MemoryArea.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event.evnetInfo; public enum MemoryArea { EDEN("eden"), SURVIVOR("survivor"), YOUNG("young"), OLD("old"), HUMONGOUS("humongous"), ARCHIVE("archive"), HEAP("heap"), //young + old + humongous METASPACE("metaspace"), // also represents perm CLASS("class"), NONCLASS("nonclass"); public static MemoryArea getMemoryArea(String name) { if (name == null) { return null; } switch (name.trim().toLowerCase()) { case "young": case "parnew": case "defnew": case "psyounggen": return YOUNG; case "eden": return EDEN; case "survivor": case "survivors": return SURVIVOR; case "tenured": case "old": case "psoldgen": case "paroldgen": case "cms": case "ascms": return OLD; case "metaspace": case "perm": return METASPACE; case "class": return CLASS; case "nonclass": return NONCLASS; case "humongous": return HUMONGOUS; case "archive": return ARCHIVE; case "total": case "heap": return HEAP; default: return null; } } private final String name; MemoryArea(String name) { this.name = name; } public String getName() { return name; } }
3,118
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/GCCause.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event.evnetInfo; import java.util.HashMap; import java.util.List; import java.util.Map; public class GCCause { private String name; private static final Map<String, GCCause> name2cause = new HashMap<>(); public static GCCause SYSTEM_GC = new GCCause("System.gc()"); public static GCCause DIAGNOSTIC_COMMAND = new GCCause("Diagnostic Command"); public static GCCause FULL_GC_ALOT = new GCCause("FullGCALot"); public static GCCause SCAVENGE_ALOT = new GCCause("ScavengeAlot"); public static GCCause ALLOCATION_PROFILER = new GCCause("Allocation Profiler"); public static GCCause JVMTI_FORCE_GC = new GCCause("JvmtiEnv ForceGarbageCollection"); public static GCCause ARCHIVE_SHARE_GC = new GCCause("Full GC for -Xshare:dump"); public static GCCause GC_LOCKER = new GCCause("GCLocker Initiated GC"); public static GCCause HEAP_INSPECTION = new GCCause("Heap Inspection Initiated GC"); public static GCCause HEAP_DUMP = new GCCause("Heap Dump Initiated GC"); public static GCCause NO_GC = new GCCause("No GC"); public static GCCause ALLOCATION_FAILURE = new GCCause("Allocation Failure"); public static GCCause TENURED_GENERATION_FULL = new GCCause("Tenured Generation Full"); public static GCCause METADATA_GENERATION_THRESHOLD = new GCCause("Metadata GC Threshold"); public static GCCause PERMANENT_GENERATION_FULL = new GCCause("Permanent Generation Full"); public static GCCause CMS_GENERATION_FULL = new 
GCCause("CMS Generation Full"); public static GCCause CMS_INITIAL_MARK = new GCCause("CMS Initial Mark"); public static GCCause CMS_FINAL_REMARK = new GCCause("CMS Final Remark"); public static GCCause CMS_CONCURRENT_MARK = new GCCause("CMS Concurrent Mark"); public static GCCause CMS_FAILURE = new GCCause("CMS Failure"); public static GCCause OLD_GENERATION_EXPANDED_ON_LAST_SCAVENGE = new GCCause("Old Generation Expanded On Last Scavenge"); public static GCCause OLD_GENERATION_TOO_FULL_TO_SCAVENGE = new GCCause("Old Generation Too Full To Scavenge"); public static GCCause ERGONOMICS = new GCCause("Ergonomics"); public static GCCause G1_EVACUATION_PAUSE = new GCCause("G1 Evacuation Pause"); public static GCCause G1_HUMONGOUS_ALLOCATION = new GCCause("G1 Humongous Allocation"); public static GCCause LAST_DITCH_COLLECTION = new GCCause("Last ditch collection"); public static GCCause LAST_GC_CAUSE = new GCCause("ILLEGAL VALUE - last gc cause - ILLEGAL VALUE"); public static GCCause PROMOTION_FAILED = new GCCause("Promotion Failed"); public static GCCause UPDATE_ALLOCATION_CONTEXT_STATS = new GCCause("Update Allocation Context Stats"); public static GCCause WHITEBOX_YOUNG = new GCCause("WhiteBox Initiated Young GC"); public static GCCause WHITEBOX_CONCURRENT_MARK = new GCCause("WhiteBox Initiated Concurrent Mark"); public static GCCause WHITEBOX_FULL = new GCCause("WhiteBox Initiated Full GC"); public static GCCause META_CLEAR_SOFT_REF = new GCCause("Metadata GC Clear Soft References"); public static GCCause TIMER = new GCCause("Timer"); public static GCCause WARMUP = new GCCause("Warmup"); public static GCCause ALLOC_RATE = new GCCause("Allocation Rate"); public static GCCause ALLOC_STALL = new GCCause("Allocation Stall"); public static GCCause PROACTIVE = new GCCause("Proactive"); public static GCCause PREVENTIVE = new GCCause("G1 Preventive Collection"); public static GCCause G1_COMPACTION = new GCCause("G1 Compaction Pause"); public static GCCause UNKNOWN_GCCAUSE = 
new GCCause("unknown GCCause"); static { name2cause.put("System.gc", SYSTEM_GC); // HACK: sometimes "()" is missing } private GCCause(String name) { this.name = name; name2cause.put(name, this); } public static GCCause getCause(String name) { return name2cause.getOrDefault(name, null); } public String getName() { return name; } public boolean isMetaspaceFullGCCause() { return this == METADATA_GENERATION_THRESHOLD || this == META_CLEAR_SOFT_REF || this == LAST_DITCH_COLLECTION; } private static final List<GCCause> HeapMemoryTriggeredFullGCCauses = List.of(GC_LOCKER, ALLOCATION_FAILURE, ERGONOMICS, G1_HUMONGOUS_ALLOCATION, PROMOTION_FAILED, G1_COMPACTION); public boolean isHeapMemoryTriggeredFullGCCause() { return HeapMemoryTriggeredFullGCCauses.contains(this); } public boolean isSystemGC() { return this == SYSTEM_GC; } public boolean isBad() { return isSystemGC() || isMetaspaceFullGCCause() || isHeapMemoryTriggeredFullGCCause(); } @Override public String toString() { return name; } }
3,119
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/GCMemoryItem.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.event.evnetInfo;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Objects;

import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_INT;
import static org.eclipse.jifa.gclog.util.Constant.KB2MB;

/*
 * Memory usage of one area before/after a GC event: used and capacity at the
 * start (pre*) and end (post*) of the collection. Any field may be the
 * UNKNOWN_INT sentinel when the log did not provide it; all arithmetic below is
 * written to propagate that sentinel explicitly, so the exact behavior of each
 * helper on unknown operands matters — read the per-method comments.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class GCMemoryItem {
    private MemoryArea area;

    // memory size in kb
    // NOTE(review): toString() below divides by KB2MB twice (i.e. bytes -> MB),
    // which contradicts this comment — confirm the actual unit with the parsers.
    private long preUsed = UNKNOWN_INT;
    private long preCapacity = UNKNOWN_INT;
    private long postUsed = UNKNOWN_INT;
    private long postCapacity = UNKNOWN_INT;

    // All sizes unknown; only the area is known.
    public GCMemoryItem(MemoryArea area) {
        this.area = area;
    }

    // Convenience constructor for logs that omit the pre-GC capacity.
    public GCMemoryItem(MemoryArea area, long preUsed, long postUsed, long postCapacity) {
        this.area = area;
        this.preUsed = preUsed;
        this.postUsed = postUsed;
        this.postCapacity = postCapacity;
    }

    // memories must hold exactly {preUsed, preCapacity, postUsed, postCapacity}.
    public GCMemoryItem(MemoryArea area, long[] memories) {
        this(area, memories[0], memories[1], memories[2], memories[3]);
    }

    // Freed memory (preUsed - postUsed); UNKNOWN_INT if either side is unknown,
    // and clamped to 0 rather than going negative (see minus()).
    public long getMemoryReduction() {
        return minus(preUsed, postUsed);
    }

    /**
     * unknown value in this or anotherItem will lead result to be unknown.
     */
    public GCMemoryItem merge(GCMemoryItem anotherItem) {
        if (anotherItem == null) {
            return new GCMemoryItem(area);
        }
        return new GCMemoryItem(area,
                plus(preUsed, anotherItem.preUsed),
                plus(preCapacity, anotherItem.preCapacity),
                plus(postUsed, anotherItem.postUsed),
                plus(postCapacity, anotherItem.postCapacity));
    }

    /**
     * unknown value in this will lead result to be unknown.
     * unknown value in anotherItem are seen as 0
     */
    public GCMemoryItem mergeIfPresent(GCMemoryItem anotherItem) {
        if (anotherItem == null) {
            return this;
        }
        return new GCMemoryItem(area,
                plusIfPresent(preUsed, anotherItem.preUsed),
                plusIfPresent(preCapacity, anotherItem.preCapacity),
                plusIfPresent(postUsed, anotherItem.postUsed),
                plusIfPresent(postCapacity, anotherItem.postCapacity));
    }

    /**
     * unknown value in this or anotherItem will lead result to be unknown.
     */
    public GCMemoryItem subtract(GCMemoryItem anotherItem) {
        if (anotherItem == null) {
            return new GCMemoryItem(area);
        }
        return new GCMemoryItem(area,
                minus(preUsed, anotherItem.preUsed),
                minus(preCapacity, anotherItem.preCapacity),
                minus(postUsed, anotherItem.postUsed),
                minus(postCapacity, anotherItem.postCapacity));
    }

    /**
     * unknown value in this will lead result to be unknown.
     * unknown value in anotherItem are seen as 0
     */
    public GCMemoryItem subtractIfPresent(GCMemoryItem anotherItem) {
        if (anotherItem == null) {
            return this;
        }
        return new GCMemoryItem(area,
                minusIfPresent(preUsed, anotherItem.preUsed),
                minusIfPresent(preCapacity, anotherItem.preCapacity),
                minusIfPresent(postUsed, anotherItem.postUsed),
                minusIfPresent(postCapacity, anotherItem.postCapacity));
    }

    // Field-wise "fill in the blanks": take anotherItem's value only where this
    // item's value is unknown; known values in this item always win.
    public GCMemoryItem updateIfAbsent(GCMemoryItem anotherItem) {
        if (anotherItem == null) {
            return this;
        }
        return new GCMemoryItem(area,
                preUsed == UNKNOWN_INT ? anotherItem.preUsed : preUsed,
                preCapacity == UNKNOWN_INT ? anotherItem.preCapacity : preCapacity,
                postUsed == UNKNOWN_INT ? anotherItem.postUsed : postUsed,
                postCapacity == UNKNOWN_INT ? anotherItem.postCapacity : postCapacity);
    }

    // x + y; unknown if either operand is unknown.
    private static long plus(long x, long y) {
        if (x == UNKNOWN_INT || y == UNKNOWN_INT) {
            return UNKNOWN_INT;
        }
        return x + y;
    }

    // x + y, treating an unknown y as 0. Note the asymmetry: if x itself is
    // unknown the result is x (= UNKNOWN_INT), i.e. unknown is sticky on x only.
    private static long plusIfPresent(long x, long y) {
        if (x == UNKNOWN_INT || y == UNKNOWN_INT) {
            return x;
        }
        return x + y;
    }

    // x - y, clamped at 0; unknown if either operand is unknown.
    private static long minus(long x, long y) {
        if (x == UNKNOWN_INT || y == UNKNOWN_INT) {
            return UNKNOWN_INT;
        }
        return x >= y ? x - y : 0;
    }

    // x - y clamped at 0, treating an unknown y as 0; same asymmetry as plusIfPresent.
    private static long minusIfPresent(long x, long y) {
        if (x == UNKNOWN_INT || y == UNKNOWN_INT) {
            return x;
        }
        return x >= y ? x - y : 0;
    }

    // In-place scale of all known sizes (e.g. unit conversion); unknown fields stay unknown.
    public void multiply(long x) {
        if (preUsed != UNKNOWN_INT) {
            preUsed *= x;
        }
        if (preCapacity != UNKNOWN_INT) {
            preCapacity *= x;
        }
        if (postUsed != UNKNOWN_INT) {
            postUsed *= x;
        }
        if (postCapacity != UNKNOWN_INT) {
            postCapacity *= x;
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        GCMemoryItem item = (GCMemoryItem) o;
        return preUsed == item.preUsed && preCapacity == item.preCapacity && postUsed == item.postUsed && postCapacity == item.postCapacity && area == item.area;
    }

    @Override
    public int hashCode() {
        return Objects.hash(area, preUsed, preCapacity, postUsed, postCapacity);
    }

    // True when no size at all was parsed for this area.
    public boolean isEmpty() {
        return preUsed == UNKNOWN_INT && preCapacity == UNKNOWN_INT && postUsed == UNKNOWN_INT && postCapacity == UNKNOWN_INT;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        String area = this.area.toString().toLowerCase();
        // Capitalize the first (ASCII) letter of the area name: 'a' - 32 == 'A'.
        sb.append((char) (area.charAt(0) - 32)).append(area.substring(1)).append(": ");
        if (isEmpty()) {
            sb.append("unknown");
        } else {
            // Format: pre(preCap)->post(postCap) in MB, omitting unknown parts;
            // negative values are clamped to 0 before printing.
            if (preUsed != UNKNOWN_INT) {
                sb.append((long) (Math.max(0, preUsed) / KB2MB / KB2MB)).append("M");
            }
            if (preCapacity != UNKNOWN_INT) {
                sb.append('(').append((long) (Math.max(0, preCapacity) / KB2MB / KB2MB)).append("M)");
            }
            if (preUsed != UNKNOWN_INT || preCapacity != UNKNOWN_INT) {
                sb.append("->");
            }
            if (postUsed != UNKNOWN_INT) {
                sb.append((long) (Math.max(0, postUsed) / KB2MB / KB2MB)).append('M');
            } else {
                sb.append("unknown");
            }
            if (postCapacity != UNKNOWN_INT) {
                sb.append('(').append((long) (Math.max(0, postCapacity) / KB2MB / KB2MB)).append("M)");
            }
        }
        return sb.toString();
    }
}
3,120
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/CpuTime.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.gclog.event.evnetInfo; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import static org.eclipse.jifa.gclog.util.Constant.UNKNOWN_DOUBLE; @Data @AllArgsConstructor @NoArgsConstructor public class CpuTime { //unit is ms private double user = UNKNOWN_DOUBLE; private double sys = UNKNOWN_DOUBLE; private double real = UNKNOWN_DOUBLE; public String toString() { return String.format("User=%.2fs Sys=%.2fs Real=%.2fs", user / 1000, sys / 1000, real / 1000); } }
3,121
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/GCEventLevel.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.event.evnetInfo;

/*
 * Nesting level of a GCEventType in the event tree: a top-level collection
 * event, a phase inside it, or a sub-phase inside a phase.
 */
public enum GCEventLevel {
    // a complete collection, e.g. a young gc or a concurrent cycle
    EVENT,
    // a phase of an event, e.g. initial mark
    PHASE,
    // a finer step within a phase
    SUBPHASE,
}
3,122
0
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event
Create_ds/eclipse-jifa/backend/gc-log-analyzer/src/main/java/org/eclipse/jifa/gclog/event/evnetInfo/GCPause.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.gclog.event.evnetInfo;

/*
 * Pause behavior of a GCEventType: whether the whole event stops application
 * threads, runs fully concurrently, or is a mix of both.
 */
public enum GCPause {
    // the whole event is stop-the-world
    PAUSE,
    // the whole event runs concurrently with the application
    CONCURRENT,
    PARTIAL // to get its pause time, we need to consider its paused phases
}
3,123
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/Master.java
/******************************************************************************** * Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master; import io.reactivex.Single; import io.vertx.config.ConfigRetrieverOptions; import io.vertx.config.ConfigStoreOptions; import io.vertx.core.DeploymentOptions; import io.vertx.core.Promise; import io.vertx.core.json.JsonObject; import io.vertx.reactivex.config.ConfigRetriever; import io.vertx.reactivex.core.AbstractVerticle; import io.vertx.reactivex.core.Vertx; import io.vertx.reactivex.ext.web.client.WebClient; import org.eclipse.jifa.master.http.HttpServerVerticle; import org.eclipse.jifa.master.service.ServiceVerticle; import org.eclipse.jifa.master.support.WorkerClient; public class Master extends AbstractVerticle implements Constant { private static final String DEV_CONF = "master-config-dev.json"; public static boolean DEV_MODE = false; public static void main(String[] args) { Vertx vertx = Vertx.vertx(); vertx.deployVerticle(Master.class.getName()); } @Override public void start(Promise<Void> startFuture) { String mc = System.getProperty("jifa.master.config", DEV_CONF); DEV_MODE = DEV_CONF.endsWith(mc); ConfigRetriever configRetriever = ConfigRetriever.create(vertx, new ConfigRetrieverOptions().addStore( new ConfigStoreOptions().setType("file") .setConfig(new JsonObject().put("path", mc)))); configRetriever.rxGetConfig().subscribe(masterConfig -> { WorkerClient.init(masterConfig.getJsonObject(WORKER_CONFIG_KEY), WebClient.create(vertx)); // service Single<String> service = 
vertx.rxDeployVerticle(ServiceVerticle.class.getName(), new DeploymentOptions() .setConfig(masterConfig)); DeploymentOptions httpConfig = new DeploymentOptions().setConfig(masterConfig.getJsonObject(HTTP_VERTICLE_CONFIG_KEY)) .setInstances(Runtime.getRuntime().availableProcessors()); service.flatMap(id -> vertx.rxDeployVerticle(HttpServerVerticle.class.getName(), httpConfig)) .subscribe(id -> startFuture.complete(), startFuture::fail); }, startFuture::fail); } }
3,124
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/Constant.java
/********************************************************************************
 * Copyright (c) 2020, 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master;

/*
 * Shared constants of the master module: config keys, JWT settings and the REST
 * route paths served by the HTTP verticle. Route constants are relative to BASE;
 * use uri(suffix) to build the full path.
 */
public interface Constant extends org.eclipse.jifa.common.Constant {

    /**
     * TOP CONFIG KEY
     */
    String HTTP_VERTICLE_CONFIG_KEY = "http-verticle-config";

    String WORKER_CONFIG_KEY = "worker-config";

    /**
     * Database
     */
    String DB_KEYWORD = "database";
    String DB_USERNAME = "username";
    String DB_PASSWORD = "password";
    String DB_URL = "url";
    String DB_DRIVER_CLASS_NAME = "com.mysql.jdbc.Driver";

    /**
     * User
     */
    String USER_ID_KEY = "id";
    String USER_NAME_KEY = "name";
    String USER_IS_ADMIN_KEY = "isAdmin";
    String USER_INFO_KEY = "userInfo";

    /**
     * JWT
     */
    String JWT_SUBJECT = "Jifa Master";
    String JWT_ISSUER = "Jifa Master";
    String JWT_ALGORITHM_HS256 = "HS256";
    String JWT_ALGORITHM_HS256_PUBLIC_KEY = "Jifa Master Public Key";
    // value is in minutes; 7 * 24 * 60 minutes = seven days
    int JWT_EXPIRES_IN_MINUTES = 7 * 24 * 60; // seven days

    /**
     * BASE URL
     */
    String BASE = "/jifa-api";
    String AUTH = "/auth";
    String USER_INFO = "/userInfo";

    /**
     * FILE URL
     */
    String FILES = "/files";
    String FILE = "/file";
    String PUBLIC_KEY = "/publicKey";
    String FILE_SET_SHARED = "/file/setShared";
    String FILE_UNSET_SHARED = "/file/unsetShared";
    String FILE_DELETE = "/file/delete";
    String FILE_BATCH_DELETE = "/file/batchDelete";
    String FILE_SYNC = "/file/sync";
    String FILE_UPLOAD = "/file/upload";
    String TRANSFER_BY_URL = "/file/transferByURL";
    String TRANSFER_BY_SCP = "/file/transferBySCP";
    String TRANSFER_BY_OSS = "/file/transferByOSS";
    String TRANSFER_BY_S3 = "/file/transferByS3";
    String TRANSFER_PROGRESS = "/file/transferProgress";
    String FILE_UPDATE_DISPLAY_NAME = "/file/updateDisplayName";
    String UPLOAD_TO_OSS = "/file/uploadToOSS";
    String UPLOAD_TO_OSS_PROGRESS = "/file/uploadToOSSProgress";
    String DOWNLOAD = "/file/download";

    /**
     * JOB URL
     */
    String PENDING_JOBS = "/pendingJobs";

    /**
     * HEAP DUMP URL
     */
    String HEAP_DUMP_RELEASE = "/heap-dump/:file/release";
    String HEAP_DUMP_COMMON = "/heap-dump/:file/*";

    /**
     * GC LOG URL
     */
    String GCLOG_RELEASE = "/gc-log/:file/release";
    String GCLOG_COMMON = "/gc-log/:file/*";

    /**
     * THREAD DUMP URL
     */
    String THREAD_DUMP_RELEASE = "/thread-dump/:file/release";
    String THREAD_DUMP_COMMON = "/thread-dump/:file/*";

    /**
     * WORKER URL
     */
    String QUERY_ALL_WORKERS = "/workers";
    String WORKER_DISK_CLEANUP = "/worker/diskCleanup";
    String HEALTH_CHECK = "/worker/healthCheck";

    /**
     * ADMIN URL
     */
    String ADD_ADMIN = "/admin/add";
    String QUERY_ALL_ADMIN = "/admins";

    /**
     * MISC
     */
    String USERNAME = "username";
    String PASSWORD = "password";
    String PORT = "port";
    String SYSTEM_DISK_USAGE = "/system/diskUsage";
    String PING = "/system/ping";

    /**
     * K8S CLOUD CONFIG
     */
    String K8S_KEYWORD = "k8s-config";
    String K8S_NAMESPACE = "namespace";
    String K8S_WORKER_IMAGE = "worker-image";
    String K8S_MINIMAL_MEM_REQ = "minimal-mem-req";
    String K8S_MASTER_POD_NAME = "master-pod-name";
    String K8S_WORKER_PVC_NAME = "worker-pvc-name";

    /**
     * DEV
     */
    String LOCAL_HOST = "localhost";

    // Prefixes a route suffix with the API base path.
    static String uri(String suffix) {
        return BASE + suffix;
    }
}
3,125
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/vo/ExtendedFileInfo.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.vo;

import lombok.Data;
import lombok.EqualsAndHashCode;
import org.eclipse.jifa.common.vo.FileInfo;

/*
 * FileInfo enriched with master-side attributes: the user-visible display name,
 * the owning user's id, and whether the file is shared with other users.
 */
@Data
@EqualsAndHashCode(callSuper = true)
public class ExtendedFileInfo extends FileInfo {

    private String displayName;

    private String userId;

    private boolean shared;
}
3,126
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/vo/PendingJobsResult.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.vo;

import lombok.Data;

import java.util.List;

/*
 * Result of querying a user's pending jobs: either the target is already being
 * processed (inProgress = true), or the current queue of pending jobs.
 */
@Data
public class PendingJobsResult {

    private boolean inProgress;

    // NOTE(review): stays null (not an empty list) when the inProgress
    // constructor is used — callers/serializers must tolerate a null list.
    private List<PendingJob> jobs;

    public PendingJobsResult(boolean inProgress) {
        this.inProgress = inProgress;
    }

    public PendingJobsResult(List<PendingJob> jobs) {
        this.inProgress = false;
        this.jobs = jobs;
    }
}
3,127
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/vo/TransferDestination.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.vo;

import lombok.Data;

/**
 * Where a file transfer should land: the target worker host, the directory on
 * that host, and the file name to store it under.
 */
@Data
public class TransferDestination {

    // IP of the worker that will receive the file
    private String hostIP;

    // absolute directory on the worker
    private String directory;

    // stored file name within the directory
    private String filename;

    public TransferDestination(String hostIP, String directory, String filename) {
        this.filename = filename;
        this.directory = directory;
        this.hostIP = hostIP;
    }
}
3,128
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/vo/PendingJob.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.vo;

import lombok.Data;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.JobType;

/**
 * View object describing one queued job: a projection of {@link Job} limited to
 * the fields the frontend needs (owner, type, target, enqueue time).
 */
@Data
public class PendingJob {

    private String userId;

    private JobType type;

    private String target;

    private long creationTime;

    /** Copies the relevant fields from the given job entity. */
    public PendingJob(Job job) {
        this.creationTime = job.getCreationTime();
        this.target = job.getTarget();
        this.type = job.getType();
        this.userId = job.getUserId();
    }
}
3,129
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/vo/UserToken.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.vo;

import lombok.Data;

/*
 * Response wrapper carrying the signed JWT issued to an authenticated user.
 */
@Data
public class UserToken {

    private String token;

    public UserToken(String token) {
        this.token = token;
    }
}
3,130
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/Master.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity;

import io.vertx.codegen.annotations.DataObject;
import io.vertx.core.json.JsonObject;
import lombok.Data;
import lombok.EqualsAndHashCode;

/*
 * DB entity for a master node. @DataObject(generateConverter = true) makes
 * Vert.x generate MasterConverter, which handles the JSON (de)serialization
 * used by the JsonObject constructor and toJson().
 */
@Data
@EqualsAndHashCode(callSuper = false)
@DataObject(generateConverter = true)
public class Master extends Entity {

    // sentinel returned instead of null when a lookup finds nothing
    public static Master NOT_FOUND = notFoundInstance(Master.class);

    // whether this node currently holds the leader role
    // NOTE(review): public while the other fields are private — likely an
    // oversight (@Data generates an accessor either way); confirm nothing
    // accesses the field directly before tightening it.
    public boolean leader;

    private String hostIP;

    private String hostName;

    public Master() {
    }

    // Deserializes from JSON via the generated converter.
    public Master(JsonObject json) {
        MasterConverter.fromJson(json, this);
    }

    // Serializes to JSON via the generated converter.
    public JsonObject toJson() {
        JsonObject result = new JsonObject();
        MasterConverter.toJson(this, result);
        return result;
    }
}
3,131
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/Admin.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity;

import io.vertx.codegen.annotations.DataObject;
import io.vertx.core.json.JsonObject;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * DB entity marking a user as administrator. JSON (de)serialization is done by
 * the Vert.x generated AdminConverter.
 */
@Data
@EqualsAndHashCode(callSuper = false)
@DataObject(generateConverter = true)
public class Admin extends Entity {

    // sentinel returned instead of null when a lookup finds nothing
    public static Admin NOT_FOUND = notFoundInstance(Admin.class);

    private String userId;

    public Admin() {
    }

    /** Deserializes from JSON via the generated converter. */
    public Admin(JsonObject json) {
        AdminConverter.fromJson(json, this);
    }

    /** Serializes to JSON via the generated converter. */
    public JsonObject toJson() {
        JsonObject json = new JsonObject();
        AdminConverter.toJson(this, json);
        return json;
    }
}
3,132
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/Job.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.entity; import io.vertx.codegen.annotations.DataObject; import io.vertx.core.json.JsonObject; import lombok.Data; import lombok.EqualsAndHashCode; import org.eclipse.jifa.master.entity.enums.JobState; import org.eclipse.jifa.master.entity.enums.JobType; @Data @EqualsAndHashCode(callSuper = false) @DataObject(generateConverter = true) public class Job extends Entity { public static Job NOT_FOUND = notFoundInstance(Job.class); private String userId; private JobType type; private JobState state; private String hostIP; private String target; private String attachment; private long estimatedLoad; private boolean immediate = false; private boolean keepAlive; private long accessTime; public Job() { } public Job(JsonObject json) { JobConverter.fromJson(json, this); } public JsonObject toJson() { JsonObject result = new JsonObject(); JobConverter.toJson(this, result); return result; } }
3,133
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/Worker.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.entity; import io.vertx.codegen.annotations.DataObject; import io.vertx.core.json.JsonObject; import lombok.Data; import lombok.EqualsAndHashCode; @Data @EqualsAndHashCode(callSuper = false) @DataObject(generateConverter = true) public class Worker extends Entity { public static Worker NOT_FOUND = notFoundInstance(Worker.class); private String hostIP; private String hostName; private long currentLoad; private long maxLoad; private long cpuCount; private long memoryUsed; private long memoryTotal; private long diskUsed; private long diskTotal; public Worker() { } public Worker(JsonObject json) { WorkerConverter.fromJson(json, this); } public JsonObject toJson() { JsonObject result = new JsonObject(); WorkerConverter.toJson(this, result); return result; } }
3,134
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/File.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.entity; import io.vertx.codegen.annotations.DataObject; import io.vertx.core.json.JsonObject; import lombok.Data; import lombok.EqualsAndHashCode; import org.eclipse.jifa.common.enums.FileTransferState; import org.eclipse.jifa.common.enums.FileType; import org.eclipse.jifa.master.entity.enums.Deleter; import org.eclipse.jifa.master.model.TransferWay; import java.util.Map; @Data @EqualsAndHashCode(callSuper = false) @DataObject(generateConverter = true) public class File extends Entity { public static File NOT_FOUND = notFoundInstance(File.class); private String userId; private String originalName; private String name; private String displayName; private FileType type; private long size; private FileTransferState transferState; private boolean shared; private boolean downloadable; private boolean inSharedDisk; private String hostIP; private boolean deleted; private Deleter deleter; private long deletedTime; private TransferWay transferWay; private Map<String, String> transferInfo; public File() { } public File(JsonObject json) { FileConverter.fromJson(json, this); } public JsonObject toJson() { JsonObject result = new JsonObject(); FileConverter.toJson(this, result); return result; } public boolean transferred() { return transferState == FileTransferState.SUCCESS; } }
3,135
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/Config.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity;

import io.vertx.codegen.annotations.DataObject;
import io.vertx.core.json.JsonObject;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Entity for a single name/value configuration item. Accessors come from
 * Lombok {@code @Data}; the {@code ConfigConverter} is generated by Vert.x
 * codegen.
 */
@Data
@EqualsAndHashCode(callSuper = false)
@DataObject(generateConverter = true)
public class Config extends Entity {

    /** Shared sentinel returned by queries that matched no row. */
    public static Config NOT_FOUND = notFoundInstance(Config.class);

    private String name;

    private String value;

    /** No-arg constructor required by codegen / reflective instantiation. */
    public Config() {
    }

    /** Rebuilds an instance from its JSON form via the generated converter. */
    public Config(JsonObject json) {
        ConfigConverter.fromJson(json, this);
    }

    /** Serializes this instance to JSON via the generated converter. */
    public JsonObject toJson() {
        JsonObject json = new JsonObject();
        ConfigConverter.toJson(this, json);
        return json;
    }
}
3,136
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/GlobalLock.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity;

import io.vertx.codegen.annotations.DataObject;
import io.vertx.core.json.JsonObject;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Entity backing a named, database-mediated global lock. Accessors come from
 * Lombok {@code @Data}; the {@code GlobalLockConverter} is generated by Vert.x
 * codegen.
 */
@Data
@EqualsAndHashCode(callSuper = false)
@DataObject(generateConverter = true)
public class GlobalLock extends Entity {

    /** Shared sentinel returned by queries that matched no row. */
    public static GlobalLock NOT_FOUND = notFoundInstance(GlobalLock.class);

    private String name;

    /** No-arg constructor required by codegen / reflective instantiation. */
    public GlobalLock() {
    }

    /** Rebuilds an instance from its JSON form via the generated converter. */
    public GlobalLock(JsonObject json) {
        GlobalLockConverter.fromJson(json, this);
    }

    /** Serializes this instance to JSON via the generated converter. */
    public JsonObject toJson() {
        JsonObject json = new JsonObject();
        GlobalLockConverter.toJson(this, json);
        return json;
    }
}
3,137
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/Entity.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity;

import lombok.Data;
import org.eclipse.jifa.common.JifaException;

import java.lang.reflect.Constructor;

/**
 * Base class for all database entities: carries the row id plus creation and
 * last-modified timestamps, and implements the shared "not found" sentinel
 * protocol used by every subclass's {@code NOT_FOUND} constant.
 */
@Data
public abstract class Entity {

    // Sentinel id shared by every NOT_FOUND instance. Declared final (fix: it
    // was previously a mutable static) so the sentinel protocol cannot be
    // broken by accidental reassignment.
    private static final long NOT_FOUND_RECORD_ID = -1;

    private long id;

    private long lastModifiedTime;

    private long creationTime;

    /**
     * Builds the shared "not found" sentinel for an entity type by invoking
     * its public no-arg constructor and tagging it with the sentinel id.
     *
     * @param clazz entity class to instantiate; must expose a public no-arg constructor
     * @param <R>   concrete entity type
     * @return a fresh instance whose id marks it as "not found"
     * @throws JifaException wrapping any reflective failure
     */
    static <R extends Entity> R notFoundInstance(Class<R> clazz) {
        try {
            Constructor<R> constructor = clazz.getConstructor();
            R record = constructor.newInstance();
            record.setId(NOT_FOUND_RECORD_ID);
            return record;
        } catch (Throwable t) {
            // Deliberately broad: surface every reflective failure uniformly.
            throw new JifaException(t);
        }
    }

    /** @return true when this instance represents a real database row */
    public boolean found() {
        return getId() != NOT_FOUND_RECORD_ID;
    }

    /** @return true when this instance is the "not found" sentinel */
    public boolean notFound() {
        return !found();
    }
}
3,138
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/package-info.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
// Enables Vert.x codegen for this package: @DataObject(generateConverter = true)
// classes in org.eclipse.jifa.master.entity get their *Converter companions
// (e.g. MasterConverter, JobConverter) generated into the "Entity" module.
@ModuleGen(groupPackage = "org.eclipse.jifa.master.entity", name = "Entity")
package org.eclipse.jifa.master.entity;

import io.vertx.codegen.annotations.ModuleGen;
3,139
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/enums/JobType.java
/********************************************************************************
 * Copyright (c) 2020, 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity.enums;

import org.eclipse.jifa.common.enums.FileType;

/**
 * Kinds of job the master can schedule: a raw file transfer or one of the
 * analysis job types, each of which maps to a {@link FileType} tag.
 */
public enum JobType {

    FILE_TRANSFER,

    HEAP_DUMP_ANALYSIS,

    GCLOG_ANALYSIS,

    THREAD_DUMP_ANALYSIS;

    /** @return true for the file-transfer job type */
    public boolean isFileTransfer() {
        return this == FILE_TRANSFER;
    }

    /**
     * Maps an analysis job type to the tag of the file type it analyzes.
     *
     * @return the corresponding {@link FileType} tag
     * @throws IllegalStateException for {@link #FILE_TRANSFER}, which has no file-type tag
     */
    public String getTag() {
        if (this == HEAP_DUMP_ANALYSIS) {
            return FileType.HEAP_DUMP.getTag();
        }
        if (this == GCLOG_ANALYSIS) {
            return FileType.GC_LOG.getTag();
        }
        if (this == THREAD_DUMP_ANALYSIS) {
            return FileType.THREAD_DUMP.getTag();
        }
        // FILE_TRANSFER (and any future constant) has no tag.
        throw new IllegalStateException();
    }
}
3,140
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/enums/Deleter.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity.enums;

/**
 * Who deleted a file (see {@code File.deleter}).
 * NOTE(review): values may be persisted by ordinal — do not reorder.
 */
public enum Deleter {

    /** Deleted by the owning user. */
    USER,

    /** Deleted by an administrator. */
    ADMIN,

    /** Deleted automatically by the system (e.g. cleanup tasks). */
    SYSTEM
}
3,141
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/entity/enums/JobState.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.entity.enums;

/**
 * Lifecycle state of a {@code Job}.
 * NOTE(review): values may be persisted by ordinal — do not reorder.
 */
public enum JobState {

    /** Queued, waiting for a worker slot. */
    PENDING,

    /** Currently running on a worker. */
    IN_PROGRESS,

    /** Completed (result available). */
    FINISHED
}
3,142
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/Pattern.java
/********************************************************************************
 * Copyright (c) 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.support;

/**
 * Deployment pattern of the master, used by {@code Factory} to pick the
 * matching {@code WorkerScheduler} implementation.
 */
public enum Pattern {

    /** Static worker pool managed via the database. */
    DEFAULT,

    /** Workers spawned on demand as Kubernetes pods. */
    K8S
}
3,143
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/Factory.java
/********************************************************************************
 * Copyright (c) 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.support;

/**
 * Static factory selecting the {@link WorkerScheduler} implementation that
 * matches the configured deployment {@link Pattern}.
 */
public class Factory {

    // Fix: utility class — suppress the implicit public constructor.
    private Factory() {
    }

    /**
     * Creates a scheduler for the given deployment pattern.
     *
     * @param pattern deployment pattern the master runs under
     * @return {@link K8SWorkerScheduler} for {@link Pattern#K8S}, otherwise
     *         {@link DefaultWorkerScheduler}
     */
    public static WorkerScheduler create(Pattern pattern) {
        switch (pattern) {
            case K8S:
                return new K8SWorkerScheduler();
            case DEFAULT:
            default:
                return new DefaultWorkerScheduler();
        }
    }
}
3,144
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/Utils.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.support; /** * Commonly used across master side */ public class Utils { // TODO: current algorithm used isn't good enough public static long calculateLoadFromSize(double size) { double G = 1024 * 1024 * 1024; long load = (long) Math.ceil(size / G) * 10; load = Math.max(load, 10); load = Math.min(load, 900); return load; } // Roughly a reverse operation of calculateLoad public static double calculateSizeFromLoad(long size) { long estimateLoad = size; estimateLoad = Math.max(10, estimateLoad); return estimateLoad / 10.0; } }
3,145
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/WorkerClient.java
/********************************************************************************
 * Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.support;

import io.reactivex.Single;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.MultiMap;
import io.vertx.reactivex.core.buffer.Buffer;
import io.vertx.reactivex.core.http.HttpServerRequest;
import io.vertx.reactivex.ext.web.client.HttpRequest;
import io.vertx.reactivex.ext.web.client.HttpResponse;
import io.vertx.reactivex.ext.web.client.WebClient;
import io.vertx.reactivex.ext.web.multipart.MultipartForm;
import org.eclipse.jifa.common.enums.FileType;
import org.eclipse.jifa.master.Constant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.Map;

import static org.eclipse.jifa.master.Constant.*;

/**
 * HTTP client used by the master to call worker nodes. Every request is
 * authenticated with basic-auth credentials and targets the worker port, both
 * taken from the master config. All state is static: {@link #init} must be
 * called once before any other method is used.
 */
public class WorkerClient {

    private static final Logger LOGGER = LoggerFactory.getLogger(WorkerClient.class);

    // Shared by every outgoing request; populated by init().
    private static String USERNAME;

    private static String PASSWORD;

    // Port the worker HTTP server listens on.
    private static int PORT;

    private static WebClient client;

    /** Captures credentials, worker port and the shared Vert.x web client. */
    public static void init(JsonObject config, WebClient client) {
        USERNAME = config.getString(Constant.USERNAME);
        PASSWORD = config.getString(Constant.PASSWORD);
        PORT = config.getInteger(Constant.PORT);
        WorkerClient.client = client;
    }

    /** POST with form parameters supplied as a plain map. */
    public static Single<HttpResponse<Buffer>> post(String hostIP, String uri, Map<String, String> params) {
        return send(HttpMethod.POST, hostIP, PORT, uri, params);
    }

    /** POST without parameters. */
    public static Single<HttpResponse<Buffer>> post(String hostIP, String uri) {
        // Cast disambiguates between the Map and MultiMap overloads.
        return send(HttpMethod.POST, hostIP, PORT, uri, (MultiMap) null);
    }

    /** POST with form parameters supplied as a MultiMap. */
    public static Single<HttpResponse<Buffer>> post(String hostIP, String uri, MultiMap params) {
        return send(HttpMethod.POST, hostIP, PORT, uri, params);
    }

    /** GET without parameters. */
    public static Single<HttpResponse<Buffer>> get(String hostIP, String uri) {
        return send(HttpMethod.GET, hostIP, PORT, uri, (MultiMap) null);
    }

    /** GET with query parameters supplied as a plain map. */
    public static Single<HttpResponse<Buffer>> get(String hostIP, String uri, Map<String, String> params) {
        return send(HttpMethod.GET, hostIP, PORT, uri, params);
    }

    /** GET with query parameters supplied as a MultiMap. */
    public static Single<HttpResponse<Buffer>> get(String hostIP, String uri, MultiMap params) {
        return send(HttpMethod.GET, hostIP, PORT, uri, params);
    }

    /** Forwards an incoming server request to a worker on the default port. */
    public static Single<HttpResponse<Buffer>> send(HttpServerRequest request, String hostIP) {
        return send(request, hostIP, PORT);
    }

    /** Forwards an incoming server request (method, uri and params) to a worker. */
    public static Single<HttpResponse<Buffer>> send(HttpServerRequest request, String hostIP, int port) {
        return send(request.method(), hostIP, port, request.uri(), request.params());
    }

    // Bridges the Map-based overloads onto the MultiMap path.
    private static Single<HttpResponse<Buffer>> send(HttpMethod method, String hostIP, int port, String uri,
                                                     Map<String, String> params) {
        return send(method, hostIP, port, uri, MultiMap.caseInsensitiveMultiMap().addAll(params));
    }

    private static Single<HttpResponse<Buffer>> send(HttpMethod method, String hostIP, int port, String uri,
                                                     MultiMap params) {
        return send(request(method, hostIP, port, uri), method == HttpMethod.POST, params);
    }

    /**
     * Adds basic auth and dispatches the request: POST sends params as a form
     * body (or an empty body when null), GET attaches them as query params.
     */
    public static Single<HttpResponse<Buffer>> send(HttpRequest<Buffer> request, boolean post, MultiMap params) {
        request.basicAuthentication(USERNAME, PASSWORD);
        if (post) {
            if (params == null) {
                return request.rxSend();
            }
            return request.rxSendForm(params);
        }
        if (params != null) {
            request.queryParams().addAll(params);
        }
        return request.rxSend();
    }

    /**
     * Uploads a local file to a worker as a multipart form, carrying the
     * logical file name and its {@link FileType}.
     */
    public static Single<HttpResponse<Buffer>> uploadFile(String hostIp, File file, String name, FileType type) {
        HttpRequest<Buffer> request = request(HttpMethod.POST, hostIp, PORT, uri(FILE_UPLOAD));
        MultipartForm formDataParts = MultipartForm.create();
        formDataParts.attribute("fileName", name)
                .attribute("type", type.name())
                .binaryFileUpload(file.getName(), file.getName(), file.getPath(), "application/octet-stream");
        return request.rxSendMultipartForm(formDataParts);
    }

    // Builds the raw Vert.x request object; only GET and POST are supported.
    private static HttpRequest<Buffer> request(HttpMethod method, String hostIP, int port, String uri) {
        if (method == HttpMethod.GET) {
            return client.get(port, hostIP, uri);
        } else if (method == HttpMethod.POST) {
            return client.post(port, hostIP, uri);
        }
        LOGGER.error("Unsupported worker http request method {}", method);
        throw new IllegalArgumentException();
    }

    /** Same as the private builder, on the configured default port. */
    public static HttpRequest<Buffer> request(HttpMethod method, String hostIP, String uri) {
        return request(method, hostIP, PORT, uri);
    }
}
3,146
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/K8SWorkerScheduler.java
/********************************************************************************
 * Copyright (c) 2021,2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.support;

import io.kubernetes.client.custom.Quantity;
import io.kubernetes.client.openapi.ApiClient;
import io.kubernetes.client.openapi.ApiException;
import io.kubernetes.client.openapi.Configuration;
import io.kubernetes.client.openapi.apis.CoreV1Api;
import io.kubernetes.client.openapi.models.*;
import io.kubernetes.client.util.Config;
import io.reactivex.Completable;
import io.reactivex.Single;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.sql.SQLConnection;
import io.vertx.serviceproxy.ServiceException;
import org.apache.commons.codec.digest.DigestUtils;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.model.WorkerInfo;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.task.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.ConnectException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.Constant.*;
import static org.eclipse.jifa.master.Constant.K8S_WORKER_PVC_NAME;

/**
 * WorkerScheduler that spawns one worker pod per job target on Kubernetes.
 * Pod names are derived deterministically from the job target, so the same
 * target always maps to the same worker. Pods mount a shared PVC at /root for
 * dump files.
 */
public class K8SWorkerScheduler implements WorkerScheduler {

    private static final Logger LOGGER = LoggerFactory.getLogger(K8SWorkerScheduler.class);

    // Name prefixes distinguishing ordinary on-demand workers from "special"
    // workers whose job target already is the pod name.
    private static final String WORKER_PREFIX = "jifa-worker";

    private static final String SPECIAL_WORKER_PREFIX = "jifa-special";

    // K8s settings; populated from the "k8s" section of the config in initialize().
    private static String NAMESPACE;

    private static String WORKER_IMAGE;

    private static CoreV1Api api;

    // Lower bound for a pod's memory request, in bytes.
    private static long MINIMAL_MEM_REQ;

    // NOTE(review): read from config but not referenced elsewhere in this class.
    private static String MASTER_POD_NAME;

    // PVC shared by all worker pods (mounted at /root).
    private static String WORKER_PVC_NAME;

    /**
     * Creates a worker pod with the given name and memory request (floored at
     * MINIMAL_MEM_REQ), exposing container port 8102 and mounting the shared
     * PVC. Returns the created pod, or the locally-built template if the API
     * call failed (the ApiException is only printed).
     */
    private static V1Pod createWorker(String name, long requestMemSize) {
        requestMemSize = Math.max(requestMemSize, MINIMAL_MEM_REQ);
        V1Volume volume = new V1Volume();
        volume.setName("dumpfile-volume");
        volume.persistentVolumeClaim(new V1PersistentVolumeClaimVolumeSource().claimName(WORKER_PVC_NAME));
        V1Pod npod;
        npod = new V1PodBuilder()
                .withNewMetadata()
                .withName(name)
                .withLabels(new HashMap<String, String>() {{
                    put("name", "jifa-worker");
                }})
                .endMetadata()
                .withNewSpec()
                .addNewContainer()
                .withResources(
                        new V1ResourceRequirements()
                                .requests(Map.of("memory", new Quantity(String.valueOf(requestMemSize))))
                )
                .withName("my-jifa-worker")
                .withImage(WORKER_IMAGE)
                .withPorts(
                        new V1ContainerPort()
                                .containerPort(8102)
                )
                .withVolumeMounts(
                        new V1VolumeMount()
                                .mountPath("/root")
                                .name("dumpfile-volume")
                )
                .endContainer()
                .withVolumes(volume)
                .endSpec()
                .build();
        try {
            npod = api.createNamespacedPod(NAMESPACE, npod, null, null, null);
        } catch (ApiException e) {
            e.printStackTrace();
        }
        return npod;
    }

    /** Prefix used for hash-named, on-demand worker pods. */
    public static String getNormalWorkerPrefix() {
        return WORKER_PREFIX;
    }

    /** Prefix marking job targets that already are pod names. */
    public static String getSpecialWorkerPrefix() {
        return SPECIAL_WORKER_PREFIX;
    }

    // Lists all pods in the namespace; returns null if the API call failed.
    private static List<V1Pod> listWorker() {
        List<V1Pod> pods = null;
        try {
            V1PodList list = api.listNamespacedPod(NAMESPACE, null, null, null, null, null, null, null, null, null);
            pods = list.getItems();
        } catch (ApiException e) {
            e.printStackTrace();
        }
        return pods;
    }

    // Deletes the named pod (grace period 0); returns null if the call failed.
    private static V1Pod removeWorker(String name) {
        V1Pod npod = null;
        try {
            npod = api.deleteNamespacedPod(name, NAMESPACE, null, null, 0, null, null, null);
        } catch (ApiException e) {
            e.printStackTrace();
        }
        return npod;
    }

    /**
     * Registers the K8s-specific background tasks, then sets up the shared
     * Kubernetes API client and reads the k8s section of the config.
     */
    @Override
    public void initialize(Pivot pivot, Vertx vertx, JsonObject config) {
        new RetiringTask(pivot, vertx);
        new TransferJobResultFillingTask(pivot, vertx);
        new PVCCleanupTask(pivot, vertx);
        new StopAbnormalWorkerTask(pivot, vertx);
        new FileSyncForK8STask(pivot, vertx);
        // Order is important
        ApiClient client;
        try {
            client = Config.defaultClient();
            Configuration.setDefaultApiClient(client);
            api = new CoreV1Api();
            JsonObject k8sConfig = config.getJsonObject(K8S_KEYWORD);
            NAMESPACE = k8sConfig.getString(K8S_NAMESPACE);
            WORKER_IMAGE = k8sConfig.getString(K8S_WORKER_IMAGE);
            MINIMAL_MEM_REQ = k8sConfig.getLong(K8S_MINIMAL_MEM_REQ);
            MASTER_POD_NAME = k8sConfig.getString(K8S_MASTER_POD_NAME);
            WORKER_PVC_NAME = k8sConfig.getString(K8S_WORKER_PVC_NAME);
            LOGGER.info("K8S Namespace: " + NAMESPACE + ", Image: " + WORKER_IMAGE + ", Minimal memory request:" + MINIMAL_MEM_REQ);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Resolves the worker for a job by its deterministic pod name. Returns a
     * NOT_FOUND sentinel (with host name set) when no such pod exists yet,
     * otherwise a hand-made Worker carrying the pod's name and IP. The SQL
     * connection is unused in this scheduler.
     */
    @Override
    public Single<Worker> decide(Job job, SQLConnection conn) {
        String name = buildWorkerName(job);
        WorkerInfo workerInfo = getWorkerInfo(name);
        if (workerInfo == null) {
            Worker none = Worker.NOT_FOUND;
            none.setHostName(name);
            return Single.just(none);
        } else {
            String workerIp = getWorkerInfo(name).getIp();
            Worker handmake = new Worker();
            handmake.setHostIP(workerIp);
            handmake.setHostName(name);
            return Single.just(handmake);
        }
    }

    /** Pods are created on demand, so jobs are never queued as pending. */
    @Override
    public boolean supportPendingJob() {
        return false;
    }

    // Special targets are used verbatim as pod names; ordinary targets are
    // hashed (md5, first 16 hex chars) and prefixed, yielding a stable name.
    private String buildWorkerName(Job job) {
        String target = job.getTarget();
        if (target.startsWith(SPECIAL_WORKER_PREFIX)) {
            return target;
        } else {
            target = DigestUtils.md5Hex(job.getTarget().getBytes(StandardCharsets.UTF_8)).substring(0, 16);
            return WORKER_PREFIX + "-" + target;
        }
    }

    /**
     * Ensures a worker pod exists for the job and completes once the worker
     * answers a ping. Memory is sized from the job's estimated load; errors
     * that indicate "still starting" map to a RETRY ServiceException so the
     * caller can re-issue the request.
     */
    @Override
    public Completable start(Job job) {
        String name = buildWorkerName(job);
        Map<String, String> config = new HashMap<>();
        double fileSizeGb = Utils.calculateSizeFromLoad(job.getEstimatedLoad());
        fileSizeGb *= 1.3; // occupy 130% memory of filesize
        fileSizeGb = Math.min(fileSizeGb, 18.0); // limit to 18g
        long fileSizeKb = (long) (fileSizeGb * 1024 * 1024 * 1024); // convert gb to kb
        config.put("requestMemSize", Long.toString(fileSizeKb));
        schedule(name, config);
        String workerIp = getWorkerInfo(name).getIp();
        if (workerIp == null) {
            // Front-end would retry original request until worker pod has been started or
            // timeout threshold reached.
            return Completable.error(new ServiceException(ErrorCode.RETRY.ordinal(), job.getTarget()));
        }
        final String MSG_RETRY = "RETRY";
        final String MSG_OK = "OK";
        return WorkerClient.get(workerIp, uri(PING))
                .flatMap(resp -> Single.just(MSG_OK))
                .onErrorReturn(err -> {
                    if (err instanceof ConnectException) {
                        // ConnectionException is tolerable because it simply indicates worker is still
                        // starting
                        return MSG_RETRY;
                    } else if (err instanceof IOException) {
                        if (err.getMessage() != null && err.getMessage().contains("Connection reset by peer")) {
                            return MSG_RETRY;
                        }
                    }
                    // Any other failure is surfaced as the error message itself.
                    return err.getMessage();
                }).flatMapCompletable(msg -> {
                    if (msg.equals(MSG_OK)) {
                        return Completable.complete();
                    } else if (msg.equals(MSG_RETRY)) {
                        return Completable.error(new ServiceException(ErrorCode.RETRY.ordinal(), job.getTarget()));
                    } else {
                        return Completable.error(new JifaException("Can not start worker due to internal error: " + msg));
                    }
                });
    }

    // Creates the pod for this id unless it already exists; the memory request
    // is read back from the config map (0 if absent, floored in createWorker).
    private void schedule(String id, Map<String, String> config) {
        long requestMemSize = 0L;
        String tmp = config.get("requestMemSize");
        if (tmp != null) {
            requestMemSize = Long.parseLong(tmp);
        }
        if (getWorkerInfo(id) != null) {
            LOGGER.debug("Create worker {} but it already exists", id);
        } else {
            LOGGER.debug("Create worker {} [MemRequest: {}bytes]", id, requestMemSize);
            createWorker(id, requestMemSize);
        }
    }

    /** Deletes the pod associated with the job, if it exists. */
    @Override
    public Completable stop(Job job) {
        return Completable.fromAction(() -> {
            String id = buildWorkerName(job);
            if (getWorkerInfo(id) == null) {
                LOGGER.debug("Stop worker " + id + " but it does not exist");
            } else {
                LOGGER.debug("Stop worker " + id);
                removeWorker(id);
            }
        });
    }

    /** Deletes the pod identified by the worker's host name, if it exists. */
    @Override
    public Completable stop(Worker worker) {
        return Completable.fromAction(() -> {
            String id = worker.getHostName();
            if (getWorkerInfo(id) == null) {
                LOGGER.debug("Stop worker " + id + " but it does not exist");
            } else {
                LOGGER.debug("Stop worker " + id);
                removeWorker(id);
            }
        });
    }

    /**
     * Lists all pods in the namespace as Worker entities; when the listing
     * fails (null), returns a single-element list holding the NOT_FOUND
     * sentinel.
     */
    @Override
    public Single<List<Worker>> list() {
        List<V1Pod> pods = listWorker();
        if (pods != null) {
            List<Worker> workers = pods.stream().map(pod -> {
                Worker w = new Worker();
                w.setHostName(pod.getMetadata().getName());
                w.setHostIP(pod.getStatus().getPodIP());
                return w;
            }).collect(Collectors.toList());
            return Single.just(workers);
        }
        return Single.just(new ArrayList<>() {{
            add(Worker.NOT_FOUND);
        }});
    }

    // Reads the pod with this name; returns its name/IP, or null when the pod
    // does not exist (ApiException deliberately ignored — absence is expected).
    private WorkerInfo getWorkerInfo(String id) {
        V1Pod npod = null;
        try {
            npod = api.readNamespacedPod(id, NAMESPACE, null, null, null);
        } catch (ApiException ignored) {
        }
        if (null != npod) {
            WorkerInfo info = new WorkerInfo();
            info.setName(id);
            info.setIp(Objects.requireNonNull(npod.getStatus()).getPodIP());
            return info;
        } else {
            return null;
        }
    }
}
3,147
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/WorkerScheduler.java
/********************************************************************************
 * Copyright (c) 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.support;

import io.reactivex.Completable;
import io.reactivex.Single;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.sql.SQLConnection;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.service.impl.Pivot;

import java.util.List;
import java.util.Map;

/**
 * Strategy for placing jobs on workers and managing worker lifecycle.
 * Implementations: {@code DefaultWorkerScheduler} (static worker pool) and
 * {@code K8SWorkerScheduler} (on-demand Kubernetes pods); selected via
 * {@code Factory}.
 */
public interface WorkerScheduler {

    /**
     * Initializes the scheduler (background tasks, external clients).
     *
     * @param pivot  central service coordinator
     * @param vertx  the Vert.x instance used to register tasks
     * @param config master configuration
     */
    void initialize(Pivot pivot, Vertx vertx, JsonObject config);

    /**
     * Decides which worker should run the given job.
     *
     * @param job  related job
     * @param conn sql connection
     * @return worker to run the job
     */
    Single<Worker> decide(Job job, SQLConnection conn);

    /**
     * @return true if the scheduler supports queuing jobs in a pending state
     */
    boolean supportPendingJob();

    /**
     * Starts the worker that will serve the given job.
     *
     * @param job start the worker by job
     */
    Completable start(Job job);

    /**
     * Stops the worker associated with the given job.
     */
    Completable stop(Job job);

    /**
     * Stops the given worker directly (by Worker entity).
     */
    Completable stop(Worker worker);

    /**
     * Lists existing workers.
     *
     * @return list of worker
     */
    Single<List<Worker>> list();
}
3,148
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/support/DefaultWorkerScheduler.java
/********************************************************************************
 * Copyright (c) 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.support;

import io.reactivex.Completable;
import io.reactivex.Single;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.sql.SQLConnection;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.task.DiskCleaningTask;
import org.eclipse.jifa.master.task.DiskUsageUpdatingTask;
import org.eclipse.jifa.master.task.FileSyncTask;
import org.eclipse.jifa.master.task.RetiringTask;
import org.eclipse.jifa.master.task.SchedulingTask;
import org.eclipse.jifa.master.task.TransferJobResultFillingTask;

import java.util.List;
import java.util.Map;

/**
 * Scheduler for the default deployment pattern: workers form a static pool, so
 * starting/stopping a worker per job is a no-op, and job placement is delegated
 * to {@link Pivot}. Direct worker management ({@link #stop(Worker)},
 * {@link #list()}) is not supported in this mode.
 */
public class DefaultWorkerScheduler implements WorkerScheduler {

    private Pivot pivot;

    @Override
    public void initialize(Pivot pivot, Vertx vertx, JsonObject configs) {
        this.pivot = pivot;
        // Periodic maintenance tasks must run on exactly one master instance.
        if (pivot.isLeader()) {
            registerLeaderTasks(pivot, vertx);
        }
    }

    /**
     * Registers the periodic maintenance tasks that only the leader master runs.
     * Each task schedules itself on construction; the scheduling task is also
     * handed to the pivot so it can be triggered on demand.
     */
    private static void registerLeaderTasks(Pivot pivot, Vertx vertx) {
        new DiskCleaningTask(pivot, vertx);
        new RetiringTask(pivot, vertx);
        pivot.setSchedulingTask(new SchedulingTask(pivot, vertx));
        new TransferJobResultFillingTask(pivot, vertx);
        new DiskUsageUpdatingTask(pivot, vertx);
        new FileSyncTask(pivot, vertx);
    }

    @Override
    public Single<Worker> decide(Job job, SQLConnection conn) {
        // Placement logic lives in the pivot for the static-pool pattern.
        return pivot.decideWorker(conn, job);
    }

    @Override
    public boolean supportPendingJob() {
        return true;
    }

    @Override
    public Completable start(Job job) {
        // Workers are long-lived in this mode; nothing to start.
        return Completable.complete();
    }

    @Override
    public Completable stop(Job job) {
        // Workers are long-lived in this mode; nothing to stop.
        return Completable.complete();
    }

    @Override
    public Completable stop(Worker worker) {
        throw new JifaException("Unimplemented");
    }

    @Override
    public Single<List<Worker>> list() {
        throw new JifaException("Unimplemented");
    }
}
3,149
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/UserRoute.java
/********************************************************************************
 * Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.http;

import io.reactivex.Single;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.JWTOptions;
import io.vertx.ext.auth.PubSecKeyOptions;
import io.vertx.ext.auth.jwt.JWTAuthOptions;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.core.http.Cookie;
import io.vertx.reactivex.ext.auth.jwt.JWTAuth;
import io.vertx.reactivex.ext.web.Router;
import io.vertx.reactivex.ext.web.RoutingContext;
import io.vertx.reactivex.ext.web.handler.JWTAuthHandler;
import org.eclipse.jifa.common.util.HTTPRespGuarder;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.model.User;
import org.eclipse.jifa.master.vo.UserToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Authentication routes: issues JWT tokens via the AUTH endpoint and guards
 * every other API route (except health check) with a JWT handler. The token is
 * accepted either from the Authorization header or from a cookie, and is
 * mirrored into a cookie on authenticated requests.
 */
class UserRoute implements Constant {

    private static final Logger LOGGER = LoggerFactory.getLogger(UserRoute.class);

    // URIs exempt from JWT authentication, joined into a regex alternation.
    private static final String EXCLUDE_URI = String.join("|", HEALTH_CHECK, AUTH);

    // Negative lookahead: matches every route EXCEPT the excluded URIs.
    private static final String EXCLUDE_ROUTE_REGEX = "^(?!" + EXCLUDE_URI + "$).*";

    private JWTAuth jwtAuth;

    private JWTOptions jwtOptions;

    void init(Vertx vertx, JsonObject config, Router apiRouter) {
        // symmetric is not safe, but it's enough now...
        // NOTE(review): HS256 with a key baked into Constant means anyone with
        // the source can forge tokens — confirm this is acceptable for the
        // deployment, or move the secret into configuration.
        PubSecKeyOptions pubSecKeyOptions = new PubSecKeyOptions();
        pubSecKeyOptions.setAlgorithm(JWT_ALGORITHM_HS256)
                .setBuffer(JWT_ALGORITHM_HS256_PUBLIC_KEY);
        jwtAuth = JWTAuth.create(vertx, new JWTAuthOptions().addPubSecKey(pubSecKeyOptions));
        jwtOptions = new JWTOptions();
        jwtOptions.setSubject(JWT_SUBJECT).setIssuer(JWT_ISSUER).setExpiresInMinutes(JWT_EXPIRES_IN_MINUTES);
        // Handler order matters: first copy the cookie token into the
        // Authorization header, then let the JWT handler validate it, then
        // extract user info for downstream handlers.
        apiRouter.routeWithRegex(EXCLUDE_ROUTE_REGEX).handler(this::authWithCookie);
        apiRouter.routeWithRegex(EXCLUDE_ROUTE_REGEX).handler(JWTAuthHandler.create(jwtAuth));
        apiRouter.post().path(AUTH).handler(this::auth);
        apiRouter.routeWithRegex(EXCLUDE_ROUTE_REGEX).handler(this::extractInfo);
        apiRouter.get().path(USER_INFO).handler(this::userInfo);
    }

    /**
     * If the request carries no Authorization header but has the auth cookie,
     * promote the cookie value into the header so JWTAuthHandler can see it.
     */
    private void authWithCookie(RoutingContext context) {
        Cookie authCookie = context.request().getCookie(COOKIE_AUTHORIZATION);
        if (!context.request().headers().contains(HEADER_AUTHORIZATION) && authCookie != null) {
            context.request().headers().add(HEADER_AUTHORIZATION,
                    HEADER_AUTHORIZATION_PREFIX + authCookie.getValue());
        }
        context.next();
    }

    /**
     * Issues a JWT for valid credentials.
     * NOTE(review): credentials are hardcoded (admin/admin) and a failed login
     * responds 200 with an EMPTY token rather than an auth failure — confirm
     * the frontend relies on this; otherwise a 401 would be more conventional.
     */
    private void auth(RoutingContext context) {
        Single.just(context.request())
                .flatMap(req -> {
                    String username = req.getParam("username");
                    String password = req.getParam("password");
                    if ("admin".equals(username) && "admin".equals(password)) {
                        return Single.just(new JsonObject()
                                        .put(USER_ID_KEY, "12345")
                                        .put(USER_NAME_KEY, "admin")
                                        .put(Constant.USER_IS_ADMIN_KEY, true))
                                .map(userInfo -> jwtAuth.generateToken(userInfo, jwtOptions));
                    } else {
                        // Empty token signals authentication failure to the caller.
                        return Single.just("");
                    }
                }).subscribe(token -> HTTPRespGuarder.ok(context, new UserToken(token)),
                        t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Materializes the authenticated JWT principal into a {@link User} and
     * stores it in the routing context for downstream handlers.
     */
    private void extractUserInfo(RoutingContext context) {
        JsonObject principal = context.user().principal();
        User user = new User(principal.getString(USER_ID_KEY), principal.getString(USER_NAME_KEY),
                principal.getBoolean(USER_IS_ADMIN_KEY));
        context.put(USER_INFO_KEY, user);
    }

    /**
     * Mirrors the Authorization header token into a cookie (without the
     * "Bearer " prefix, since cookie values cannot contain spaces), so later
     * requests can authenticate via cookie alone.
     */
    private void saveAuthorizationCookie(RoutingContext context) {
        Cookie authCookie = context.getCookie(COOKIE_AUTHORIZATION);
        String authHeader = context.request().getHeader(HEADER_AUTHORIZATION);
        if (authHeader != null && authHeader.startsWith(HEADER_AUTHORIZATION_PREFIX)) {
            // cookie can not have ' ', so we save substring here
            authHeader = authHeader.substring(HEADER_AUTHORIZATION_PREFIX.length());
            if (authCookie == null || !authHeader.equals(authCookie.getValue())) {
                Cookie cookie = Cookie.cookie(COOKIE_AUTHORIZATION, authHeader);
                cookie.setPath("/");
                context.addCookie(cookie);
            }
        }
    }

    // Post-authentication handler: persist the token cookie and expose the user.
    private void extractInfo(RoutingContext context) {
        saveAuthorizationCookie(context);
        extractUserInfo(context);
        context.next();
    }

    // Returns the current authenticated user's info.
    private void userInfo(RoutingContext context) {
        HTTPRespGuarder.ok(context, context.get(USER_INFO_KEY));
    }
}
3,150
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/WorkerRoute.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.http;

import io.reactivex.annotations.NonNull;
import io.reactivex.functions.BiPredicate;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.core.http.HttpServerRequest;
import io.vertx.reactivex.ext.web.Router;
import io.vertx.reactivex.ext.web.RoutingContext;
import io.vertx.serviceproxy.ServiceException;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.common.util.HTTPRespGuarder;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.model.User;
import org.eclipse.jifa.master.service.ProxyDictionary;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.reactivex.SupportService;
import org.eclipse.jifa.master.service.reactivex.WorkerService;
import org.eclipse.jifa.master.support.K8SWorkerScheduler;

import java.util.concurrent.TimeUnit;

import static org.eclipse.jifa.common.util.Assertion.ASSERT;

/**
 * Admin-facing worker routes: list all workers, trigger disk cleanup on a
 * worker, and (in the K8S deployment pattern only) a health-check endpoint
 * that verifies DB connectivity and the ability to spawn a dummy worker pod.
 */
class WorkerRoute extends BaseRoute {

    private WorkerService workerService;

    private SupportService supportService;

    void init(Vertx vertx, JsonObject config, Router apiRouter) {
        workerService = ProxyDictionary.lookup(WorkerService.class);
        supportService = ProxyDictionary.lookup(SupportService.class);
        apiRouter.get().path(Constant.QUERY_ALL_WORKERS).handler(this::queryAll);
        apiRouter.post().path(Constant.WORKER_DISK_CLEANUP).handler(this::diskCleanup);
        // The liveness-style health check only makes sense when workers are
        // provisioned on demand, i.e. the non-default (K8S) pattern.
        if (!Pivot.getInstance().isDefaultPattern()) {
            assert Pivot.getInstance().getScheduler() instanceof K8SWorkerScheduler : "unexpected scheduler";
            apiRouter.get().path(Constant.HEALTH_CHECK).handler(this::healthCheck);
        }
    }

    /** Lists all workers; restricted to admin users. */
    private void queryAll(RoutingContext context) {
        User user = context.get(Constant.USER_INFO_KEY);
        ASSERT.isTrue(user.isAdmin(), ErrorCode.FORBIDDEN);
        workerService.rxQueryAll()
                .subscribe(
                        workers -> HTTPRespGuarder.ok(context, workers),
                        t -> HTTPRespGuarder.fail(context, t)
                );
    }

    /** Triggers disk cleanup on the worker identified by "host_ip"; admin only. */
    private void diskCleanup(RoutingContext context) {
        User user = context.get(Constant.USER_INFO_KEY);
        ASSERT.isTrue(user.isAdmin(), ErrorCode.FORBIDDEN);
        HttpServerRequest request = context.request();
        String hostIP = request.getParam("host_ip");
        workerService.rxDiskCleanup(hostIP).subscribe(
                () -> HTTPRespGuarder.ok(context, "ok"),
                t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Health check: fails fast if the DB is unreachable; otherwise tries to
     * start a dummy worker (retrying on RETRY failures) and then stops it.
     */
    private void healthCheck(RoutingContext context) {
        supportService.rxIsDBConnectivity()
                // Treat any DB probe error as "not connected" rather than failing the stream.
                .onErrorReturn(e -> Boolean.FALSE)
                .subscribe(connectivity -> {
                    if (!connectivity) {
                        HTTPRespGuarder.fail(context, new JifaException("Can not connect to DB"));
                    } else {
                        supportService.rxStartDummyWorker()
                                .retry(
                                        // Note, http request has its own timeout mechanism, and we can not re-send health check
                                        // since liveliness probe is a one-shot test, it's safe to use stream retry API.
                                        new RetryStartingWorker(30))
                                .andThen(supportService.rxStopDummyWorker())
                                .subscribe(() -> HTTPRespGuarder.ok(context, "SUCCESS"),
                                        e -> HTTPRespGuarder.fail(context,
                                                new JifaException("Can not start testing worker due to " + e)));
                    }
                }, e -> HTTPRespGuarder.fail(context, e));
    }

    /**
     * Retry predicate for starting the dummy worker: retries up to
     * {@code retryLimit} times, but only for service failures whose code is
     * {@link ErrorCode#RETRY}, sleeping 1s between attempts.
     */
    private static class RetryStartingWorker implements BiPredicate<Integer, Throwable> {

        private final int retryLimit;

        public RetryStartingWorker(int retryLimit) {
            this.retryLimit = retryLimit;
        }

        @Override
        public boolean test(@NonNull Integer integer, @NonNull Throwable ex) throws Exception {
            if (integer < retryLimit) {
                if (ex instanceof ServiceException) {
                    ServiceException se = (ServiceException) ex;
                    int failureCode = se.failureCode();
                    if (failureCode == ErrorCode.RETRY.ordinal()) {
                        // Back off briefly before the next attempt.
                        // NOTE(review): this blocks the calling thread — confirm it
                        // never runs on a vert.x event-loop thread.
                        TimeUnit.SECONDS.sleep(1);
                        return true;
                    }
                }
            }
            return false;
        }
    }
}
3,151
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/HttpServerVerticle.java
/********************************************************************************
 * Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.http;

import io.reactivex.Single;
import io.vertx.core.Promise;
import io.vertx.reactivex.core.AbstractVerticle;
import io.vertx.reactivex.core.http.HttpServerResponse;
import io.vertx.reactivex.ext.web.Router;
import io.vertx.reactivex.ext.web.RoutingContext;
import io.vertx.reactivex.ext.web.client.WebClient;
import io.vertx.reactivex.ext.web.handler.BodyHandler;
import org.eclipse.jifa.common.util.HTTPRespGuarder;
import org.eclipse.jifa.master.Constant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Verticle that wires up the master's HTTP server: builds the root router,
 * mounts the API sub-router, registers all route groups, and starts listening
 * on the configured port. The start promise completes only after the server
 * is actually listening.
 */
public class HttpServerVerticle extends AbstractVerticle implements Constant {

    private static final Logger LOGGER = LoggerFactory.getLogger(HttpServerVerticle.class);

    // NOTE(review): assigned in start() but not read anywhere in this file —
    // confirm it is needed, otherwise it can be removed.
    private WebClient client;

    @Override
    public void start(Promise<Void> startFuture) {
        // Route setup runs on a worker thread; the blocking future completes
        // (or fails) once rxListen resolves.
        vertx.rxExecuteBlocking(future -> {
            client = WebClient.create(vertx);

            // base
            Router router = Router.router(vertx);
            router.errorHandler(Constant.HTTP_INTERNAL_SERVER_ERROR_STATUS_CODE, this::error);
            router.errorHandler(Constant.HTTP_BAD_REQUEST_STATUS_CODE, this::error);

            // jifa api
            Router apiRouter = Router.router(vertx);
            router.mountSubRouter(BASE, apiRouter);
            // BodyHandler must be registered before any POST handler that reads the body.
            apiRouter.post().handler(BodyHandler.create());

            // UserRoute first: it installs the auth handlers that guard the rest.
            new UserRoute().init(vertx, config(), apiRouter);
            new JobRoute().init(vertx, config(), apiRouter);
            new AdminRoute().init(vertx, config(), apiRouter);
            new WorkerRoute().init(vertx, config(), apiRouter);
            new FileRoute().init(vertx, config(), apiRouter);
            new AnalyzerRoute().init(vertx, config(), apiRouter);

            Integer port = config().getInteger("port");
            vertx.createHttpServer().requestHandler(router).rxListen(port).subscribe(s -> {
                LOGGER.info("Master-Http-Server-Verticle started successfully, port is {}", port);
                future.complete(Single.just(this));
            }, future::fail);
        }).subscribe(f -> startFuture.complete(), startFuture::fail);
    }

    /**
     * Shared error handler for 400/500: forwards the recorded failure to the
     * client unless the response has already been ended or closed.
     */
    void error(RoutingContext context) {
        Throwable failure = context.failure();
        HttpServerResponse response = context.response();
        if (failure != null && !response.ended() && !response.closed()) {
            HTTPRespGuarder.fail(context, failure);
        }
    }
}
3,152
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/FileRoute.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.http;

import io.reactivex.Single;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.MultiMap;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.core.buffer.Buffer;
import io.vertx.reactivex.core.http.HttpServerRequest;
import io.vertx.reactivex.ext.web.FileUpload;
import io.vertx.reactivex.ext.web.Router;
import io.vertx.reactivex.ext.web.RoutingContext;
import io.vertx.reactivex.ext.web.client.HttpRequest;
import io.vertx.reactivex.ext.web.codec.BodyCodec;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.common.enums.FileTransferState;
import org.eclipse.jifa.common.enums.FileType;
import org.eclipse.jifa.common.enums.ProgressState;
import org.eclipse.jifa.common.util.FileUtil;
import org.eclipse.jifa.common.util.HTTPRespGuarder;
import org.eclipse.jifa.common.vo.FileInfo;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.common.vo.TransferProgress;
import org.eclipse.jifa.common.vo.TransferringFile;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.model.TransferWay;
import org.eclipse.jifa.master.model.User;
import org.eclipse.jifa.master.service.ProxyDictionary;
import org.eclipse.jifa.master.service.reactivex.FileService;
import org.eclipse.jifa.master.service.reactivex.JobService;
import org.eclipse.jifa.master.support.WorkerClient;
import org.eclipse.jifa.master.vo.ExtendedFileInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.eclipse.jifa.common.util.Assertion.ASSERT;
import static org.eclipse.jifa.common.util.GsonHolder.GSON;

/**
 * File management routes: listing, metadata, deletion, transfer from various
 * sources (URL/SCP/OSS/S3/direct upload), transfer progress tracking, sharing,
 * renaming, and download/upload proxying to the worker that holds the file.
 */
class FileRoute extends BaseRoute implements Constant {

    private static final Logger LOGGER = LoggerFactory.getLogger(FileRoute.class.getName());

    // SSH public key served to users so they can authorize SCP transfers;
    // empty string when the key file is absent.
    private static String PUB_KEY = Constant.EMPTY_STRING;

    static {
        // Load ~/.ssh/jifa-ssh-key.pub once at class load time.
        String path = System.getProperty("user.home") + java.io.File.separator + ".ssh" +
                java.io.File.separator + "jifa-ssh-key.pub";
        java.io.File file = new java.io.File(path);
        if (file.exists()) {
            PUB_KEY = FileUtil.content(file);
        } else {
            LOGGER.warn("SSH public key file {} doesn't exist", file.getAbsolutePath());
        }
    }

    private FileService fileService;

    private JobService jobService;

    /**
     * Converts a {@link File} entity into the view object returned to clients.
     * Falls back to the original name when no display name is set; downloads
     * are disabled in the view by default.
     */
    private static ExtendedFileInfo buildFileInfo(File file) {
        ExtendedFileInfo info = new ExtendedFileInfo();
        info.setOriginalName(file.getOriginalName());
        info.setDisplayName(
                StringUtils.isBlank(file.getDisplayName()) ? file.getOriginalName() : file.getDisplayName());
        info.setName(file.getName());
        info.setType(file.getType());
        info.setSize(file.getSize());
        info.setTransferState(file.getTransferState());
        info.setShared(file.isShared());
        info.setDownloadable(false);
        info.setCreationTime(file.getCreationTime());
        info.setUserId(file.getUserId());
        return info;
    }

    void init(Vertx vertx, JsonObject config, Router apiRouter) {
        fileService = ProxyDictionary.lookup(FileService.class);
        jobService = ProxyDictionary.lookup(JobService.class);
        apiRouter.get().path(FILES).handler(this::files);
        apiRouter.get().path(FILE).handler(this::file);
        apiRouter.post().path(FILE_DELETE).handler(this::delete);
        apiRouter.post().path(TRANSFER_BY_URL).handler(context -> transfer(context, TransferWay.URL));
        apiRouter.post().path(TRANSFER_BY_SCP).handler(context -> transfer(context, TransferWay.SCP));
        apiRouter.post().path(TRANSFER_BY_OSS).handler(context -> transfer(context, TransferWay.OSS));
        apiRouter.post().path(TRANSFER_BY_S3).handler(context -> transfer(context, TransferWay.S3));
        apiRouter.get().path(TRANSFER_PROGRESS).handler(this::fileTransportProgress);
        apiRouter.get().path(PUBLIC_KEY).handler(this::publicKey);
        apiRouter.post().path(FILE_SET_SHARED).handler(this::setShared);
        apiRouter.post().path(FILE_UNSET_SHARED).handler(this::unsetShared);
        apiRouter.post().path(FILE_UPDATE_DISPLAY_NAME).handler(this::updateDisplayName);
        apiRouter.post().path(UPLOAD_TO_OSS).handler(this::uploadToOSS);
        apiRouter.get().path(UPLOAD_TO_OSS_PROGRESS).handler(this::uploadToOSSProgress);
        apiRouter.get().path(DOWNLOAD).handler(this::download);
        apiRouter.post().path(FILE_UPLOAD).handler(this::upload);
    }

    /**
     * Paged listing of the current user's files, optionally filtered by an
     * expected file name; total count and page data are fetched in parallel
     * and zipped into a {@link PageView}.
     */
    private void files(RoutingContext context) {
        String userId = context.<User>get(USER_INFO_KEY).getId();
        int page = Integer.parseInt(context.request().getParam(PAGE));
        int pageSize = Integer.parseInt(context.request().getParam(PAGE_SIZE));
        FileType type = FileType.valueOf(context.request().getParam(FILE_TYPE));
        String expected = context.request().getParam("expectedFilename");
        Single<Integer> countSingle = fileService.rxCount(userId, type, expected);
        Single<List<File>> fileRecordSingle = fileService.rxFiles(userId, type, expected, page, pageSize);
        Single.zip(countSingle, fileRecordSingle, (count, fileRecords) -> {
            PageView<FileInfo> pv = new PageView<>();
            pv.setTotalSize(count);
            pv.setPage(page);
            pv.setPageSize(pageSize);
            pv.setData(fileRecords.stream().map(FileRoute::buildFileInfo).collect(Collectors.toList()));
            return pv;
        }).subscribe(pageView -> HTTPRespGuarder.ok(context, pageView),
                t -> HTTPRespGuarder.fail(context, t));
    }

    /** Returns metadata for one file, after availability and permission checks. */
    private void file(RoutingContext context) {
        User user = context.get(USER_INFO_KEY);
        String name = context.request().getParam("name");
        fileService.rxFile(name)
                .doOnSuccess(this::assertFileAvailable)
                .doOnSuccess(file -> checkPermission(user, file))
                .map(FileRoute::buildFileInfo)
                .subscribe(fileView -> HTTPRespGuarder.ok(context, fileView),
                        throwable -> HTTPRespGuarder.fail(context, throwable));
    }

    /**
     * Deletes a file. Requires the transfer to be in a final state; records
     * whether the owner or an admin performed the deletion.
     */
    private void delete(RoutingContext context) {
        User user = context.get(USER_INFO_KEY);
        String name = context.request().getParam("name");
        fileService.rxFile(name)
                .doOnSuccess(this::assertFileAvailable)
                .doOnSuccess(file -> checkDeletePermission(user, file))
                .doOnSuccess(file -> ASSERT.isTrue(file.getTransferState().isFinal()))
                .flatMapCompletable(
                        file -> fileService.rxDeleteFile(name,
                                file.getUserId().equals(user.getId()) ? Deleter.USER : Deleter.ADMIN))
                .subscribe(() -> HTTPRespGuarder.ok(context), t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Starts a file transfer via the given {@link TransferWay}. The source
     * path is assembled from the way-specific request parameters (joined with
     * '_'), a unique storage name is generated (or reused on retry), and the
     * transfer job is dispatched through the file service.
     */
    private void transfer(RoutingContext context, TransferWay way) {
        String userId = context.<User>get(USER_INFO_KEY).getId();
        HttpServerRequest request = context.request();
        String[] paths = way.getPathKeys();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < paths.length; i++) {
            sb.append(request.getParam(paths[i]));
            if (i != paths.length - 1) {
                sb.append("_");
            }
        }
        String origin = extractOriginalName(sb.toString());
        FileType type = FileType.valueOf(context.request().getParam("type"));
        String name;
        if (context.request().getParam("retry") != null) {
            // Retrying an earlier transfer reuses its storage name.
            name = context.request().getParam("retry");
        } else {
            name = buildFileName(userId, origin);
        }
        request.params().add("fileName", name);
        fileService.rxTransfer(userId, type, origin, name, way, convert(request.params()))
                .ignoreElement()
                .toSingleDefault(name)
                .subscribe(n -> HTTPRespGuarder.ok(context, new TransferringFile(n)),
                        t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Reports transfer progress for a file. If no active transfer job exists
     * the stored final state is reported; otherwise the owning worker is
     * queried, and when the worker reports a final state the result is
     * persisted before responding.
     */
    private void fileTransportProgress(RoutingContext context) {
        String name = context.request().getParam("name");
        User user = context.get(USER_INFO_KEY);
        jobService.rxFindActive(JobType.FILE_TRANSFER, name).flatMap(job -> {
            if (job.notFound()) {
                // Transfer already finished: derive progress from the file record.
                return fileService.rxFile(name)
                        .doOnSuccess(this::assertFileAvailable)
                        .doOnSuccess(file -> checkPermission(user, file))
                        .flatMap(file -> Single.just(toProgress(file)));
            }
            checkPermission(user, job);
            // Transfer in flight: proxy the progress query to the worker running it.
            Single<TransferProgress> progressSingle = WorkerClient.send(context.request(), job.getHostIP())
                    .doOnSuccess(resp -> ASSERT.isTrue(HTTP_GET_OK_STATUS_CODE == resp.statusCode(),
                            resp::bodyAsString))
                    .map(resp -> GSON.fromJson(resp.bodyAsString(), TransferProgress.class));
            return progressSingle.flatMap(progress -> {
                ProgressState state = progress.getState();
                if (state.isFinal()) {
                    LOGGER.info("File transfer {} done, state is {}", name, progress.getState());
                    FileTransferState transferState = FileTransferState.fromProgressState(state);
                    // Persist the outcome before reporting it to the client.
                    return fileService.rxTransferDone(name, transferState, progress.getTotalSize())
                            .andThen(Single.just(progress));
                }
                return Single.just(progress);
            });
        }).subscribe(p -> HTTPRespGuarder.ok(context, p), t -> HTTPRespGuarder.fail(context, t));
    }

    /** Asks the worker holding the (fully transferred) file to upload it to OSS. */
    private void uploadToOSS(RoutingContext context) {
        String name = context.request().getParam("srcName");
        ASSERT.isTrue(StringUtils.isNotBlank(name), ErrorCode.ILLEGAL_ARGUMENT, "srcName mustn't be empty");
        User user = context.get(USER_INFO_KEY);
        fileService.rxFile(name)
                .doOnSuccess(file -> assertFileAvailable(file))
                .doOnSuccess(file -> checkPermission(user, file))
                .doOnSuccess(file -> ASSERT.isTrue(file.transferred(), ErrorCode.NOT_TRANSFERRED))
                .doOnSuccess(file -> context.request().params().add("type", file.getType().name()))
                .flatMap(file -> WorkerClient.send(context.request(), file.getHostIP()))
                .subscribe(resp -> HTTPRespGuarder.ok(context, resp.statusCode(), resp.bodyAsString()),
                        t -> HTTPRespGuarder.fail(context, t));
    }

    /** Queries the worker for the progress of an in-flight OSS upload. */
    private void uploadToOSSProgress(RoutingContext context) {
        String name = context.request().getParam("name");
        ASSERT.isTrue(StringUtils.isNotBlank(name), ErrorCode.ILLEGAL_ARGUMENT, "name mustn't be empty");
        User user = context.get(USER_INFO_KEY);
        fileService.rxFile(name)
                .doOnSuccess(file -> assertFileAvailable(file))
                .doOnSuccess(file -> checkPermission(user, file))
                .doOnSuccess(file -> ASSERT.isTrue(file.transferred(), ErrorCode.NOT_TRANSFERRED))
                .flatMap(file -> WorkerClient.send(context.request(), file.getHostIP()))
                .subscribe(resp -> HTTPRespGuarder.ok(context, resp.statusCode(), resp.bodyAsString()),
                        t -> HTTPRespGuarder.fail(context, t));
    }

    /** Marks a file as shared (visible to other users). */
    private void setShared(RoutingContext context) {
        String name = context.request().getParam("name");
        User user = context.get(USER_INFO_KEY);
        fileService.rxFile(name)
                .doOnSuccess(file -> ASSERT.isTrue(file.found(), ErrorCode.FILE_DOES_NOT_EXIST))
                .doOnSuccess(file -> checkPermission(user, file))
                .ignoreElement()
                .andThen(fileService.rxSetShared(name))
                .subscribe(() -> HTTPRespGuarder.ok(context), t -> HTTPRespGuarder.fail(context, t));
    }

    /** Updates the user-visible display name of a file. */
    private void updateDisplayName(RoutingContext context) {
        String name = context.request().getParam("name");
        String displayName = context.request().getParam("displayName");
        User user = context.get(USER_INFO_KEY);
        fileService.rxFile(name)
                .doOnSuccess(file -> ASSERT.isTrue(file.found(), ErrorCode.FILE_DOES_NOT_EXIST))
                .doOnSuccess(file -> checkPermission(user, file))
                .ignoreElement()
                .andThen(fileService.rxUpdateDisplayName(name, displayName))
                .subscribe(() -> HTTPRespGuarder.ok(context), t -> HTTPRespGuarder.fail(context, t));
    }

    /** Serves the master's SSH public key (for authorizing SCP transfers). */
    private void publicKey(RoutingContext context) {
        HTTPRespGuarder.ok(context, PUB_KEY);
    }

    /** Un-sharing is deliberately unsupported; always responds with an error. */
    private void unsetShared(RoutingContext context) {
        HTTPRespGuarder.fail(context, new JifaException(ErrorCode.UNSUPPORTED_OPERATION));
    }

    /**
     * Derives a safe original file name from a source path/URL: takes the last
     * path segment, strips any query string, replaces unsafe characters with
     * '_', and falls back to a timestamp if nothing remains.
     * NOTE(review): splitting on java.io.File.separatorChar means that on
     * Windows a '/'-separated URL would not be split — confirm the master only
     * runs on platforms where separatorChar is '/'.
     */
    private String extractOriginalName(String path) {
        String name = path.substring(path.lastIndexOf(java.io.File.separatorChar) + 1);
        if (name.contains("?")) {
            name = name.substring(0, name.indexOf("?"));
        }
        name = name.replaceAll("[%\\\\& ]", "_");
        if (name.length() == 0) {
            name = System.currentTimeMillis() + "";
        }
        return name;
    }

    /**
     * Copies a vert.x {@link MultiMap} into a plain map. Note: duplicate keys
     * collapse to the last value.
     */
    private Map<String, String> convert(MultiMap src) {
        Map<String, String> target = new HashMap<>();
        for (Map.Entry<String, String> entry : src) {
            target.put(entry.getKey(), entry.getValue());
        }
        return target;
    }

    /**
     * Builds a {@link TransferProgress} from a file record whose transfer has
     * already reached a final state.
     */
    private TransferProgress toProgress(File file) {
        FileTransferState transferState = file.getTransferState();
        ASSERT.isTrue(transferState.isFinal(), ErrorCode.SANITY_CHECK);
        TransferProgress progress = new TransferProgress();
        progress.setState(transferState.toProgressState());
        progress.setTotalSize(file.getSize());
        if (transferState == FileTransferState.SUCCESS) {
            progress.setPercent(1.0);
            progress.setTransferredSize(file.getSize());
        }
        return progress;
    }

    /**
     * Streams a file download by piping the owning worker's response directly
     * into the client response (attachment headers are set up front).
     */
    private void download(RoutingContext context) {
        String name = context.request().getParam("name");
        User user = context.get(USER_INFO_KEY);
        fileService.rxFile(name)
                .doOnSuccess(file -> ASSERT.isTrue(file.found(), ErrorCode.FILE_DOES_NOT_EXIST))
                .doOnSuccess(file -> checkPermission(user, file))
                .flatMap(file -> {
                    context.response()
                            .putHeader(HEADER_CONTENT_LENGTH_KEY, String.valueOf(file.getSize()))
                            .putHeader(HEADER_CONTENT_DISPOSITION, "attachment;filename=" + file.getName())
                            .putHeader(HEADER_CONTENT_TYPE_KEY, CONTENT_TYPE_FILE_FORM);
                    HttpRequest<Buffer> workerRequest =
                            WorkerClient.request(context.request().method(), file.getHostIP(),
                                    context.request().uri());
                    // Pipe the worker's body straight through to the client.
                    workerRequest.as(BodyCodec.pipe(context.response()));
                    return WorkerClient.send(workerRequest, context.request().method() == HttpMethod.POST, null);
                })
                .subscribe(resp -> context.response().end(), t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Handles a direct multipart upload: registers the transfer, forwards the
     * first uploaded file to the chosen worker, records success/failure, and
     * finally deletes the temporary upload files on a worker thread.
     */
    private void upload(RoutingContext context) {
        FileUpload[] fileUploads = context.fileUploads().toArray(new FileUpload[0]);
        if (fileUploads.length == 0) {
            HTTPRespGuarder.ok(context);
            return;
        }
        // Only the first upload is transferred; the rest are just cleaned up below.
        FileUpload file = fileUploads[0];
        String userId = context.<User>get(USER_INFO_KEY).getId();
        HttpServerRequest request = context.request();
        String origin = file.fileName();
        FileType type = FileType.valueOf(context.request().getParam("type"));
        String name = buildFileName(userId, origin);
        request.params().add("fileName", name);
        fileService.rxTransfer(userId, type, origin, name, TransferWay.UPLOAD, convert(request.params()))
                .flatMap(job -> WorkerClient.uploadFile(job.getHostIP(),
                        new java.io.File(file.uploadedFileName()), name, type))
                .subscribe(resp -> {
                    FileTransferState state = resp.statusCode() == Constant.HTTP_POST_CREATED_STATUS ?
                            FileTransferState.SUCCESS : FileTransferState.ERROR;
                    fileService.rxTransferDone(name, state, file.size())
                            .subscribe(
                                    () -> HTTPRespGuarder
                                            .ok(context, resp.statusCode(), new TransferringFile(name)),
                                    t -> HTTPRespGuarder.fail(context, t)
                            );
                    // Remove the temporary multipart files off the event loop.
                    context.vertx().executeBlocking(
                            p -> {
                                for (FileUpload f : fileUploads) {
                                    context.vertx().fileSystem().delete(f.uploadedFileName());
                                }
                            }
                    );
                }, t -> HTTPRespGuarder.fail(context, t));
    }
}
3,153
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/JobRoute.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.http;

import io.reactivex.Single;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.web.Router;
import io.vertx.reactivex.ext.web.RoutingContext;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.util.HTTPRespGuarder;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.JobState;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.service.ProxyDictionary;
import org.eclipse.jifa.master.service.reactivex.JobService;
import org.eclipse.jifa.master.vo.PendingJob;
import org.eclipse.jifa.master.vo.PendingJobsResult;

import java.util.ArrayList;
import java.util.stream.Collectors;

import static org.eclipse.jifa.common.util.Assertion.ASSERT;

/**
 * Job queue routes: lets a client ask, for a given job type and target,
 * whether its job is already in progress or, if still pending, which jobs are
 * queued ahead of it.
 */
class JobRoute extends BaseRoute implements Constant {

    private JobService jobService;

    void init(Vertx vertx, JsonObject config, Router apiRouter) {
        jobService = ProxyDictionary.lookup(JobService.class);
        apiRouter.get().path(PENDING_JOBS).handler(this::frontPendingJobs);
    }

    /**
     * Looks up the active job for the requested type/target and responds with
     * either an "in progress" marker or the pending queue in front of it.
     */
    private void frontPendingJobs(RoutingContext context) {
        JobType jobType = JobType.valueOf(context.request().getParam("type"));
        String jobTarget = context.request().getParam("target");
        jobService.rxFindActive(jobType, jobTarget)
                .doOnSuccess(this::assertJobExist)
                .flatMap(this::resolvePendingResult)
                .subscribe(result -> HTTPRespGuarder.ok(context, result),
                        t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Builds the result for one active job: an in-progress job yields a simple
     * flag; a pending job yields the jobs queued ahead of it plus itself, in
     * queue order. Any other state fails the sanity check.
     */
    private Single<PendingJobsResult> resolvePendingResult(Job job) {
        if (job.getState() == JobState.IN_PROGRESS) {
            return Single.just(new PendingJobsResult(true));
        }
        ASSERT.isTrue(job.getState() == JobState.PENDING, ErrorCode.SANITY_CHECK);
        return jobService.rxPendingJobsInFrontOf(job)
                .map(ahead -> {
                    // Append the caller's own job so it appears last in the queue view.
                    ArrayList<Job> queue = new ArrayList<>(ahead);
                    queue.add(job);
                    return queue;
                })
                .map(queue -> queue.stream().map(PendingJob::new).collect(Collectors.toList()))
                .map(PendingJobsResult::new);
    }
}
3,154
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/AdminRoute.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.http; import io.vertx.core.json.JsonObject; import io.vertx.reactivex.core.Vertx; import io.vertx.reactivex.ext.web.Router; import io.vertx.reactivex.ext.web.RoutingContext; import org.eclipse.jifa.common.ErrorCode; import org.eclipse.jifa.common.util.HTTPRespGuarder; import org.eclipse.jifa.master.Constant; import org.eclipse.jifa.master.model.User; import org.eclipse.jifa.master.service.ProxyDictionary; import org.eclipse.jifa.master.service.reactivex.AdminService; import static org.eclipse.jifa.common.util.Assertion.ASSERT; public class AdminRoute extends BaseRoute { private AdminService adminService; void init(Vertx vertx, JsonObject config, Router apiRouter) { adminService = ProxyDictionary.lookup(AdminService.class); apiRouter.post().path(Constant.ADD_ADMIN).handler(this::add); apiRouter.get().path(Constant.QUERY_ALL_ADMIN).handler(this::queryAll); } private void add(RoutingContext context) { User user = context.get(USER_INFO_KEY); ASSERT.isTrue(user.isAdmin(), ErrorCode.FORBIDDEN); String userId = context.request().getParam("userId"); adminService.rxAdd(userId) .subscribe(() -> HTTPRespGuarder.ok(context), t -> HTTPRespGuarder.fail(context, t)); } private void queryAll(RoutingContext context) { User user = context.get(USER_INFO_KEY); ASSERT.isTrue(user.isAdmin(), ErrorCode.FORBIDDEN); adminService.rxQueryAll() .subscribe(admins -> HTTPRespGuarder.ok(context, admins), t -> 
HTTPRespGuarder.fail(context, t)); } }
3,155
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/BaseRoute.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.http; import com.google.common.base.Strings; import org.eclipse.jifa.common.ErrorCode; import org.eclipse.jifa.master.Constant; import org.eclipse.jifa.master.entity.File; import org.eclipse.jifa.master.entity.Job; import org.eclipse.jifa.master.entity.enums.JobState; import org.eclipse.jifa.master.model.User; import org.eclipse.jifa.master.vo.PendingJob; import static org.eclipse.jifa.common.util.Assertion.ASSERT; import static org.eclipse.jifa.common.util.GsonHolder.GSON; class BaseRoute implements Constant { static final String SEP = "-"; void assertFileAvailable(File file) { ASSERT.isTrue(file.found(), ErrorCode.FILE_DOES_NOT_EXIST); ASSERT.isTrue(!file.isDeleted(), ErrorCode.FILE_HAS_BEEN_DELETED); } void assertJobExist(Job job) { ASSERT.isTrue(job.found(), ErrorCode.JOB_DOES_NOT_EXIST); } void checkPermission(User user, File file) { ASSERT.isTrue(file.isShared() || file.getUserId().equals(user.getId()) || user.isAdmin(), ErrorCode.FORBIDDEN); } void checkDeletePermission(User user, File file) { ASSERT.isTrue(file.getUserId().equals(user.getId()) || user.isAdmin(), ErrorCode.FORBIDDEN); } void checkPermission(User user, Job job) { ASSERT.isTrue(job.getUserId().equals(user.getId()) || user.isAdmin(), ErrorCode.FORBIDDEN); } void assertJobInProgress(Job job) { ASSERT.isTrue(job.getState() != JobState.PENDING, ErrorCode.PENDING_JOB, () -> GSON.toJson(new PendingJob(job))) 
.isTrue(job.getState() == JobState.IN_PROGRESS, ErrorCode.SANITY_CHECK); } String buildFileName(String userId, String originalName) { ASSERT.isTrue(!Strings.isNullOrEmpty(userId), ErrorCode.ILLEGAL_ARGUMENT); ASSERT.isTrue(!Strings.isNullOrEmpty(originalName), ErrorCode.ILLEGAL_ARGUMENT); return userId + SEP + System.currentTimeMillis() + SEP + originalName; } }
3,156
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/http/AnalyzerRoute.java
/********************************************************************************
 * Copyright (c) 2020, 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.http;

import io.reactivex.Single;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.web.Router;
import io.vertx.reactivex.ext.web.RoutingContext;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.util.HTTPRespGuarder;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.JobState;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.model.User;
import org.eclipse.jifa.master.service.ProxyDictionary;
import org.eclipse.jifa.master.service.reactivex.FileService;
import org.eclipse.jifa.master.service.reactivex.JobService;
import org.eclipse.jifa.master.support.Utils;
import org.eclipse.jifa.master.support.WorkerClient;

import static org.eclipse.jifa.common.util.Assertion.ASSERT;
import static org.eclipse.jifa.master.entity.enums.JobType.*;

/**
 * HTTP route that fronts the analysis endpoints (heap dump, GC log, thread dump).
 * Requests are validated on the master, bound to (or allocated) an analysis job,
 * and then proxied to the worker that owns the job.
 */
class AnalyzerRoute extends BaseRoute {

    // Service proxies resolved in init().
    private JobService jobService;

    private FileService fileService;

    /**
     * Registers release/process handlers for each analysis type.
     * Route registration order matters: the more specific RELEASE path must be
     * registered before the catch-all COMMON path of the same analysis type.
     */
    void init(Vertx vertx, JsonObject config, Router apiRouter) {
        jobService = ProxyDictionary.lookup(JobService.class);
        fileService = ProxyDictionary.lookup(FileService.class);
        // heap dump
        // Do not change the order !!
        apiRouter.route().path(HEAP_DUMP_RELEASE).handler(context -> release(context, HEAP_DUMP_ANALYSIS));
        apiRouter.route().path(HEAP_DUMP_COMMON).handler(context -> process(context, HEAP_DUMP_ANALYSIS));

        // gclog
        apiRouter.route().path(GCLOG_RELEASE).handler(context -> release(context, GCLOG_ANALYSIS));
        apiRouter.route().path(GCLOG_COMMON).handler(context -> process(context, GCLOG_ANALYSIS));

        // thread dump
        apiRouter.route().path(THREAD_DUMP_RELEASE).handler(context -> release(context, THREAD_DUMP_ANALYSIS));
        apiRouter.route().path(THREAD_DUMP_COMMON).handler(context -> process(context, THREAD_DUMP_ANALYSIS));
    }

    /**
     * Returns the active job for (jobType, file) if one exists; otherwise
     * allocates a new one on the file's host, with load estimated from file size.
     */
    private Single<Job> findOrAllocate(User user, File file, JobType jobType) {
        String target = file.getName();
        return jobService.rxFindActive(jobType, target)
                         .flatMap(job -> job.found() ?
                                 Single.just(job) :
                                 jobService.rxAllocate(user.getId(), file.getHostIP(), jobType, target,
                                                       EMPTY_STRING, Utils.calculateLoadFromSize(file.getSize()),
                                                       false)
                         );
    }

    /**
     * Releases (finishes) the active analysis job for the given file.
     * Fails if the file is unavailable, the user lacks permission, the job
     * does not exist, or the job is still pending (pending jobs cannot be released).
     */
    private void release(RoutingContext context, JobType jobType) {
        User user = context.get(Constant.USER_INFO_KEY);
        String fileName = context.request().getParam("file");
        fileService.rxFile(fileName)
                   .doOnSuccess(file -> assertFileAvailable(file))
                   .doOnSuccess(file -> checkPermission(user, file))
                   .flatMap(file -> jobService.rxFindActive(jobType, file.getName()))
                   .doOnSuccess(this::assertJobExist)
                   .doOnSuccess(job -> ASSERT.isTrue(job.getState() != JobState.PENDING,
                                                     ErrorCode.RELEASE_PENDING_JOB))
                   .ignoreElement()
                   .andThen(jobService.rxFinish(jobType, fileName))
                   .subscribe(() -> HTTPRespGuarder.ok(context),
                              t -> HTTPRespGuarder.fail(context, t));
    }

    /**
     * Validates the request, ensures a running job exists for the file, then
     * forwards the original request to the worker hosting the job and relays
     * the worker's status code and body back to the client.
     */
    private void process(RoutingContext context, JobType jobType) {
        User user = context.get(Constant.USER_INFO_KEY);
        // Propagate the caller identity to the worker as a request parameter.
        context.request().params().add("userName", user.getName());
        String fileName = context.request().getParam("file");
        fileService.rxFile(fileName)
                   .doOnSuccess(file -> assertFileAvailable(file))
                   .doOnSuccess(file -> checkPermission(user, file))
                   // The file must be fully transferred before analysis can start.
                   .doOnSuccess(file -> ASSERT.isTrue(file.transferred(), ErrorCode.NOT_TRANSFERRED))
                   .flatMap(file -> findOrAllocate(user, file, jobType))
                   .doOnSuccess(this::assertJobInProgress)
                   .flatMap(job -> WorkerClient.send(context.request(), job.getHostIP()))
                   .subscribe(resp -> HTTPRespGuarder.ok(context, resp.statusCode(), resp.bodyAsString()),
                              t -> HTTPRespGuarder.fail(context, t));
    }
}
3,157
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/TransferJobResultFillingTask.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Completable;
import io.vertx.reactivex.core.Vertx;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.JobHelper;

import java.time.Instant;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.ServiceAssertion.SERVICE_ASSERT;
import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;
import static org.eclipse.jifa.master.service.sql.JobSQL.SELECT_TRANSFER_JOB_TO_FILLING_RESULT;

/**
 * Periodic task that finds file-transfer jobs which have been running longer
 * than the configured timeout and fills in their (timed-out) results.
 */
public class TransferJobResultFillingTask extends BaseTask {

    // 5 minutes in milliseconds. The previous value (5 * 6000 = 30 seconds)
    // contradicted the documented 5-minute minimum enforced in doInit().
    private static final long MIN_TIMEOUT_THRESHOLD = 5 * 60 * 1000L;

    // Configured timeout in milliseconds after which a transfer job is considered stuck.
    private long timeoutThreshold;

    public TransferJobResultFillingTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    @Override
    public String name() {
        return "Transfer Job Result Filling Task";
    }

    @Override
    public long interval() {
        return ConfigHelper.getLong(pivot.config("JOB-TRANSFER-RESULT-FILLING-INTERVAL"));
    }

    @Override
    public void doInit() {
        timeoutThreshold = ConfigHelper.getLong(pivot.config("JOB-SMALL-TIMEOUT-THRESHOLD"));
        // timeout threshold must be greater than or equal to 5 min
        SERVICE_ASSERT.isTrue(timeoutThreshold >= MIN_TIMEOUT_THRESHOLD, ErrorCode.SANITY_CHECK);
    }

    /**
     * Selects transfer jobs older than the timeout threshold and processes
     * each one; end() is invoked on both success and failure so the task
     * can run again on the next tick.
     */
    @Override
    public void doPeriodic() {
        Instant instant = Instant.now().minusMillis(timeoutThreshold);
        pivot.getDbClient().rxQueryWithParams(SELECT_TRANSFER_JOB_TO_FILLING_RESULT, ja(instant))
             .map(result -> result.getRows().stream().map(JobHelper::fromDBRecord).collect(Collectors.toList()))
             .doOnSuccess(jobs -> LOGGER.info("Found timeout file transfer jobs: {}", jobs.size()))
             .map(jobs -> jobs.stream().map(pivot::processTimeoutTransferJob).collect(Collectors.toList()))
             .flatMapCompletable(Completable::concat)
             .subscribe(
                     this::end,
                     t -> {
                         LOGGER.error("Execute {} error", name(), t);
                         end();
                     }
             );
    }
}
3,158
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/StopAbnormalWorkerTask.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Single;
import io.vertx.reactivex.core.Vertx;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.JobHelper;
import org.eclipse.jifa.master.service.sql.JobSQL;
import org.eclipse.jifa.master.support.K8SWorkerScheduler;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Periodic task that stops "abnormal" workers: live workers in the normal
 * worker group that are not associated with any active job.
 *
 * NOTE(review): each tick stops at most ONE abnormal worker (the loop returns
 * on the first match); additional abnormal workers are picked up on later ticks.
 */
public class StopAbnormalWorkerTask extends BaseTask {

    public StopAbnormalWorkerTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    @Override
    public String name() {
        return "Stop Abnormal Worker Task";
    }

    @Override
    public long interval() {
        return ConfigHelper.getLong(pivot.config("TASK-STOP-ABNORMAL-WORKER"));
    }

    @Override
    public void doInit() {
    }

    /**
     * Computes the set of workers owned by active jobs, lists the live workers
     * in the cluster, and stops the first normal-group worker that is not in
     * that set. Emits Worker.NOT_FOUND when nothing needs stopping.
     */
    @Override
    public void doPeriodic() {
        pivot.getDbClient().rxQuery(JobSQL.SELECT_ALL_ACTIVE_JOBS)
             .map(result -> result.getRows().stream().map(JobHelper::fromDBRecord).collect(Collectors.toList()))
             .flatMap(jobs -> {
                 // Every active job has its own worker, they should never be stopped
                 // NOTE(review): blockingGet() inside a reactive chain blocks the
                 // subscribing thread per job; acceptable only if decide() is cheap — verify.
                 List<String> activeWorkers = jobs.stream().map(job ->
                         pivot.getScheduler().decide(job, null).blockingGet().getHostName()
                 ).collect(Collectors.toList());

                 // Find all live workers in cloud cluster
                 return pivot.getScheduler()
                             .list()
                             .flatMap(workers -> {
                                 for (Worker worker : workers) {
                                     // Only watch upon normal worker groups, any other special
                                     // workers have their special lifecycle
                                     if (worker.getHostName().startsWith(K8SWorkerScheduler.getNormalWorkerPrefix())) {
                                         if (!activeWorkers.contains(worker.getHostName())) {
                                             // Stop the first orphaned worker found and report it.
                                             return pivot.getScheduler().stop(worker).toSingleDefault(worker);
                                         }
                                     }
                                 }
                                 // Sentinel meaning "no abnormal worker this tick".
                                 return Single.just(Worker.NOT_FOUND);
                             });
             })
             .subscribe(n -> {
                            if (n != Worker.NOT_FOUND) {
                                LOGGER.info("Stopped abnormal worker {}/{}", n.getHostName(), n.getHostIP());
                            }
                            end();
                        },
                        t -> {
                            LOGGER.error("Execute {} error", name(), t);
                            end();
                        }
             );
    }
}
3,159
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/BaseTask.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.task; import io.vertx.reactivex.core.Vertx; import org.eclipse.jifa.master.service.impl.Pivot; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.atomic.AtomicBoolean; public abstract class BaseTask { public static final Logger LOGGER = LoggerFactory.getLogger(BaseTask.class); protected final Pivot pivot; private final AtomicBoolean PROCESSING = new AtomicBoolean(false); BaseTask(Pivot pivot, Vertx vertx) { this.pivot = pivot; init(vertx); } public abstract String name(); public abstract long interval(); void end() { doEnd(); LOGGER.info("{} end", name()); PROCESSING.set(false); } public void doEnd() { } public void doInit() { } public abstract void doPeriodic(); private void init(Vertx vertx) { try { vertx.setPeriodic(interval(), this::periodic); doInit(); LOGGER.info("Init {} successfully", name()); } catch (Throwable t) { LOGGER.error("Init {} error", name(), t); System.exit(-1); } } private void periodic(Long ignored) { if (PROCESSING.get() || !PROCESSING.compareAndSet(false, true)) { return; } LOGGER.info("Start {}", name()); doPeriodic(); } public void trigger() { periodic(0L); } }
3,160
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/DiskUsageUpdatingTask.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Observable;
import io.reactivex.Single;
import io.vertx.ext.sql.UpdateResult;
import io.vertx.reactivex.core.Vertx;
import org.eclipse.jifa.common.vo.DiskUsage;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.WorkerHelper;
import org.eclipse.jifa.master.service.sql.WorkerSQL;
import org.eclipse.jifa.master.support.WorkerClient;

import java.util.List;
import java.util.stream.Collectors;

import static org.eclipse.jifa.common.util.Assertion.ASSERT;
import static org.eclipse.jifa.common.util.GsonHolder.GSON;
import static org.eclipse.jifa.master.Constant.uri;
import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;

/**
 * Periodic task that polls every registered worker for its current disk
 * usage over HTTP and persists the numbers to the worker table.
 */
public class DiskUsageUpdatingTask extends BaseTask {

    public DiskUsageUpdatingTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    @Override
    public String name() {
        return "Worker disk usage updating task";
    }

    @Override
    public long interval() {
        return ConfigHelper.getLong(pivot.config("JOB-DISK-USAGE-UPDATING-PERIODIC"));
    }

    /**
     * For each worker: GET its disk-usage endpoint, parse the JSON body,
     * and write total/used MB back to the database. end() is called on both
     * completion and error so the task re-arms.
     */
    @Override
    public void doPeriodic() {
        getWorkers().flatMapObservable(
                workerList -> Observable.fromIterable(workerList)
                                        .flatMapSingle(
                                                worker -> WorkerClient.get(worker.getHostIP(), uri(Constant.SYSTEM_DISK_USAGE))
                                                                      .map(resp -> GSON.fromJson(resp.bodyAsString(), DiskUsage.class))
                                                                      .flatMap(usage -> updateWorkerDiskUsage(worker.getHostIP(),
                                                                                                              usage.getTotalSpaceInMb(),
                                                                                                              usage.getUsedSpaceInMb()))
                                        )
        ).ignoreElements().subscribe(this::end, t -> {
            LOGGER.error("Execute {} error", name(), t);
            end();
        });
    }

    /**
     * Persists one worker's disk numbers; asserts exactly one row was updated
     * (i.e. the worker row exists).
     */
    private Single<UpdateResult> updateWorkerDiskUsage(String hostIP, long totalSpaceInMb, long usedSpaceInMb) {
        return pivot.getDbClient()
                    .rxUpdateWithParams(WorkerSQL.UPDATE_DISK_USAGE, ja(totalSpaceInMb, usedSpaceInMb, hostIP))
                    .doOnSuccess(updateResult -> ASSERT.isTrue(updateResult.getUpdated() == 1));
    }

    /** Loads all registered workers from the database. */
    private Single<List<Worker>> getWorkers() {
        return pivot.getDbClient()
                    .rxQuery(WorkerSQL.SELECT_ALL)
                    .map(records -> records.getRows().stream().map(WorkerHelper::fromDBRecord).collect(Collectors.toList())
                    );
    }
}
3,161
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/DiskCleaningTask.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Completable;
import io.reactivex.Maybe;
import io.reactivex.Observable;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.sql.ConfigSQL;
import org.eclipse.jifa.master.service.sql.FileSQL;
import org.eclipse.jifa.master.service.sql.WorkerSQL;

import java.util.List;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;

/**
 * Periodic task that frees disk space on overloaded workers by marking unused
 * files as pending-delete and then deleting them as SYSTEM.
 */
public class DiskCleaningTask extends BaseTask {

    public DiskCleaningTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    /**
     * The logic is straightforward, assume we have a disk cleaning thread named D and
     * a user thread named U, we can use CAS operation to handle both user requesting
     * and system disk cleaning, the pseudocode is as follows:
     * <code>
     * void U(){
     *     if(atomic.cmpxchg(in_use,0,1){
     *         // using
     *         atomic.cmpxchg(in_use,1,0)
     *     }
     * }
     * <p>
     * void D(){
     *     if(atomic.cmpxchg(in_use,0,2){
     *         // deleting
     *         atomic.cmpxchg(in_use,2,0);
     *     }
     * }
     * </code>
     */
    private static Observable<Completable> markAndDeleteFiles(JDBCClient jdbcClient, Pivot pivot,
                                                              List<String> workerIpList) {
        return Observable.fromIterable(workerIpList)
                         .flatMap(
                                 workerIp -> jdbcClient.rxUpdateWithParams(FileSQL.UPDATE_AS_PENDING_DELETE_BY_HOST, ja(workerIp))
                                                       .ignoreElement()
                                                       .andThen(jdbcClient.rxQueryWithParams(FileSQL.SELECT_PENDING_DELETE_BY_HOST, ja(workerIp))
                                                                          .map(rs -> rs.getRows().stream()
                                                                                       .map(row -> row.getString("name"))
                                                                                       .collect(Collectors.toList()))
                                                                          .flatMapCompletable(fileNames -> pivot
                                                                                  .deleteFile(Deleter.SYSTEM, fileNames.toArray(new String[0])))
                                                       )
                                                       .toObservable()
                         );
    }

    @Override
    public String name() {
        return "Disk cleaning task";
    }

    @Override
    public long interval() {
        return ConfigHelper.getLong(pivot.config("JOB-DISK-CLEANUP-PERIODIC"));
    }

    /**
     * Get high disk overload workers; for each of them, get all files which are
     * neither deleted nor used on that worker; finally, apply the real disk
     * cleanup action for every file in that list.
     *
     * FIX: the previous implementation subscribed to isEnableDiskCleaning() with
     * only an onSuccess handler — when cleaning was disabled (empty Maybe) or the
     * config query failed, end() was never called, leaving the PROCESSING flag set
     * and permanently stalling the task. The chain is now a single Completable so
     * end() runs on every outcome (disabled, success, or error).
     */
    @Override
    public void doPeriodic() {
        isEnableDiskCleaning()
                .flatMapCompletable(
                        enabled -> getHighDiskOverloadWorkers()
                                .flatMapObservable(workerIpList -> markAndDeleteFiles(pivot.getDbClient(), pivot, workerIpList))
                                .ignoreElements())
                .subscribe(
                        () -> {
                            LOGGER.info("Execute {} successfully ", name());
                            this.end();
                        },
                        t -> {
                            LOGGER.error("Execute {} error", name(), t);
                            this.end();
                        }
                );
    }

    /**
     * Returns worker IPs whose disk usage exceeds the cleanup threshold;
     * empty when no worker qualifies.
     */
    private Maybe<List<String>> getHighDiskOverloadWorkers() {
        return pivot.getDbClient().rxQuery(WorkerSQL.SELECT_FOR_DISK_CLEANUP)
                    .map(rs -> rs.getRows().stream().map(jo -> jo.getString("host_ip")).collect(Collectors.toList()))
                    .filter(workers -> workers.size() > 0);
    }

    /**
     * Emits the flag value only when TASK-ENABLE-DISK-CLEANUP is set to 1;
     * empty otherwise (cleaning disabled).
     */
    private Maybe<Long> isEnableDiskCleaning() {
        return pivot.getDbClient()
                    .rxQueryWithParams(ConfigSQL.SELECT, ja("TASK-ENABLE-DISK-CLEANUP"))
                    .map(resultSet -> resultSet.getRows().get(0).getString("value"))
                    .map(Long::valueOf)
                    .filter(value -> value == 1);
    }
}
3,162
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/RetiringTask.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Completable;
import io.vertx.reactivex.core.Vertx;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.JobHelper;

import java.time.Instant;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.ServiceAssertion.SERVICE_ASSERT;
import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;
import static org.eclipse.jifa.master.service.sql.JobSQL.SELECT_TO_RETIRE;

/**
 * Periodic task that retires (finishes) jobs which have been active longer
 * than the configured common timeout.
 */
public class RetiringTask extends BaseTask {

    // 5 minutes in milliseconds. The previous value (5 * 6000L = 30 seconds)
    // contradicted the documented 5-minute minimum enforced in doInit().
    private static final long MIN_TIMEOUT_THRESHOLD = 5 * 60 * 1000L;

    // Configured per-instance timeout in milliseconds; was a mutable static
    // field, which would let one instance clobber another's configuration.
    private long timeoutThreshold;

    public RetiringTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    @Override
    public String name() {
        return "Retiring Task";
    }

    @Override
    public long interval() {
        return ConfigHelper.getLong(pivot.config("JOB-RETIRING-INTERVAL"));
    }

    @Override
    public void doInit() {
        timeoutThreshold = ConfigHelper.getLong(pivot.config("JOB-COMMON-TIMEOUT-THRESHOLD"));
        // timeout threshold must be greater than or equal to 5 min
        SERVICE_ASSERT.isTrue(timeoutThreshold >= MIN_TIMEOUT_THRESHOLD, ErrorCode.SANITY_CHECK);
    }

    /**
     * Selects jobs older than the timeout threshold and finishes each one;
     * end() is invoked on both success and failure so the task re-arms.
     */
    @Override
    public void doPeriodic() {
        Instant instant = Instant.now().minusMillis(timeoutThreshold);
        pivot.getDbClient().rxQueryWithParams(SELECT_TO_RETIRE, ja(instant))
             .map(result -> result.getRows().stream().map(JobHelper::fromDBRecord).collect(Collectors.toList()))
             .doOnSuccess(jobs -> LOGGER.info("Found timeout jobs: {}", jobs.size()))
             .map(jobs -> jobs.stream().map(pivot::finish).collect(Collectors.toList()))
             .flatMapCompletable(Completable::concat)
             .subscribe(
                     this::end,
                     t -> {
                         LOGGER.error("Execute {} error", name(), t);
                         end();
                     }
             );
    }
}
3,163
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/FileSyncForK8STask.java
/********************************************************************************
 * Copyright (c) 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Completable;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.apache.commons.io.FileUtils;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.common.enums.FileType;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.FileHelper;
import org.eclipse.jifa.master.service.sql.FileSQL;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Clean up any stale files that does not record in database. The cleanup is
 * exactly different from FileSyncTask, otherwise, they look like the same one.
 */
public class FileSyncForK8STask extends FileSyncTask {

    // TODO: should reuse WorkerGlobal.WORKSPACE and related functionalities
    private static final String WORKSPACE = Constant.DEFAULT_WORKSPACE;

    public FileSyncForK8STask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    /** Directory on local disk holding files of the given type. */
    private static String dirPath(FileType type) {
        return WORKSPACE + java.io.File.separator + type.getTag();
    }

    /**
     * Deletes the named entry under the type's directory.
     * NOTE(review): only directories are removed; a plain file with the same
     * name is silently left in place — confirm whether that is intentional.
     */
    private static void delete(FileType type, String name) {
        try {
            java.io.File f = new java.io.File(dirPath(type) + java.io.File.separator + name);
            if (f.isDirectory()) {
                FileUtils.deleteDirectory(f);
            }
        } catch (IOException e) {
            LOGGER.error("Delete file failed", e);
            throw new JifaException(e);
        }
    }

    @Override
    public String name() {
        return "File Sync For K8S Task";
    }

    /**
     * Compares the on-disk workspace against the files recorded in the
     * database and deletes entries that are unknown to the database and older
     * than the stale threshold; then runs the inherited long-transfer check.
     */
    @Override
    public void doPeriodic() {
        JDBCClient dbClient = pivot.getDbClient();
        dbClient.rxQuery(FileSQL.SELECT_FILES_FOR_SYNC)
                .map(ar -> ar.getRows().stream().map(FileHelper::fromDBRecord).collect(Collectors.toList()))
                .flatMapCompletable(
                        files -> {
                            // Clean up any files that exist in workspace while not recorded in database
                            // Merely a mirror of FileSupport.sync
                            Map<FileType, List<String>> filesGroup = new HashMap<>(){{
                                for (FileType ft : FileType.values()) {
                                    // In case no files returned
                                    this.put(ft, new ArrayList<>());
                                }
                            }};
                            for (File fi : files) {
                                filesGroup.get(fi.getType()).add(fi.getName());
                            }
                            // Only entries last modified before this instant are stale enough to remove.
                            long lastModified = System.currentTimeMillis() - Constant.STALE_THRESHOLD;
                            for (FileType ft : filesGroup.keySet()) {
                                List<String> names = filesGroup.get(ft);
                                java.io.File[] listFiles = new java.io.File(dirPath(ft)).listFiles();
                                if (listFiles == null) {
                                    // Directory missing or unreadable; nothing to scan for this type.
                                    continue;
                                }
                                for (java.io.File lf : listFiles) {
                                    if (names.contains(lf.getName())) {
                                        continue;
                                    }
                                    LOGGER.info("{} is not synchronized", lf.getName());
                                    if (isCleanStale() && lf.lastModified() < lastModified) {
                                        LOGGER.info("Delete stale file {}", lf.getName());
                                        delete(ft, lf.getName());
                                    }
                                }
                            }
                            return Completable.complete();
                        }
                ).andThen(processLongTransferJob())
                 .subscribe(this::end,
                            t -> {
                                LOGGER.error("Execute {} error", name(), t);
                                end();
                            }
                 );
    }
}
3,164
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/FileSyncTask.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Completable;
import io.reactivex.Observable;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.FileHelper;
import org.eclipse.jifa.master.service.impl.helper.WorkerHelper;
import org.eclipse.jifa.master.service.sql.FileSQL;
import org.eclipse.jifa.master.service.sql.JobSQL;
import org.eclipse.jifa.master.service.sql.WorkerSQL;

import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;

/**
 * Periodic task that reconciles file records with each worker's disk
 * (via Pivot.syncFiles) and marks timed-out in-progress transfers as ERROR.
 */
public class FileSyncTask extends BaseTask {

    // Whether stale on-disk files may be deleted during sync; read from config in doInit().
    private boolean cleanStale;

    public FileSyncTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    /** Exposes the clean-stale flag to subclasses (see FileSyncForK8STask). */
    public boolean isCleanStale() {
        return cleanStale;
    }

    @Override
    public String name() {
        return "File Sync Task";
    }

    @Override
    public long interval() {
        // 1 hour
        return 60 * 60 * 1000;
    }

    @Override
    public void doInit() {
        cleanStale = ConfigHelper.getBoolean(pivot.config("JOB-CLEAN-STALE-FILES"));
    }

    /**
     * Syncs files on every worker; a failure for one worker is logged and
     * swallowed (onErrorComplete) so the remaining workers are still synced.
     * Afterwards runs the long-transfer cleanup, then re-arms via end().
     */
    @Override
    public void doPeriodic() {
        pivot.getDbClient().rxQuery(WorkerSQL.SELECT_ALL)
             .map(records -> records.getRows()
                                    .stream()
                                    .map(WorkerHelper::fromDBRecord)
                                    .collect(Collectors.toList()))
             .flatMapCompletable(workers -> Observable.fromIterable(workers)
                                                      .flatMapCompletable(
                                                              worker -> pivot.syncFiles(worker, cleanStale)
                                                                             .doOnError(t -> LOGGER.error(
                                                                                     "Failed to sync worker files for {} ",
                                                                                     worker.getHostIP(), t))
                                                                             .onErrorComplete()
                                                      )
             ).andThen(processLongTransferJob()).subscribe(this::end,
                                                           t -> {
                                                               LOGGER.error("Execute {} error", name(), t);
                                                               end();
                                                           }
             );
    }

    /**
     * Mark files that takes too long time to upload and not recorded in active jobs as ERROR state
     */
    protected Completable processLongTransferJob() {
        JDBCClient dbClient = pivot.getDbClient();
        return dbClient.rxQuery(FileSQL.SELECT_TIMEOUT_IN_PROGRESS_FILE)
                       .map(records -> records.getRows()
                                              .stream()
                                              .map(FileHelper::fromDBRecord)
                                              .collect(Collectors.toList()))
                       .flatMapCompletable(
                               files -> Observable.fromIterable(files)
                                                  .flatMapCompletable(
                                                          file -> dbClient.rxQueryWithParams(JobSQL.SELECT_TRANSFER_JOB_BY_NAME, ja(file.getName()))
                                                                          .flatMapCompletable(
                                                                                  rs -> {
                                                                                      if (rs.getRows().size() > 0) {
                                                                                          // A live transfer job still owns this file; leave it alone.
                                                                                          return Completable.complete();
                                                                                      }
                                                                                      // set state to error if not associated job
                                                                                      return dbClient.rxUpdateWithParams(FileSQL.UPDATE_IN_PROGRESS_FILE_AS_ERROR_BY_NAME, ja(file.getName()))
                                                                                                     .ignoreElement();
                                                                                  })
                                                                          .doOnError(t -> LOGGER
                                                                                  .error("Failed to sync file transfer state for {}", file.getName(), t))
                                                                          .onErrorComplete())
                       );
    }
}
3,165
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/SchedulingTask.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Single;
import io.vertx.reactivex.core.Vertx;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.JobHelper;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.sql.JobSQL.SELECT_ALL_PENDING;

/**
 * Periodic scheduler that moves pending jobs to IN_PROGRESS, assigning each
 * to a worker with enough spare load capacity. Jobs already bound to a host
 * must run there; unbound jobs go to the most idle worker.
 */
public class SchedulingTask extends BaseTask {

    // Cursor into pendingJobs for the current run; reset in doEnd().
    private int index;

    // Snapshot of pending jobs for the current run.
    private List<Job> pendingJobs = new ArrayList<>();

    // Hosts found full during this run; bound jobs targeting them are skipped
    // without re-querying the worker row.
    private Set<String> pinnedHostIPs = new HashSet<>();

    public SchedulingTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    @Override
    public String name() {
        return "Scheduling Task";
    }

    @Override
    public long interval() {
        return ConfigHelper.getLong(pivot.config("JOB-SCHEDULING-INTERVAL"));
    }

    /**
     * Loads all pending jobs and walks them one at a time (the chain recurses
     * through processNextPendingJob); ends immediately when the queue is empty.
     */
    @Override
    public void doPeriodic() {
        pivot.getDbClient().rxQuery(SELECT_ALL_PENDING)
             .map(records -> records.getRows().stream().map(JobHelper::fromDBRecord).collect(Collectors.toList()))
             .map(jobs -> {
                 pendingJobs.addAll(jobs);
                 LOGGER.info("Found pending jobs: {}", pendingJobs.size());
                 return pendingJobs.size() > 0;
             })
             .subscribe(hasPendingJobs -> {
                 if (hasPendingJobs) {
                     processNextPendingJob();
                 } else {
                     end();
                 }
             }, t -> {
                 LOGGER.error("Execute {} error", name(), t);
                 end();
             });
    }

    /** Resets per-run state so the next tick starts clean. */
    @Override
    public void doEnd() {
        index = 0;
        pendingJobs.clear();
        pinnedHostIPs.clear();
    }

    /**
     * Processes pendingJobs[index] and, if the job signalled "continue"
     * (returned true), recurses to the next one; ends the run otherwise.
     */
    private void processNextPendingJob() {
        if (index < pendingJobs.size()) {
            processJob(pendingJobs.get(index++))
                    .subscribe(
                            next -> {
                                if (next) {
                                    processNextPendingJob();
                                }
                            },
                            t -> {
                                LOGGER.error("Execute {} error", name(), t);
                                end();
                            }
                    );
        } else {
            end();
        }
    }

    /** Dispatches on whether the job is already bound to a host. */
    private Single<Boolean> processJob(Job job) {
        return job.getHostIP() == null ? processNoBindingJob(job) : processBindingJob(job);
    }

    /**
     * Unbound job: try the most idle worker. Emits false (stop the run) when
     * even that worker lacks capacity — no other worker can do better.
     */
    private Single<Boolean> processNoBindingJob(Job job) {
        return pivot.inTransactionAndLock(
                conn -> pivot.selectMostIdleWorker(conn).flatMap(
                        worker -> {
                            long loadSum = worker.getCurrentLoad() + job.getEstimatedLoad();
                            if (loadSum > worker.getMaxLoad()) {
                                return Single.just(false);
                            }
                            String hostIP = worker.getHostIP();
                            job.setHostIP(hostIP);
                            return pivot.updatePendingJobToInProcess(conn, job)
                                        .andThen(pivot.updateWorkerLoad(conn, hostIP, loadSum))
                                        .toSingleDefault(true);
                        }
                )
        ).doOnSuccess(e -> pivot.postInProgressJob(job).subscribe());
    }

    /**
     * Host-bound job: schedule on its fixed host if capacity allows; if the
     * host is full, pin it so later bound jobs for the same host are skipped
     * cheaply. Always emits true — a full host doesn't block other jobs.
     */
    private Single<Boolean> processBindingJob(Job job) {
        return pivot.inTransactionAndLock(
                conn -> pivot.selectWorker(conn, job.getHostIP()).flatMap(
                        worker -> {
                            String hostIP = worker.getHostIP();
                            if (pinnedHostIPs.contains(hostIP)) {
                                return Single.just(true);
                            }
                            long loadSum = worker.getCurrentLoad() + job.getEstimatedLoad();
                            if (loadSum > worker.getMaxLoad()) {
                                LOGGER.info("Pin host: {}", hostIP);
                                pinnedHostIPs.add(hostIP);
                                return Single.just(true);
                            }
                            return pivot.updatePendingJobToInProcess(conn, job)
                                        .andThen(pivot.updateWorkerLoad(conn, hostIP, loadSum))
                                        .toSingleDefault(true);
                        }
                )
        ).doOnSuccess(e -> pivot.postInProgressJob(job).subscribe());
    }
}
3,166
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/task/PVCCleanupTask.java
/********************************************************************************
 * Copyright (c) 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.task;

import io.reactivex.Observable;
import io.vertx.reactivex.core.Vertx;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.eclipse.jifa.master.service.impl.helper.ConfigHelper;
import org.eclipse.jifa.master.service.impl.helper.FileHelper;
import org.eclipse.jifa.master.service.sql.FileSQL;

import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;

/**
 * Periodically clean up dump files that stored at k8s persistent volume claim.
 */
public class PVCCleanupTask extends BaseTask {

    // Root directory of the shared PVC workspace.
    // NOTE(review): declared but not referenced in this class — presumably used
    // by a collaborator or left over from a refactor; confirm.
    private static final String WORKSPACE = "/root/jifa_workspace";

    public PVCCleanupTask(Pivot pivot, Vertx vertx) {
        super(pivot, vertx);
    }

    @Override
    public String name() {
        return "PVC Cleanup Task";
    }

    @Override
    public long interval() {
        // Cleanup period is configurable via the master's config table.
        return ConfigHelper.getLong(pivot.config("JOB-DISK-CLEANUP-PERIODIC"));
    }

    @Override
    public void doInit() {
        // No one-time initialization required for this task.
    }

    /**
     * One cleanup round: select dated files, mark each as PENDING_DELETE, then
     * re-query the pending-delete set for that name and delete it as SYSTEM.
     * The task is ended (released) both on completion and on error.
     */
    @Override
    public void doPeriodic() {
        pivot.getDbClient().rxQuery(FileSQL.SELECT_DATED_FILES)
             .map(result -> result.getRows().stream().map(FileHelper::fromDBRecord).collect(Collectors.toList()))
             .doOnSuccess(files -> LOGGER.info("Found dated files for deletion: {}", files.size()))
             .flatMapCompletable(files ->
                 // Process file names one by one; each name goes through a
                 // mark-then-delete two-step so deletion is driven by DB state.
                 Observable.fromIterable(files.stream().map(File::getName).collect(Collectors.toList()))
                     .flatMapCompletable(
                         fileName -> pivot.getDbClient()
                             .rxUpdateWithParams(FileSQL.UPDATE_AS_PENDING_DELETE_BY_FILE_NAME, ja(fileName))
                             .ignoreElement()
                             .andThen(
                                 pivot.getDbClient()
                                     .rxQueryWithParams(FileSQL.SELECT_PENDING_DELETE_BY_FILE_NAME, ja(fileName))
                                     .map(rs -> rs.getRows().stream().map(row -> row.getString("name")).collect(Collectors.toList()))
                                     .flatMapCompletable(fileNames -> pivot
                                         .deleteFile(Deleter.SYSTEM, fileNames.toArray(new String[0]))
                                         .doOnComplete(() -> LOGGER.info("Deleted {} files by {}", fileNames.size(), this.name()))
                                     )
                             )
                     )
             )
             .subscribe(() -> {
                            this.end();
                        },
                        t -> {
                            LOGGER.error("Execute {} error", name(), t);
                            end();
                        }
             );
    }
}
3,167
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/model/TransferWay.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.model; import static org.eclipse.jifa.master.Constant.*; public enum TransferWay { URL(uri(TRANSFER_BY_URL), "url"), SCP(uri(TRANSFER_BY_SCP), "path"), OSS(uri(TRANSFER_BY_OSS), "objectName"), S3(uri(TRANSFER_BY_S3), "objectName"), UPLOAD(uri(FILE_UPLOAD), ""); private String[] pathKey; private String uri; TransferWay(String uri, String... pathKey) { this.pathKey = pathKey; this.uri = uri; } public String[] getPathKeys() { return pathKey; } public String getUri() { return uri; } }
3,168
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/model/User.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.model; import lombok.Data; @Data public class User { private String id; private String name; private boolean admin; public User(String id, String name, boolean admin) { this.id = id; this.name = name; this.admin = admin; } }
3,169
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/model/WorkerInfo.java
/******************************************************************************** * Copyright (c) 2021 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.model; import lombok.Data; @Data public class WorkerInfo { private String name; private String ip; }
3,170
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/AdminService.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service; import io.vertx.codegen.annotations.GenIgnore; import io.vertx.codegen.annotations.ProxyGen; import io.vertx.codegen.annotations.VertxGen; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.core.Vertx; import io.vertx.reactivex.ext.jdbc.JDBCClient; import io.vertx.serviceproxy.ServiceBinder; import org.eclipse.jifa.master.entity.Admin; import org.eclipse.jifa.master.service.impl.AdminServiceImpl; import java.util.List; @ProxyGen @VertxGen public interface AdminService { @GenIgnore static void create(Vertx vertx, JDBCClient dbClient) { new ServiceBinder(vertx.getDelegate()).setAddress(AdminService.class.getSimpleName()) .register(AdminService.class, new AdminServiceImpl(dbClient)); } @GenIgnore static void createProxy(Vertx vertx) { ProxyDictionary.add(AdminService.class, new org.eclipse.jifa.master.service.reactivex.AdminService( new AdminServiceVertxEBProxy(vertx.getDelegate(), AdminService.class.getSimpleName()))); } void isAdmin(String userId, Handler<AsyncResult<Boolean>> handler); void add(String userId, Handler<AsyncResult<Void>> handler); void queryAll(Handler<AsyncResult<List<Admin>>> handler); }
3,171
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/WorkerService.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service; import io.vertx.codegen.annotations.GenIgnore; import io.vertx.codegen.annotations.ProxyGen; import io.vertx.codegen.annotations.VertxGen; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.core.Vertx; import io.vertx.reactivex.ext.jdbc.JDBCClient; import io.vertx.serviceproxy.ServiceBinder; import org.eclipse.jifa.master.entity.Worker; import org.eclipse.jifa.master.service.impl.Pivot; import org.eclipse.jifa.master.service.impl.WorkerServiceImpl; import java.util.List; @ProxyGen @VertxGen public interface WorkerService { @GenIgnore static void create(Vertx vertx, JDBCClient dbClient, Pivot pivot) { new ServiceBinder(vertx.getDelegate()).setAddress(WorkerService.class.getSimpleName()) .register(WorkerService.class, new WorkerServiceImpl(dbClient, pivot)); } @GenIgnore static void createProxy(Vertx vertx) { ProxyDictionary.add(WorkerService.class, new org.eclipse.jifa.master.service.reactivex.WorkerService( new WorkerServiceVertxEBProxy(vertx.getDelegate(), WorkerService.class.getSimpleName()))); } void queryAll(Handler<AsyncResult<List<Worker>>> handler); void diskCleanup(String hostIP, Handler<AsyncResult<Void>> handler); void selectMostIdleWorker(Handler<AsyncResult<Worker>> handler); void selectWorkerByIP(String hostIp, Handler<AsyncResult<Worker>> handler); }
3,172
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/JobService.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service;

import io.vertx.codegen.annotations.GenIgnore;
import io.vertx.codegen.annotations.ProxyGen;
import io.vertx.codegen.annotations.VertxGen;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import io.vertx.serviceproxy.ServiceBinder;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.service.impl.JobServiceImpl;
import org.eclipse.jifa.master.service.impl.Pivot;

import java.util.List;

/**
 * Event-bus service for job lifecycle operations. The interface shape is a
 * Vert.x {@code @ProxyGen}/{@code @VertxGen} code-generation contract — do not
 * change signatures without regenerating the proxies.
 */
@ProxyGen
@VertxGen
public interface JobService {

    /** Registers the concrete implementation on the event bus. */
    @GenIgnore
    static void create(Vertx vertx, Pivot pivot, JDBCClient dbClient) {
        new ServiceBinder(vertx.getDelegate())
            .setAddress(JobService.class.getSimpleName())
            .register(JobService.class, new JobServiceImpl(pivot, dbClient));
    }

    /** Creates the client-side proxy and publishes it in the {@link ProxyDictionary}. */
    @GenIgnore
    static void createProxy(Vertx vertx) {
        ProxyDictionary.add(JobService.class, new org.eclipse.jifa.master.service.reactivex.JobService(
            new JobServiceVertxEBProxy(vertx.getDelegate(), JobService.class.getSimpleName())));
    }

    /** Finds the active (pending or in-progress) job for the given type/target. */
    void findActive(JobType jobType, String target, Handler<AsyncResult<Job>> handler);

    /** Lists pending jobs queued ahead of the given job. */
    void pendingJobsInFrontOf(Job job, Handler<AsyncResult<List<Job>>> handler);

    /**
     * Allocates a new job.
     *
     * @param hostIP        the bound host, or null to let the scheduler choose
     * @param estimatedLoad the load the job is expected to put on its worker
     * @param immediate     whether to bypass the pending queue and start at once
     */
    void allocate(String userId, String hostIP, JobType jobType, String target, String attachment,
                  long estimatedLoad, boolean immediate, Handler<AsyncResult<Job>> handler);

    /** Marks the job identified by type/target as finished. */
    void finish(JobType type, String target, Handler<AsyncResult<Void>> handler);
}
3,173
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/ProxyDictionary.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import static org.eclipse.jifa.common.util.Assertion.ASSERT; public class ProxyDictionary { private static Map<String, Object> proxyMap = new ConcurrentHashMap<>(); static synchronized void add(Class<?> serviceInterface, Object proxy) { proxyMap.put(serviceInterface.getSimpleName(), proxy); } @SuppressWarnings("unchecked") public static <T> T lookup(Class<?> key) { Object proxy = proxyMap.get(key.getSimpleName()); ASSERT.notNull(proxy); return (T) proxy; } }
3,174
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/ServiceAssertion.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service; import io.vertx.serviceproxy.ServiceException; import org.eclipse.jifa.common.ErrorCode; import org.eclipse.jifa.common.util.Assertion; public class ServiceAssertion extends Assertion { public static final ServiceAssertion SERVICE_ASSERT = new ServiceAssertion(); @Override protected void throwEx(ErrorCode errorCode, String message) { throw new ServiceException(errorCode.ordinal(), message); } }
3,175
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/FileService.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service;

import io.vertx.codegen.annotations.GenIgnore;
import io.vertx.codegen.annotations.ProxyGen;
import io.vertx.codegen.annotations.VertxGen;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import io.vertx.serviceproxy.ServiceBinder;
import org.eclipse.jifa.common.enums.FileTransferState;
import org.eclipse.jifa.common.enums.FileType;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.model.TransferWay;
import org.eclipse.jifa.master.service.impl.FileServiceImpl;
import org.eclipse.jifa.master.service.impl.Pivot;

import java.util.List;
import java.util.Map;

/**
 * Event-bus service for file management: listing, transfer, deletion and
 * metadata updates. The interface shape is a Vert.x {@code @ProxyGen}/
 * {@code @VertxGen} code-generation contract — do not change signatures
 * without regenerating the proxies.
 */
@ProxyGen
@VertxGen
public interface FileService {

    /** Registers the concrete implementation on the event bus. */
    @GenIgnore
    static void create(Vertx vertx, Pivot pivot, JDBCClient dbClient) {
        new ServiceBinder(vertx.getDelegate())
            .setAddress(FileService.class.getSimpleName())
            .register(FileService.class, new FileServiceImpl(pivot, dbClient));
    }

    /** Creates the client-side proxy and publishes it in the {@link ProxyDictionary}. */
    @GenIgnore
    static void createProxy(Vertx vertx) {
        ProxyDictionary.add(FileService.class, new org.eclipse.jifa.master.service.reactivex.FileService(
            new FileServiceVertxEBProxy(vertx.getDelegate(), FileService.class.getSimpleName())));
    }

    /**
     * Counts a user's files of the given type; {@code expectedFilename}, when
     * non-empty, is applied as a fuzzy (substring) filter.
     */
    void count(String userId, FileType type, String expectedFilename, Handler<AsyncResult<Integer>> handler);

    /** Pages through a user's files of the given type, with optional fuzzy filter. */
    void files(String userId, FileType type, String expectedFilename, int page, int pageSize,
               Handler<AsyncResult<List<File>>> handler);

    /** Looks up a file by name; completes with a NOT_FOUND sentinel if absent. */
    void file(String name, Handler<AsyncResult<File>> handler);

    /** Deletes a file, recording who requested the deletion. */
    void deleteFile(String name, Deleter deleter, Handler<AsyncResult<Void>> handler);

    /**
     * Starts transferring a file into Jifa via the given way and allocates the
     * corresponding FILE_TRANSFER job.
     */
    void transfer(String userId, FileType type, String originalName, String name, TransferWay transferWay,
                  Map<String, String> transferInfo, Handler<AsyncResult<Job>> handler);

    /** Records the outcome (state and final size) of a finished transfer. */
    void transferDone(String name, FileTransferState transferState, long size, Handler<AsyncResult<Void>> handler);

    /** Marks a file as shared. */
    void setShared(String name, Handler<AsyncResult<Void>> handler);

    /** Updates a file's display name. */
    void updateDisplayName(String name, String displayName, Handler<AsyncResult<Void>> handler);
}
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/ConfigService.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service; import io.vertx.codegen.annotations.GenIgnore; import io.vertx.codegen.annotations.ProxyGen; import io.vertx.codegen.annotations.VertxGen; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.core.Vertx; import io.vertx.reactivex.ext.jdbc.JDBCClient; import io.vertx.serviceproxy.ServiceBinder; import org.eclipse.jifa.master.service.impl.ConfigServiceImpl; @ProxyGen @VertxGen public interface ConfigService { @GenIgnore static void create(Vertx vertx, JDBCClient dbClient) { new ServiceBinder(vertx.getDelegate()).setAddress(ConfigService.class.getSimpleName()) .register(ConfigService.class, new ConfigServiceImpl(dbClient)); } @GenIgnore static void createProxy(Vertx vertx) { ProxyDictionary.add(ConfigService.class, new org.eclipse.jifa.master.service.reactivex.ConfigService( new ConfigServiceVertxEBProxy(vertx.getDelegate(), ConfigService.class.getSimpleName()))); } void getConfig(String configName, Handler<AsyncResult<String>> handler); }
3,177
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/ServiceVerticle.java
/********************************************************************************
 * Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service;

import com.alibaba.druid.pool.DruidDataSource;
import io.reactivex.Single;
import io.vertx.core.Promise;
import io.vertx.core.json.JsonObject;
import io.vertx.reactivex.core.AbstractVerticle;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.service.impl.Pivot;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Verticle that wires up the master's services: builds the Druid data source,
 * creates all event-bus proxies, then registers all service implementations.
 * Setup runs on a worker thread via rxExecuteBlocking because data-source and
 * Pivot initialization may block.
 */
public class ServiceVerticle extends AbstractVerticle implements Constant {

    private static final Logger LOGGER = LoggerFactory.getLogger(ServiceVerticle.class);

    @Override
    public void start(Promise<Void> startFuture) {
        vertx.rxExecuteBlocking(future -> {
            // init Data source
            DruidDataSource ds = new DruidDataSource();
            JsonObject dbConfig = config().getJsonObject(DB_KEYWORD);
            ds.setUrl(dbConfig.getString(DB_URL));
            ds.setUsername(dbConfig.getString(DB_USERNAME));
            ds.setPassword(dbConfig.getString(DB_PASSWORD));
            ds.setDriverClassName(dbConfig.getString(DB_DRIVER_CLASS_NAME));

            // create proxy first — proxies are created before the services are
            // bound so that anything resolving them via ProxyDictionary finds
            // an entry as soon as this verticle is up.
            AdminService.createProxy(vertx);
            JobService.createProxy(vertx);
            ConfigService.createProxy(vertx);
            WorkerService.createProxy(vertx);
            FileService.createProxy(vertx);
            SupportService.createProxy(vertx);
            LOGGER.info("Create service proxy done");

            JDBCClient jdbcClient = new JDBCClient(io.vertx.ext.jdbc.JDBCClient.create(vertx.getDelegate(), ds));
            Pivot pivot = Pivot.createInstance(vertx, jdbcClient, config());

            JobService.create(vertx, pivot, jdbcClient);
            FileService.create(vertx, pivot, jdbcClient);
            ConfigService.create(vertx, jdbcClient);
            AdminService.create(vertx, jdbcClient);
            WorkerService.create(vertx, jdbcClient, pivot);
            SupportService.create(vertx, jdbcClient, pivot);
            LOGGER.info("Create service done");

            // NOTE(review): the blocking future is completed with a Single that
            // is never subscribed; only the completion signal is consumed below.
            // Presumably future.complete() alone would suffice — confirm.
            future.complete(Single.just(this));
        }).subscribe(f -> startFuture.complete(), startFuture::fail);
    }
}
3,178
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/SupportService.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service; import io.vertx.codegen.annotations.GenIgnore; import io.vertx.codegen.annotations.ProxyGen; import io.vertx.codegen.annotations.VertxGen; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.core.Vertx; import io.vertx.reactivex.ext.jdbc.JDBCClient; import io.vertx.serviceproxy.ServiceBinder; import org.eclipse.jifa.master.service.impl.Pivot; import org.eclipse.jifa.master.service.impl.SupportServiceImpl; @ProxyGen @VertxGen public interface SupportService { @GenIgnore static void create(Vertx vertx, JDBCClient dbClient, Pivot pivot) { new ServiceBinder(vertx.getDelegate()) .setIncludeDebugInfo(true) .setAddress(SupportService.class.getSimpleName()) .register(SupportService.class, new SupportServiceImpl(dbClient, pivot)); } @GenIgnore static void createProxy(Vertx vertx) { ProxyDictionary.add(SupportService.class, new org.eclipse.jifa.master.service.reactivex.SupportService( new SupportServiceVertxEBProxy(vertx.getDelegate(), SupportService.class.getSimpleName()))); } void isDBConnectivity(Handler<AsyncResult<Boolean>> handler); void startDummyWorker(Handler<AsyncResult<Void>> handler); void stopDummyWorker(Handler<AsyncResult<Void>> handler); }
3,179
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/package-info.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ @ModuleGen(groupPackage = "org.eclipse.jifa.master.service", name = "Service") package org.eclipse.jifa.master.service; import io.vertx.codegen.annotations.ModuleGen;
3,180
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/WorkerServiceImpl.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service.impl;

import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.reactivex.CompletableHelper;
import io.vertx.reactivex.SingleHelper;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.service.WorkerService;
import org.eclipse.jifa.master.service.impl.helper.WorkerHelper;
import org.eclipse.jifa.master.service.sql.FileSQL;
import org.eclipse.jifa.master.service.sql.WorkerSQL;

import java.util.List;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;

/**
 * Database-backed implementation of {@link WorkerService}. All operations are
 * non-blocking: results are delivered through the Vert.x handler via the
 * RxJava-to-handler adapters.
 */
public class WorkerServiceImpl implements WorkerService, WorkerSQL {

    private final JDBCClient dbClient;

    private final Pivot pivot;

    public WorkerServiceImpl(JDBCClient dbClient, Pivot pivot) {
        this.dbClient = dbClient;
        this.pivot = pivot;
    }

    @Override
    public void queryAll(Handler<AsyncResult<List<Worker>>> handler) {
        dbClient.rxQuery(WorkerSQL.SELECT_ALL)
                .map(records -> records.getRows()
                                       .stream()
                                       .map(WorkerHelper::fromDBRecord)
                                       .collect(Collectors.toList()))
                .subscribe(SingleHelper.toObserver(handler));
    }

    /**
     * Two-step cleanup of a host's files: first mark every file on the host as
     * PENDING_DELETE, then re-read that set from the DB and delete it as ADMIN.
     * Driving deletion off the DB state keeps it idempotent across retries.
     */
    @Override
    public void diskCleanup(String hostIP, Handler<AsyncResult<Void>> handler) {
        dbClient.rxUpdateWithParams(FileSQL.UPDATE_AS_PENDING_DELETE_BY_HOST, ja(hostIP))
                .ignoreElement()
                .andThen(dbClient.rxQueryWithParams(FileSQL.SELECT_PENDING_DELETE_BY_HOST, ja(hostIP))
                                 .map(rs -> rs.getRows().stream().map(row -> row.getString("name"))
                                              .collect(Collectors.toList()))
                                 .flatMapCompletable(
                                     fileNames -> pivot.deleteFile(Deleter.ADMIN, fileNames.toArray(new String[0])))
                )
                .subscribe(CompletableHelper.toObserver(handler));
    }

    @Override
    public void selectMostIdleWorker(Handler<AsyncResult<Worker>> handler) {
        dbClient.rxGetConnection()
                // doOnTerminate closes the connection on success or error.
                // NOTE(review): the connection is not closed if the stream is
                // disposed before terminating — confirm this cannot happen here.
                .flatMap(conn -> pivot.selectMostIdleWorker(conn).doOnTerminate(conn::close))
                .subscribe(SingleHelper.toObserver(handler));
    }

    @Override
    public void selectWorkerByIP(String hostIp, Handler<AsyncResult<Worker>> handler) {
        dbClient.rxQueryWithParams(WorkerSQL.SELECT_BY_IP, ja(hostIp))
                .map(ar -> {
                    if (ar.getRows().size() > 0) {
                        return WorkerHelper.fromDBRecord(ar.getRows().get(0));
                    }
                    // Sentinel instead of null/Optional: callers test Worker.found().
                    return Worker.NOT_FOUND;
                })
                .subscribe(SingleHelper.toObserver(handler));
    }
}
3,181
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/FileServiceImpl.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service.impl;

import com.google.common.base.Strings;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.core.json.JsonArray;
import io.vertx.reactivex.CompletableHelper;
import io.vertx.reactivex.SingleHelper;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.eclipse.jifa.common.enums.FileTransferState;
import org.eclipse.jifa.common.enums.FileType;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.model.TransferWay;
import org.eclipse.jifa.master.service.FileService;
import org.eclipse.jifa.master.service.impl.helper.FileHelper;
import org.eclipse.jifa.master.service.impl.helper.SQLAssert;
import org.eclipse.jifa.master.service.sql.FileSQL;
import org.eclipse.jifa.master.service.sql.SQL;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.eclipse.jifa.common.util.GsonHolder.GSON;
import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;
import static org.eclipse.jifa.master.service.sql.FileSQL.*;

/**
 * File-related service implementation: listing, lookup, deletion, transfer
 * bookkeeping and metadata updates for uploaded/transferred files.
 *
 * <p>Read-only queries go straight to the database via {@link JDBCClient};
 * operations with cross-cutting effects (delete, transfer, finish) are
 * delegated to {@link Pivot}.
 */
public class FileServiceImpl implements FileService, Constant {

    /** Estimated load charged to a worker while a file-transfer job runs. */
    private static final long TRANSFER_JOB_LOAD = 5;

    private final Pivot pivot;

    private final JDBCClient dbClient;

    public FileServiceImpl(Pivot pivot, JDBCClient dbClient) {
        this.pivot = pivot;
        this.dbClient = dbClient;
    }

    /**
     * Wraps a filename fragment in SQL {@code LIKE} wildcards for fuzzy matching.
     * Extracted so {@link #count} and {@link #files} share one definition instead
     * of each reassigning its own parameter.
     */
    private static String toFuzzyPattern(String expectedFilename) {
        return "%" + expectedFilename + "%";
    }

    /**
     * Counts files owned by {@code userId} of the given {@code type}, optionally
     * filtered by a fuzzy filename match when {@code expectedFilename} is non-empty.
     */
    @Override
    public void count(String userId, FileType type, String expectedFilename,
                      Handler<AsyncResult<Integer>> handler) {
        boolean fuzzy = !Strings.isNullOrEmpty(expectedFilename);
        String sql = fuzzy ? COUNT_BY_USER_ID_AND_TYPE_AND_EXPECTED_NAME : COUNT_BY_USER_ID_AND_TYPE;
        JsonArray params;
        if (fuzzy) {
            // The SQL statement references the pattern twice, hence the duplication.
            String pattern = toFuzzyPattern(expectedFilename);
            params = ja(userId, type, pattern, pattern);
        } else {
            params = ja(userId, type);
        }
        dbClient.rxQueryWithParams(sql, params)
                .map(resultSet -> resultSet.getRows().get(0).getInteger(SQL.COUNT_NAME))
                .subscribe(SingleHelper.toObserver(handler));
    }

    /**
     * Returns one page of files owned by {@code userId} of the given {@code type},
     * optionally filtered by a fuzzy filename match. {@code page} is 1-based.
     */
    @Override
    public void files(String userId, FileType type, String expectedFilename, int page, int pageSize,
                      Handler<AsyncResult<List<File>>> handler) {
        boolean fuzzy = !Strings.isNullOrEmpty(expectedFilename);
        String sql = fuzzy ? SELECT_BY_USER_ID_AND_TYPE_AND_EXPECTED_NAME : SELECT_BY_USER_ID_AND_TYPE;
        int offset = (page - 1) * pageSize;
        JsonArray params;
        if (fuzzy) {
            String pattern = toFuzzyPattern(expectedFilename);
            params = ja(userId, type, pattern, pattern, offset, pageSize);
        } else {
            params = ja(userId, type, offset, pageSize);
        }
        dbClient.rxQueryWithParams(sql, params)
                .map(ar -> ar.getRows().stream().map(FileHelper::fromDBRecord).collect(Collectors.toList()))
                .subscribe(SingleHelper.toObserver(handler));
    }

    /**
     * Looks up a single file by its unique name; completes with
     * {@link File#NOT_FOUND} when no record exists.
     */
    @Override
    public void file(String name, Handler<AsyncResult<File>> handler) {
        dbClient.rxQueryWithParams(FileSQL.SELECT_FILE_BY_NAME, ja(name))
                .map(ar -> {
                    if (ar.getRows().size() > 0) {
                        return FileHelper.fromDBRecord(ar.getRows().get(0));
                    }
                    return File.NOT_FOUND;
                })
                .subscribe(SingleHelper.toObserver(handler));
    }

    /** Deletes a file on behalf of {@code deleter}; delegated to {@link Pivot}. */
    @Override
    public void deleteFile(String name, Deleter deleter, Handler<AsyncResult<Void>> handler) {
        pivot.deleteFile(deleter, name)
             .subscribe(CompletableHelper.toObserver(handler));
    }

    /**
     * Starts a file transfer by allocating a {@link JobType#FILE_TRANSFER} job.
     * The file metadata is serialized as the job attachment.
     */
    @Override
    public void transfer(String userId, FileType type, String originalName, String name,
                         TransferWay transferWay, Map<String, String> transferInfo,
                         Handler<AsyncResult<Job>> handler) {
        File file = new File();
        file.setUserId(userId);
        file.setOriginalName(originalName);
        file.setName(name);
        file.setType(type);
        file.setSize(0);
        file.setTransferState(FileTransferState.IN_PROGRESS);
        file.setTransferWay(transferWay);
        file.setTransferInfo(transferInfo);

        // OSS transfers and password-based SCP transfers must start immediately;
        // other ways may be queued as pending jobs.
        boolean immediate = transferWay == TransferWay.OSS
                || (transferWay == TransferWay.SCP && !Boolean.parseBoolean(transferInfo.get("usePublicKey")));

        pivot.allocate(userId, null, JobType.FILE_TRANSFER, name, GSON.toJson(file),
                       TRANSFER_JOB_LOAD, immediate)
             .subscribe(SingleHelper.toObserver(handler));
    }

    /** Records the terminal state and final size of a finished transfer. */
    @Override
    public void transferDone(String name, FileTransferState transferState, long size,
                             Handler<AsyncResult<Void>> handler) {
        pivot.transferDone(name, transferState, size).subscribe(CompletableHelper.toObserver(handler));
    }

    /**
     * Marks a file as shared.
     * NOTE(review): unlike {@link #updateDisplayName} this does not assert that a
     * row was updated, so an unknown name completes successfully — confirm whether
     * that is intentional before tightening.
     */
    @Override
    public void setShared(String name, Handler<AsyncResult<Void>> handler) {
        dbClient.rxUpdateWithParams(FileSQL.SET_SHARED, ja(name))
                .ignoreElement()
                .subscribe(CompletableHelper.toObserver(handler));
    }

    /** Updates the user-visible display name; fails if no row was updated. */
    @Override
    public void updateDisplayName(String name, String displayName, Handler<AsyncResult<Void>> handler) {
        dbClient.rxUpdateWithParams(FileSQL.UPDATE_DISPLAY_NAME, ja(displayName, name))
                .doOnSuccess(SQLAssert::assertUpdated)
                .ignoreElement()
                .subscribe(CompletableHelper.toObserver(handler));
    }
}
3,182
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/JobServiceImpl.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service.impl;

import io.reactivex.Single;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.ext.sql.ResultSet;
import io.vertx.reactivex.CompletableHelper;
import io.vertx.reactivex.SingleHelper;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.enums.JobState;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.service.JobService;
import org.eclipse.jifa.master.service.impl.helper.JobHelper;
import org.eclipse.jifa.master.service.impl.helper.SQLHelper;
import org.eclipse.jifa.master.service.sql.JobSQL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Instant;
import java.util.List;
import java.util.stream.Collectors;

import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;

/**
 * Job-related service implementation: querying active jobs, inspecting the
 * pending queue, and allocating or finishing jobs via {@link Pivot}.
 */
public class JobServiceImpl implements JobService, JobSQL, Constant {

    private static final Logger LOGGER = LoggerFactory.getLogger(JobServiceImpl.class);

    private final JDBCClient dbClient;

    private final Pivot pivot;

    public JobServiceImpl(Pivot pivot, JDBCClient dbClient) {
        this.pivot = pivot;
        this.dbClient = dbClient;
    }

    /**
     * Finds the active job for {@code (jobType, target)}, completing with
     * {@link Job#NOT_FOUND} when none exists. For an in-progress job the access
     * time is refreshed as a side effect before the job is emitted.
     */
    @Override
    public void findActive(JobType jobType, String target, Handler<AsyncResult<Job>> handler) {
        dbClient.rxQueryWithParams(SELECT_ACTIVE_BY_TYPE_AND_TARGET, ja(jobType, target))
                .flatMap(rs -> {
                    if (rs.getNumRows() == 0) {
                        return Single.just(Job.NOT_FOUND);
                    }
                    Job activeJob = JobHelper.fromDBRecord(rs.getRows().get(0));
                    if (activeJob.getState() != JobState.IN_PROGRESS) {
                        return Single.just(activeJob);
                    }
                    // Touch the access time so the in-progress job is not reclaimed.
                    return dbClient.rxUpdateWithParams(UPDATE_ACCESS_TIME, ja(jobType, target))
                                   .ignoreElement()
                                   .toSingleDefault(activeJob);
                })
                .subscribe(SingleHelper.toObserver(handler));
    }

    /**
     * Lists the pending jobs queued ahead of {@code job} (older creation time),
     * scoped to the job's host when one is assigned. The job itself is excluded
     * from the result.
     */
    @Override
    public void pendingJobsInFrontOf(Job job, Handler<AsyncResult<List<Job>>> handler) {
        String hostIP = job.getHostIP();
        Instant createdAt = Instant.ofEpochMilli(job.getCreationTime());

        Single<ResultSet> query;
        if (hostIP == null) {
            query = dbClient.rxQueryWithParams(SELECT_FRONT_PENDING, ja(createdAt));
        } else {
            query = dbClient.rxQueryWithParams(SELECT_FRONT_PENDING_BY_HOST_IP, ja(hostIP, createdAt));
        }

        query.map(records -> records.getRows()
                                    .stream()
                                    .map(JobHelper::fromDBRecord)
                                    .filter(ahead -> job.getId() != ahead.getId())
                                    .collect(Collectors.toList()))
             .subscribe(SingleHelper.toObserver(handler));
    }

    /** Allocates a new job; the heavy lifting is delegated to {@link Pivot}. */
    @Override
    public void allocate(String userId, String hostIP, JobType jobType, String target,
                         String attachment, long estimatedLoad, boolean immediate,
                         Handler<AsyncResult<Job>> handler) {
        pivot.allocate(userId, hostIP, jobType, target, attachment, estimatedLoad, immediate)
             .subscribe(SingleHelper.toObserver(handler));
    }

    /** Looks up the active job for {@code (type, target)} and finishes it. */
    @Override
    public void finish(JobType type, String target, Handler<AsyncResult<Void>> handler) {
        dbClient.rxQueryWithParams(SELECT_ACTIVE_BY_TYPE_AND_TARGET, ja(type, target))
                .map(SQLHelper::singleRow)
                .map(JobHelper::fromDBRecord)
                .flatMapCompletable(pivot::finish)
                .subscribe(CompletableHelper.toObserver(handler));
    }
}
3,183
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/ConfigServiceImpl.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.SingleHelper; import io.vertx.reactivex.ext.jdbc.JDBCClient; import org.eclipse.jifa.master.service.ConfigService; import org.eclipse.jifa.master.service.sql.ConfigSQL; import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja; public class ConfigServiceImpl implements ConfigService { private final JDBCClient dbClient; public ConfigServiceImpl(JDBCClient dbClient) { this.dbClient = dbClient; } @Override public void getConfig(String configName, Handler<AsyncResult<String>> handler) { dbClient.rxQueryWithParams(ConfigSQL.SELECT, ja(configName)) .map(resultSet -> resultSet.getRows().get(0).getString("value")) .subscribe(SingleHelper.toObserver(handler)); } }
3,184
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/Pivot.java
/********************************************************************************
 * Copyright (c) 2020, 2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service.impl;

import io.reactivex.Completable;
import io.reactivex.Observable;
import io.reactivex.Single;
import io.reactivex.observables.GroupedObservable;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.sql.ResultSet;
import io.vertx.reactivex.core.Vertx;
import io.vertx.reactivex.core.buffer.Buffer;
import io.vertx.reactivex.ext.jdbc.JDBCClient;
import io.vertx.reactivex.ext.sql.SQLClientHelper;
import io.vertx.reactivex.ext.sql.SQLConnection;
import io.vertx.reactivex.ext.web.client.HttpResponse;
import org.apache.commons.io.FileUtils;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.enums.FileTransferState;
import org.eclipse.jifa.common.enums.ProgressState;
import org.eclipse.jifa.common.vo.TransferProgress;
import org.eclipse.jifa.master.Constant;
import org.eclipse.jifa.master.entity.File;
import org.eclipse.jifa.master.entity.Job;
import org.eclipse.jifa.master.entity.Master;
import org.eclipse.jifa.master.entity.Worker;
import org.eclipse.jifa.master.entity.enums.Deleter;
import org.eclipse.jifa.master.entity.enums.JobState;
import org.eclipse.jifa.master.entity.enums.JobType;
import org.eclipse.jifa.master.model.TransferWay;
import org.eclipse.jifa.master.service.impl.helper.*;
import org.eclipse.jifa.master.service.sql.*;
import org.eclipse.jifa.master.support.Factory;
import org.eclipse.jifa.master.support.Pattern;
import org.eclipse.jifa.master.support.WorkerClient;
import org.eclipse.jifa.master.support.WorkerScheduler;
import org.eclipse.jifa.master.task.SchedulingTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetAddress;
import java.time.Instant;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.eclipse.jifa.common.util.GsonHolder.GSON;
import static org.eclipse.jifa.master.Constant.LOCAL_HOST;
import static org.eclipse.jifa.master.Constant.uri;
import static org.eclipse.jifa.master.service.ServiceAssertion.SERVICE_ASSERT;
import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja;
import static org.eclipse.jifa.master.service.sql.JobSQL.*;
import static org.eclipse.jifa.master.support.WorkerClient.post;

/**
 * Central coordinator of the master: a lazily-created singleton that owns job
 * allocation, job completion, file deletion/synchronization and the bridge to
 * the {@link WorkerScheduler}. All database work runs through the shared
 * {@link JDBCClient}; multi-statement operations run inside a transaction,
 * optionally guarded by a global DB lock (see {@link #inTransactionAndLock}).
 */
public class Pivot {

    private static final Logger LOGGER = LoggerFactory.getLogger(Pivot.class);

    // Key of the global-lock row used to serialize job state changes across masters.
    private static final String JOB_LOCK_KEY = "MASTER-JOB-LOCK";

    // Config keys read from the config table at startup.
    private static final String SCHEDULER_PATTERN = "SCHEDULER-PATTERN";

    private static final String PENDING_JOB_MAX_COUNT_KEY = "JOB-PENDING-MAX-COUNT";

    private static final String NOTIFY_WORKER_ACTION_DELAY_KEY = "JOB-NOTIFY-WORKER-ACTION-DELAY";

    private static final String TRIGGER_SCHEDULING_ACTION_DELAY = "JOB-TRIGGER-SCHEDULING-ACTION-DELAY";

    private static Pivot SINGLETON;

    private Master currentMaster;

    private JDBCClient dbClient;

    private Vertx vertx;

    // Upper bound on queued pending jobs; exceeding it rejects new allocations.
    private int pendingJobMaxCount;

    // Delays (units per the stored config — presumably milliseconds for vertx.setTimer; confirm).
    private long notifyWorkerActionDelay;

    private long triggerSchedulingActionDelay;

    private SchedulingTask schedulingTask;

    private WorkerScheduler scheduler;

    // True when the scheduler pattern is Pattern.DEFAULT (as opposed to e.g. K8S).
    private boolean isDefaultPattern;

    // Singleton: use createInstance(...) / getInstance().
    private Pivot() {
    }

    public WorkerScheduler getScheduler() {
        return scheduler;
    }

    public static Pivot getInstance() {
        return SINGLETON;
    }

    /**
     * Creates and initializes the singleton (idempotent). Reads scheduler
     * pattern and tuning values from the config table, resolves this master's
     * IP, loads its DB record, and initializes the scheduler. Any failure is
     * fatal: the process exits with -1.
     */
    public static synchronized Pivot createInstance(Vertx vertx, JDBCClient dbClient, JsonObject config) {
        if (SINGLETON != null) {
            return SINGLETON;
        }
        Pivot jm = new Pivot();
        try {
            jm.dbClient = dbClient;
            jm.vertx = vertx;
            Pattern pattern = Pattern.valueOf(ConfigHelper.getString(jm.config(SCHEDULER_PATTERN)));
            jm.isDefaultPattern = pattern == Pattern.DEFAULT;
            jm.scheduler = Factory.create(pattern);
            jm.pendingJobMaxCount = ConfigHelper.getInt(jm.config(PENDING_JOB_MAX_COUNT_KEY));
            jm.notifyWorkerActionDelay = ConfigHelper.getLong(jm.config(NOTIFY_WORKER_ACTION_DELAY_KEY));
            jm.triggerSchedulingActionDelay = ConfigHelper.getLong(jm.config(TRIGGER_SCHEDULING_ACTION_DELAY));
            // In dev mode or on K8S the master binds to localhost; otherwise use the real host address.
            String ip = org.eclipse.jifa.master.Master.DEV_MODE || pattern == Pattern.K8S ?
                LOCAL_HOST : InetAddress.getLocalHost().getHostAddress();
            LOGGER.info("Current Master Host IP is {}", ip);
            // Blocking is acceptable here: this runs once during startup.
            SQLConnection sqlConnection = dbClient.rxGetConnection().blockingGet();
            jm.currentMaster = jm.selectMaster(sqlConnection, ip).doFinally(sqlConnection::close).blockingGet();
            jm.scheduler.initialize(jm, vertx, config);
        } catch (Throwable t) {
            LOGGER.error("Init job service error", t);
            System.exit(-1);
        }
        SINGLETON = jm;
        return jm;
    }

    public boolean isLeader() {
        return currentMaster.isLeader();
    }

    public boolean isDefaultPattern() {
        return isDefaultPattern;
    }

    public void setSchedulingTask(SchedulingTask task) {
        this.schedulingTask = task;
    }

    /** Builds a new job record in PENDING state from the given attributes. */
    private static Job buildPendingJob(String userId, String hostIP, JobType jobType, String target,
                                       String attachment, long estimatedLoad, boolean immediate) {
        Job job = new Job();
        job.setUserId(userId);
        job.setType(jobType);
        job.setTarget(target);
        job.setState(JobState.PENDING);
        job.setHostIP(hostIP);
        job.setAttachment(attachment);
        job.setEstimatedLoad(estimatedLoad);
        job.setImmediate(immediate);
        return job;
    }

    public JDBCClient getDbClient() {
        return dbClient;
    }

    /** Entry point for job allocation: builds a pending job and allocates it. */
    Single<Job> allocate(String userId, String hostIP, JobType jobType, String target,
                         String attachment, long estimatedLoad, boolean immediate) {
        Job job = buildPendingJob(userId, hostIP, jobType, target, attachment, estimatedLoad, immediate);
        return doAllocate(job);
    }

    /**
     * Allocates under the global lock: checks the pending-queue limit, inserts
     * the job (pending or in-progress), then runs post-processing for
     * in-progress jobs (e.g. kicking off the file transfer).
     */
    private Single<Job> doAllocate(Job job) {
        return scheduler.start(job)
            .andThen(inTransactionAndLock(
                conn -> checkPendingCont(conn, job)
                    .flatMap(pendingCount -> doAllocate(conn, job, pendingCount))
            ).flatMap(i -> postInProgressJob(job).andThen(Single.just(job))));
    }

    /**
     * Core allocation decision. If anything is already pending the job queues
     * behind it; otherwise the scheduler picks a worker and the job either runs
     * immediately (capacity permitting) or is queued. Immediate jobs that cannot
     * run at once are rejected via {@code ErrorCode.IMMEDIATE_JOB}.
     */
    private Single<Job> doAllocate(SQLConnection conn, Job job, int pendingCount) {
        job.setState(JobState.PENDING);
        if (pendingCount > 0) {
            // Others are queued ahead; keep FIFO order by queuing this one too.
            return conn.rxUpdateWithParams(INSERT_ACTIVE, buildActiveParams(job)).map(i -> job);
        }
        return setFileUsed(conn, job).andThen(scheduler.decide(job, conn).flatMap(
            worker -> {
                if (scheduler.supportPendingJob()) {
                    String selectedHostIP = worker.getHostIP();
                    long loadSum = worker.getCurrentLoad() + job.getEstimatedLoad();
                    if (loadSum <= worker.getMaxLoad()) {
                        // new in progress job
                        job.setHostIP(selectedHostIP);
                        job.setState(JobState.IN_PROGRESS);
                        return insertActiveJob(conn, job)
                            .andThen(updateWorkerLoad(conn, selectedHostIP, loadSum))
                            .toSingleDefault(job);
                    }
                    // Worker is saturated: only non-immediate jobs may wait in the queue.
                    SERVICE_ASSERT.isTrue(!job.isImmediate(), ErrorCode.IMMEDIATE_JOB);
                    return insertActiveJob(conn, job).toSingleDefault(job);
                } else {
                    String selectedHostIP = worker.getHostIP();
                    // new in progress job
                    job.setHostIP(selectedHostIP);
                    job.setState(JobState.IN_PROGRESS);
                    return insertActiveJob(conn, job).toSingleDefault(job);
                }
            }
        ));
    }

    /** Health probe: runs SELECT TRUE; any error yields {@code false}. */
    public Single<Boolean> isDBConnectivity() {
        return dbClient.rxGetConnection()
            .flatMap(conn -> selectTrue(conn).doOnTerminate(
                () -> {
                    LOGGER.info("Close connection for DB connectivity test");
                    conn.close();
                }))
            .onErrorReturn(t -> {
                t.printStackTrace();
                return Boolean.FALSE;
            });
    }

    /** Query params ({@code name}, {@code type}) for the worker's transfer-progress endpoint. */
    private Map<String, String> buildQueryFileTransferProgressParams(File file) {
        Map<String, String> map = new HashMap<>();
        map.put("name", file.getName());
        map.put("type", file.getType().name());
        return map;
    }

    /**
     * Interprets a worker's transfer-progress response: non-final states and
     * HTTP failures are ignored; a final state marks the transfer done.
     */
    private Completable processTransferProgressResult(String name, HttpResponse resp) {
        if (resp.statusCode() != Constant.HTTP_GET_OK_STATUS_CODE) {
            LOGGER.warn("Query file transfer progress error : {}", resp.bodyAsString());
            return Completable.complete();
        }
        TransferProgress progress = GSON.fromJson(resp.bodyAsString(), TransferProgress.class);
        ProgressState state = progress.getState();
        if (state.isFinal()) {
            return transferDone(name, FileTransferState.fromProgressState(state), progress.getTotalSize());
        }
        return Completable.complete();
    }

    /**
     * Handles a transfer job that exceeded its time budget: queries the worker
     * for real progress, or — when the worker is gone (K8S only) — marks the
     * transfer as errored. Errors in this housekeeping path are logged and
     * swallowed ({@code onErrorComplete}).
     */
    public Completable processTimeoutTransferJob(Job job) {
        return dbClient.rxGetConnection()
            .flatMapCompletable(conn -> selectFileOrNotFount(conn, job.getTarget())
                .flatMapCompletable(file -> {
                    if (file.found()) {
                        return scheduler.decide(job, conn)
                            .flatMapCompletable(worker -> {
                                if (worker == Worker.NOT_FOUND) {
                                    // Job has been timeout, but there is no corresponding worker which indicated
                                    // by Job's hostIP, this only happens in K8S mode, i.e. worker has been stopped
                                    // due to some reasons but Job is still presented. We would update Job transferState
                                    // as Error unconditionally.
                                    return Completable.complete()
                                        .doOnComplete(() -> SERVICE_ASSERT.isTrue(!isDefaultPattern,
                                                                                  "Only happens in K8S Mode"))
                                        .andThen(transferDone(job.getTarget(), FileTransferState.ERROR, 0));
                                } else {
                                    return WorkerClient.get(job.getHostIP(), uri(Constant.TRANSFER_PROGRESS),
                                                            buildQueryFileTransferProgressParams(file))
                                        .flatMapCompletable(
                                            resp -> processTransferProgressResult(job.getTarget(), resp));
                                }
                            });
                    } else {
                        // File record vanished: just finish the orphaned job.
                        return finish(job);
                    }
                })
                .doOnTerminate(conn::close)
            )
            .doOnError((t) -> LOGGER.warn("Process time out transfer job {} error", job.getTarget(), t))
            .onErrorComplete();
    }

    /**
     * Finishes the FILE_TRANSFER job for {@code name} and records the transfer
     * outcome (state + final size) on the file row in the same transaction.
     */
    Completable transferDone(String name, FileTransferState transferState, long size) {
        return dbClient.rxGetConnection()
            .flatMap(conn -> selectActiveJob(conn, JobType.FILE_TRANSFER, name).doOnTerminate(conn::close))
            .flatMapCompletable(
                job -> finish(job, conn -> updateFileTransferResult(conn, job, name, transferState, size))
            );
    }

    /** Serializes file name/type pairs into the {@code files} form param for worker requests. */
    private HashMap<String, String> buildParams(File... files) {
        @SuppressWarnings("rawtypes")
        Map[] maps = new Map[files.length];
        for (int i = 0; i < files.length; i++) {
            Map<String, String> map = new HashMap<>();
            map.put("name", files[i].getName());
            map.put("type", files[i].getType().name());
            maps[i] = map;
        }
        return new HashMap<>(1) {{
            put("files", GSON.toJson(maps));
        }};
    }

    /**
     * Deletes a group of files co-located on one host: verifies no active jobs
     * use them (unless deleted by SYSTEM), removes the DB records, then removes
     * the physical files — via the worker's batch-delete endpoint in default
     * mode, or directly from the shared workspace path otherwise.
     */
    private Completable batchDeleteFiles(SQLConnection conn, Deleter deleter,
                                         GroupedObservable<String, File> groupedFiles) {
        String hostIP = groupedFiles.getKey();
        return groupedFiles
            .toList()
            .flatMapCompletable(files -> Observable.fromIterable(files)
                .flatMapSingle(file -> selectActiveJob(conn, file.getName())
                    .doOnSuccess(jobs -> {
                        if (deleter != Deleter.SYSTEM) {
                            SERVICE_ASSERT
                                .isTrue(jobs.size() == 0, ErrorCode.FILE_IS_IN_USED);
                        }
                    })
                )
                .ignoreElements()
                .andThen(
                    deleteFileRecords(conn, deleter, files.toArray(new File[0])))
                .andThen(
                    isDefaultPattern ?
                        post(hostIP, uri(Constant.FILE_BATCH_DELETE), buildParams(files.toArray(new File[0])))
                            .doOnSuccess(resp -> SERVICE_ASSERT
                                .isTrue(resp.statusCode() == Constant.HTTP_POST_CREATED_STATUS,
                                        resp.bodyAsString())).ignoreElement()
                        : Completable.fromAction(
                            () -> {
                                for (File file : files) {
                                    // FIXME: consider adding delete files
                                    // api in worker scheduler
                                    // copy from pvc clean up task
                                    java.io.File pf = new java.io.File("/root/jifa_workspace" +
                                        java.io.File.separator + file.getType().getTag() +
                                        java.io.File.separator + file.getName());
                                    FileUtils.deleteDirectory(pf);
                                }
                            }
                        )
                )
            );
    }

    /** Deletes files by name, grouped by their hosting worker for batch processing. */
    public Completable deleteFile(Deleter deleter, String... fileNames) {
        return dbClient.rxGetConnection()
            .flatMapCompletable(
                conn -> Observable.fromArray(fileNames)
                    .flatMapSingle(fileName -> selectFile(conn, fileName))
                    .groupBy(File::getHostIP)
                    .flatMapCompletable(
                        groupedFiles -> batchDeleteFiles(conn, deleter, groupedFiles))
                    .doOnTerminate(conn::close)
            );
    }

    /** Pushes the list of files the DB believes live on worker {@code w} to that worker for syncing. */
    public Completable syncFiles(Worker w, boolean cleanStale) {
        return dbClient.rxQueryWithParams(FileSQL.SELECT_FILES_FOR_SYNC, SQLHelper.ja(w.getHostIP()))
            .map(ar -> ar.getRows().stream().map(FileHelper::fromDBRecord).collect(Collectors.toList()))
            .flatMapCompletable(
                files -> {
                    HashMap<String, String> params = buildParams(files.toArray(new File[0]));
                    params.put("cleanStale", String.valueOf(cleanStale));
                    return post(w.getHostIP(), uri(Constant.FILE_SYNC), params)
                        .ignoreElement();
                }
            );
    }

    /** Finishes a job with no extra in-transaction work. */
    public Completable finish(Job job) {
        return finish(job, conn -> Completable.complete());
    }

    /**
     * Finishes a job under the global lock: releases worker load (default
     * pattern only), archives the job to history, removes it from the active
     * table, un-marks its file as used, and runs the caller-supplied {@code post}
     * step in the same transaction. Afterwards (outside the transaction) the
     * worker is notified, the scheduler stops the job, and a scheduling pass is
     * triggered.
     */
    private Completable finish(Job job, Function<SQLConnection, Completable> post) {
        return inTransactionAndLock(
            conn -> scheduler.decide(job, conn)
                .flatMapCompletable(worker -> {
                    if (isDefaultPattern()) {
                        return updateWorkerLoad(conn, worker.getHostIP(),
                                                worker.getCurrentLoad() - job.getEstimatedLoad());
                    } else {
                        return Completable.complete();
                    }
                })
                .andThen(insertHistoricalJob(conn, job))
                // delete old job
                .andThen(deleteActiveJob(conn, job))
                .andThen(this.setFileUnused(conn, job))
                .andThen(post.apply(conn))
                .toSingleDefault(job)
        ).ignoreElement()
            // notify worker
            .andThen(this.notifyWorkerJobIsFinished(job))
            // stop worker
            .andThen(scheduler.stop(job))
            // trigger scheduling
            .andThen(this.triggerScheduling());
    }

    /**
     * Marks the job's target file as used (skipped for FILE_TRANSFER jobs,
     * whose file record does not exist yet at allocation time — TODO confirm).
     */
    private Completable setFileUsed(SQLConnection conn, Job job) {
        if (job.getType() != JobType.FILE_TRANSFER) {
            return conn.rxUpdateWithParams(FileSQL.UPDATE_FILE_AS_USED, ja(job.getTarget()))
                .doOnSuccess(
                    record -> SERVICE_ASSERT.isTrue(record.getUpdated() == 1,
                        "Operation " + job.getType().toString() + " failure due to unknown error"))
                .ignoreElement();
        }
        return Completable.complete();
    }

    /** Inverse of {@link #setFileUsed}: clears the used flag on non-transfer jobs' files. */
    private Completable setFileUnused(SQLConnection conn, Job job) {
        if (job.getType() != JobType.FILE_TRANSFER) {
            return conn.rxUpdateWithParams(FileSQL.UPDATE_FILE_AS_UNUSED, ja(job.getTarget()))
                .doOnSuccess(
                    record -> SERVICE_ASSERT.isTrue(record.getUpdated() == 1,
                        "Operation " + job.getType().toString() + " failure due to unknown error"))
                .ignoreElement();
        }
        return Completable.complete();
    }

    /**
     * Post-processing once a job is IN_PROGRESS. For FILE_TRANSFER jobs this
     * inserts the file row and asks the assigned worker to start pulling the
     * file (uploads are driven by the client instead). On error the job is
     * finished in a fire-and-forget manner. Other job types need nothing.
     */
    public Completable postInProgressJob(Job job) {
        switch (job.getType()) {
            case FILE_TRANSFER:
                File file = new File(new JsonObject(job.getAttachment()));
                JsonArray params = new JsonArray();
                params.add(file.getUserId());
                params.add(file.getOriginalName());
                params.add(file.getName());
                params.add(file.getType());
                params.add(0);
                params.add(job.getHostIP());
                params.add(FileTransferState.IN_PROGRESS);
                // shared/downloadable/in shared disk/deleted
                params.add(false).add(false).add(false).add(false);
                params.add(0);
                return inTransaction(
                    conn -> conn.rxUpdateWithParams(FileSQL.INSERT, params)
                        .doOnSuccess(SQLAssert::assertUpdated).ignoreElement()
                        .andThen(
                            // upload file need special process
                            file.getTransferWay() != TransferWay.UPLOAD ?
                                post(job.getHostIP(), file.getTransferWay().getUri(), file.getTransferInfo())
                                    .doOnSuccess(resp -> SERVICE_ASSERT.isTrue(
                                        resp.statusCode() == Constant.HTTP_POST_CREATED_STATUS,
                                        resp.bodyAsString())).ignoreElement()
                                : Completable.complete()
                        ).toSingleDefault(job))
                    .doOnError(e -> finish(job).subscribe()).ignoreElement();
            default:
                return Completable.complete();
        }
    }

    /**
     * Asynchronously tells the worker to release resources for an analysis job
     * after a configured delay. Only analysis job types get a notification; the
     * switch intentionally falls through to an empty default for the rest.
     */
    private Completable notifyWorkerJobIsFinished(Job job) {
        JobType type = job.getType();
        String hostIP = job.getHostIP();
        String target = job.getTarget();
        return Completable.fromAction(() -> {
            switch (type) {
                case HEAP_DUMP_ANALYSIS:
                case GCLOG_ANALYSIS:
                case THREAD_DUMP_ANALYSIS:
                    vertx.setTimer(notifyWorkerActionDelay, ignored -> {
                        String url = "/jifa-api/" + type.getTag() + "/" + target + "/release";
                        Single<HttpResponse<Buffer>> post = post(job.getHostIP(), url);
                        post.subscribe(resp -> {
                            if (resp.statusCode() != Constant.HTTP_POST_CREATED_STATUS) {
                                LOGGER.error("Notify worker {} to release task error, result : {}",
                                             hostIP, resp.bodyAsString());
                            }
                        }, t -> LOGGER.error("Notify worker {} to release task error", hostIP));
                    });
                default:
            }
        });
    }

    /**
     * Schedules a (delayed) pass of the pending-job scheduler, but only on the
     * leader master and only when the scheduler supports pending jobs.
     */
    private Completable triggerScheduling() {
        if (scheduler.supportPendingJob()) {
            return Completable
                .fromAction(() -> {
                    if (currentMaster.leader) {
                        vertx.setTimer(triggerSchedulingActionDelay, ignored -> schedulingTask.trigger());
                    }
                });
        }
        return Completable.complete();
    }

    /** Blocking read of one config row by name; used during startup. */
    public JsonObject config(String name) {
        return SQLHelper.singleRow(dbClient.rxQueryWithParams(ConfigSQL.SELECT, ja(name)).blockingGet());
    }

    /** Runs {@code sourceSupplier} inside a DB transaction. */
    public <T> Single<T> inTransaction(Function<SQLConnection, Single<T>> sourceSupplier) {
        return SQLClientHelper.inTransactionSingle(dbClient, sourceSupplier);
    }

    /** Runs {@code sourceSupplier} inside a transaction after taking the global job lock. */
    public <T> Single<T> inTransactionAndLock(Function<SQLConnection, Single<T>> sourceSupplier) {
        return SQLClientHelper.inTransactionSingle(dbClient, conn ->
            lock(conn).andThen(sourceSupplier.apply(conn))
        );
    }

    /** Takes the global job lock row for the duration of the enclosing transaction. */
    private Completable lock(SQLConnection conn) {
        return conn.rxQueryWithParams(GlobalLockSQL.LOCK, ja(JOB_LOCK_KEY))
            .doOnSuccess(SQLAssert::assertSelected)
            .ignoreElement();
    }

    // NOTE(review): appears unused within this class — possibly kept for callers elsewhere; confirm.
    private JsonArray buildFileParams(File file) {
        return ja(file.getUserId(), file.getOriginalName(), file.getName(), file.getType(),
                  file.getSize(), file.getHostIP(), file.getTransferState(), file.isShared(),
                  file.isDownloadable(), file.isInSharedDisk(), file.isDeleted());
    }

    /** SQL params for inserting an active job; the attachment is stored only while PENDING. */
    private JsonArray buildActiveParams(Job job) {
        return ja(job.getUserId(), job.getType(), job.getState(), job.getTarget(), job.getHostIP(),
                  job.getState() == JobState.PENDING ? job.getAttachment() : null,
                  job.getEstimatedLoad(), job.getType().isFileTransfer());
    }

    /** SQL params for archiving a job into the historical table. */
    private JsonArray buildHistoricalParams(Job job) {
        return ja(job.getUserId(), job.getType(), job.getTarget(), job.getHostIP(), job.getEstimatedLoad());
    }

    /**
     * Counts pending jobs (globally, or per target host when one is set) and
     * asserts the count is below the configured maximum. Schedulers without
     * pending-job support always report zero.
     */
    private Single<Integer> checkPendingCont(SQLConnection conn, Job job) {
        if (scheduler.supportPendingJob()) {
            Single<ResultSet> s = job.getHostIP() == null ?
                conn.rxQuery(COUNT_ALL_PENDING) :
                conn.rxQueryWithParams(COUNT_PENDING_BY_HOST_IP, ja(job.getHostIP()));
            return s.map(SQLHelper::count)
                .doOnSuccess((pending) -> SERVICE_ASSERT.isTrue(pending < pendingJobMaxCount,
                                                                ErrorCode.SERVER_TOO_BUSY));
        }
        return Single.just(0);
    }

    /** Picks the job's pinned worker if set, otherwise the most idle worker. */
    public Single<Worker> decideWorker(SQLConnection conn, Job job) {
        return job.getHostIP() == null ? selectMostIdleWorker(conn) : selectWorker(conn, job.getHostIP());
    }

    private Single<Master> selectMaster(SQLConnection conn, String hostIP) {
        return conn.rxQueryWithParams(MasterSQL.SELECT, ja(hostIP))
            .map(SQLHelper::singleRow)
            .map(MasterHelper::fromDBRecord);
    }

    public Single<Worker> selectMostIdleWorker(SQLConnection conn) {
        return conn.rxQuery(WorkerSQL.SELECT_MOST_IDLE)
            .map(SQLHelper::singleRow)
            .map(WorkerHelper::fromDBRecord);
    }

    public Single<Worker> selectWorker(SQLConnection conn, String hostIP) {
        return conn.rxQueryWithParams(WorkerSQL.SELECT_BY_IP, ja(hostIP))
            .map(SQLHelper::singleRow)
            .map(WorkerHelper::fromDBRecord);
    }

    /** Writes a worker's new current load; negative loads are a sanity-check failure. */
    public Completable updateWorkerLoad(SQLConnection conn, String hostIP, long load) {
        SERVICE_ASSERT.isTrue(load >= 0, ErrorCode.SANITY_CHECK);
        return conn.rxUpdateWithParams(WorkerSQL.UPDATE_LOAD, ja(load, hostIP))
            .doOnSuccess(SQLAssert::assertUpdated)
            .ignoreElement();
    }

    private Completable insertActiveJob(SQLConnection conn, Job job) {
        return conn.rxUpdateWithParams(INSERT_ACTIVE, buildActiveParams(job))
            .doOnSuccess(SQLAssert::assertUpdated)
            .ignoreElement();
    }

    /** SELECT TRUE probe used by {@link #isDBConnectivity}; maps any error to false. */
    private Single<Boolean> selectTrue(SQLConnection conn) {
        return conn.rxQuery(SQL.SELECT_TRUE)
            .map(SQLHelper::singleRow)
            .map(value -> value.getInteger("TRUE") == 1 ? Boolean.TRUE : Boolean.FALSE)
            .onErrorReturn(e -> {
                e.printStackTrace();
                return Boolean.FALSE;
            });
    }

    private Completable deleteActiveJob(SQLConnection conn, Job job) {
        return conn.rxUpdateWithParams(DELETE_ACTIVE_BY_TYPE_AND_TARGET, ja(job.getType(), job.getTarget()))
            .doOnSuccess(SQLAssert::assertUpdated)
            .ignoreElement();
    }

    /** Selects the single active job for (type, target); fails if absent. */
    private Single<Job> selectActiveJob(SQLConnection conn, JobType jobType, String target) {
        return conn.rxQueryWithParams(JobSQL.SELECT_ACTIVE_BY_TYPE_AND_TARGET, ja(jobType, target))
            .map(SQLHelper::singleRow)
            .map(JobHelper::fromDBRecord);
    }

    /** Selects all active jobs referencing {@code target}, possibly empty. */
    private Single<List<Job>> selectActiveJob(SQLConnection conn, String target) {
        return conn.rxQueryWithParams(JobSQL.SELECT_ACTIVE_BY_TARGET, ja(target))
            .map(records -> records.getRows().stream().map(JobHelper::fromDBRecord).collect(Collectors.toList())
            );
    }

    private Completable insertHistoricalJob(SQLConnection conn, Job job) {
        return conn.rxUpdateWithParams(INSERT_HISTORICAL, buildHistoricalParams(job))
            .doOnSuccess(SQLAssert::assertUpdated)
            .ignoreElement();
    }

    /** Promotes a pending job to IN_PROGRESS on its assigned host, stamping the time. */
    public Completable updatePendingJobToInProcess(SQLConnection conn, Job job) {
        JsonArray params = ja(job.getHostIP(), Instant.now(), job.getType(), job.getTarget());
        return conn.rxUpdateWithParams(JobSQL.UPDATE_TO_IN_PROGRESS, params)
            .doOnSuccess(SQLAssert::assertUpdated)
            .ignoreElement();
    }

    /** Selects a file by name; fails if absent (contrast {@link #selectFileOrNotFount}). */
    private Single<File> selectFile(SQLConnection conn, String name) {
        return conn.rxQueryWithParams(FileSQL.SELECT_FILE_BY_NAME, ja(name))
            .map(SQLHelper::singleRow)
            .map(FileHelper::fromDBRecord);
    }

    /** Selects a file by name, yielding {@link File#NOT_FOUND} instead of failing. */
    private Single<File> selectFileOrNotFount(SQLConnection conn, String name) {
        return conn.rxQueryWithParams(FileSQL.SELECT_FILE_BY_NAME, ja(name))
            .map(res -> {
                if (res.getNumRows() > 0) {
                    return FileHelper.fromDBRecord(SQLHelper.singleRow(res));
                }
                return File.NOT_FOUND;
            });
    }

    /** Soft-deletes file rows one by one, recording who deleted them. */
    private Completable deleteFileRecords(SQLConnection conn, Deleter deleter, File... files) {
        return Observable.fromArray(files)
            .flatMapCompletable(
                file -> conn.rxUpdateWithParams(FileSQL.DELETE_FILE_BY_NAME, ja(deleter, file.getName()))
                    .doOnSuccess(SQLAssert::assertUpdated).ignoreElement());
    }

    /** Writes the transfer outcome onto the file row; missing rows are tolerated. */
    private Completable updateFileTransferResult(SQLConnection conn, Job job, String name,
                                                 FileTransferState transferState, long size) {
        return conn.rxUpdateWithParams(FileSQL.UPDATE_TRANSFER_RESULT, ja(transferState, size, name))
            // file record may not exist for some reason
            // .doOnSuccess(SQLAssert::assertUpdated)
            .ignoreElement();
    }
}
3,185
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/SupportServiceImpl.java
/******************************************************************************** * Copyright (c) 2022 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.CompletableHelper; import io.vertx.reactivex.SingleHelper; import io.vertx.reactivex.ext.jdbc.JDBCClient; import org.eclipse.jifa.master.entity.Job; import org.eclipse.jifa.master.service.SupportService; import org.eclipse.jifa.master.support.K8SWorkerScheduler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SupportServiceImpl implements SupportService { private static final Logger LOGGER = LoggerFactory.getLogger(SupportServiceImpl.class.getName()); private final Pivot pivot; public SupportServiceImpl(JDBCClient dbClient, Pivot pivot) { this.pivot = pivot; } @Override public void isDBConnectivity(Handler<AsyncResult<Boolean>> handler) { pivot.isDBConnectivity().subscribe(SingleHelper.toObserver(handler)); } @Override public void startDummyWorker(Handler<AsyncResult<Void>> handler) { assert pivot.getScheduler() instanceof K8SWorkerScheduler : "unexpected call"; String testWorkerName = K8SWorkerScheduler.getSpecialWorkerPrefix() + "-health-test"; Job dummyJob = new Job(); dummyJob.setTarget(testWorkerName); pivot.getScheduler().start(dummyJob).subscribe(CompletableHelper.toObserver(handler)); } @Override public void stopDummyWorker(Handler<AsyncResult<Void>> handler) { assert pivot.getScheduler() instanceof K8SWorkerScheduler : "unexpected call"; String 
testWorkerName = K8SWorkerScheduler.getSpecialWorkerPrefix() + "-health-test"; Job dummyJob = new Job(); dummyJob.setTarget(testWorkerName); pivot.getScheduler().stop(dummyJob).subscribe(CompletableHelper.toObserver(handler)); } }
3,186
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/AdminServiceImpl.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.reactivex.CompletableHelper; import io.vertx.reactivex.SingleHelper; import io.vertx.reactivex.ext.jdbc.JDBCClient; import org.eclipse.jifa.master.entity.Admin; import org.eclipse.jifa.master.service.AdminService; import org.eclipse.jifa.master.service.impl.helper.AdminHelper; import org.eclipse.jifa.master.service.impl.helper.SQLAssert; import org.eclipse.jifa.master.service.sql.AdminSQL; import java.util.List; import java.util.stream.Collectors; import static org.eclipse.jifa.master.service.impl.helper.SQLHelper.ja; public class AdminServiceImpl implements AdminService { private final JDBCClient dbClient; public AdminServiceImpl(JDBCClient dbClient) { this.dbClient = dbClient; } @Override public void isAdmin(String userId, Handler<AsyncResult<Boolean>> resultHandler) { dbClient.rxQueryWithParams(AdminSQL.SELECT_BY_USER_ID, ja(userId)) .map(resultSet -> resultSet.getNumRows() == 1) .subscribe(SingleHelper.toObserver(resultHandler)); } @Override public void add(String userId, Handler<AsyncResult<Void>> handler) { dbClient.rxUpdateWithParams(AdminSQL.INSERT, ja(userId)) .doOnSuccess(SQLAssert::assertUpdated) .ignoreElement() .subscribe(CompletableHelper.toObserver(handler)); } @Override public void queryAll(Handler<AsyncResult<List<Admin>>> handler) { dbClient.rxQuery(AdminSQL.QUERY_ALL) 
.map(records -> records.getRows().stream().map(AdminHelper::fromDBRecord).collect(Collectors.toList())) .subscribe(SingleHelper.toObserver(handler)); } }
3,187
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/FileHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; import org.eclipse.jifa.common.enums.FileTransferState; import org.eclipse.jifa.common.enums.FileType; import org.eclipse.jifa.master.entity.File; import java.time.Instant; public class FileHelper { public static File fromDBRecord(JsonObject jsonObject) { File file = new File(); EntityHelper.fill(file, jsonObject); file.setUserId(jsonObject.getString("user_id")); file.setOriginalName(jsonObject.getString("original_name")); file.setDisplayName(jsonObject.getString("display_name")); file.setName(jsonObject.getString("name")); file.setType(FileType.valueOf(jsonObject.getString("type"))); file.setSize(jsonObject.getLong("size")); file.setHostIP(jsonObject.getString("host_ip")); file.setTransferState(FileTransferState.valueOf(jsonObject.getString("transfer_state"))); file.setShared(jsonObject.getInteger("shared") == 1); file.setDownloadable(jsonObject.getInteger("downloadable") == 1); file.setInSharedDisk(jsonObject.getInteger("in_shared_disk") == 1); file.setDeleted(jsonObject.getInteger("deleted") == 1); Instant deletedTime = jsonObject.getInstant("deleted_time"); if (deletedTime != null) { file.setDeletedTime(deletedTime.toEpochMilli()); } return file; } }
3,188
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/WorkerHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; import org.eclipse.jifa.master.entity.Worker; public class WorkerHelper { public static Worker fromDBRecord(JsonObject jsonObject) { Worker worker = new Worker(); EntityHelper.fill(worker, jsonObject); worker.setHostIP(jsonObject.getString("host_ip")); worker.setHostName(jsonObject.getString("host_name")); worker.setCurrentLoad(jsonObject.getLong("current_load")); worker.setMaxLoad(jsonObject.getLong("max_load")); worker.setCpuCount(jsonObject.getLong("cpu_count")); worker.setMemoryUsed(jsonObject.getLong("memory_used")); worker.setMemoryTotal(jsonObject.getLong("memory_total")); worker.setDiskUsed(jsonObject.getLong("disk_used")); worker.setDiskTotal(jsonObject.getLong("disk_total")); return worker; } }
3,189
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/EntityHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; import org.eclipse.jifa.master.entity.Entity; class EntityHelper { static void fill(Entity entity, JsonObject json) { entity.setId(json.getLong("id")); entity.setLastModifiedTime(json.getInstant("last_modified_time").toEpochMilli()); entity.setCreationTime(json.getInstant("creation_time").toEpochMilli()); } }
3,190
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/MasterHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; import org.eclipse.jifa.master.entity.Master; public class MasterHelper { public static Master fromDBRecord(JsonObject jsonObject) { Master master = new Master(); EntityHelper.fill(master, jsonObject); master.setHostIP(jsonObject.getString("host_ip")); master.setHostName(jsonObject.getString("host_name")); master.setLeader(jsonObject.getInteger("leader") == 1); return master; } }
3,191
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/SQLAssert.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.ext.sql.ResultSet; import io.vertx.ext.sql.UpdateResult; import org.eclipse.jifa.common.ErrorCode; import static org.eclipse.jifa.master.service.ServiceAssertion.SERVICE_ASSERT; public class SQLAssert { public static void assertSelected(ResultSet resultSet) { assertSelected(resultSet, 1); } public static void assertSelected(ResultSet result, int expected) { SERVICE_ASSERT.isTrue(result.getNumRows() == expected, ErrorCode.SANITY_CHECK); } public static void assertUpdated(UpdateResult result) { assertUpdated(result, 1); } public static void assertUpdated(UpdateResult result, int expected) { SERVICE_ASSERT.isTrue(result.getUpdated() == expected, ErrorCode.SANITY_CHECK); } }
3,192
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/AdminHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; import org.eclipse.jifa.master.entity.Admin; public class AdminHelper { public static Admin fromDBRecord(JsonObject jsonObject) { Admin job = new Admin(); EntityHelper.fill(job, jsonObject); job.setUserId(jsonObject.getString("user_id")); return job; } }
3,193
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/JobHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; import org.eclipse.jifa.master.entity.Job; import org.eclipse.jifa.master.entity.enums.JobState; import org.eclipse.jifa.master.entity.enums.JobType; import java.time.Instant; public class JobHelper { public static Job fromDBRecord(JsonObject jsonObject) { Job job = new Job(); EntityHelper.fill(job, jsonObject); job.setUserId(jsonObject.getString("user_id")); job.setType(JobType.valueOf(jsonObject.getString("type"))); job.setTarget(jsonObject.getString("target")); job.setState(JobState.valueOf(jsonObject.getString("state"))); job.setHostIP(jsonObject.getString("host_ip")); job.setEstimatedLoad(jsonObject.getLong("estimated_load")); job.setAttachment(jsonObject.getString("attachment")); Instant accessTime = jsonObject.getInstant("access_time"); if (accessTime != null) { job.setAccessTime(accessTime.toEpochMilli()); } return job; } }
3,194
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/SQLHelper.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.ext.sql.ResultSet; import org.eclipse.jifa.master.service.sql.SQL; import java.time.Instant; public class SQLHelper { public static JsonObject singleRow(ResultSet resultSet) { SQLAssert.assertSelected(resultSet); return resultSet.getRows().get(0); } public static int count(ResultSet resultSet) { SQLAssert.assertSelected(resultSet); return resultSet.getRows().get(0).getInteger(SQL.COUNT_NAME); } public static JsonArray ja(String param) { return new JsonArray().add(param); } public static JsonArray ja(String param1, String param2) { return ja(param1).add(param2); } public static JsonArray ja(Instant param) { return new JsonArray().add(param); } public static JsonArray ja(Object... params) { JsonArray jsonArray = new JsonArray(); for (Object param : params) { if (param instanceof JsonArray) { jsonArray.addAll((JsonArray) param); } else if (param instanceof Enum) { jsonArray.add((Enum) param); } else { jsonArray.add(param); } } return jsonArray; } }
3,195
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/impl/helper/ConfigHelper.java
/******************************************************************************** * Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.impl.helper; import io.vertx.core.json.JsonObject; public class ConfigHelper { public static int getInt(JsonObject json) { return Integer.parseInt(json.getString("value")); } public static long getLong(JsonObject json) { return Long.parseLong(json.getString("value")); } public static boolean getBoolean(JsonObject json) { return Boolean.parseBoolean(json.getString("value")); } public static String getString(JsonObject json) { return json.getString("value"); } }
3,196
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/sql/ConfigSQL.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.sql; public interface ConfigSQL { String SELECT = "SELECT * FROM config WHERE name = ?"; }
3,197
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/sql/JobSQL.java
/********************************************************************************
 * Copyright (c) 2020 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 ********************************************************************************/
package org.eclipse.jifa.master.service.sql;

/**
 * SQL statements for the active_job and historical_job tables.
 */
public interface JobSQL {

    /** Counts all pending jobs regardless of host. */
    String COUNT_ALL_PENDING = "SELECT COUNT(*) FROM active_job WHERE state = 'PENDING'";

    /** Counts pending jobs that are unassigned or assigned to the given host. Parameters: host_ip. */
    String COUNT_PENDING_BY_HOST_IP =
        "SELECT COUNT(*) FROM active_job WHERE state = 'PENDING' AND (host_ip IS NULL or host_ip = ?)";

    /** Selects all pending jobs, oldest first. */
    String SELECT_ALL_PENDING = "SELECT * FROM active_job WHERE state = 'PENDING' ORDER BY creation_time ASC";

    /** Inserts a new active job. Parameters: user_id, type, state, target, host_ip, attachment, estimated_load, keep_alive. */
    String INSERT_ACTIVE =
        "INSERT INTO active_job(user_id, type, state, target, host_ip, attachment, estimated_load, keep_alive) VALUE" +
        "(?, ?, ?, ?, ?, ?, ?, ?)";

    /** Refreshes a job's access time to now. Parameters: type, target. */
    String UPDATE_ACCESS_TIME = "UPDATE active_job SET access_time = now() WHERE type = ? AND target = ?";

    /** Selects an active job by its (type, target) key. Parameters: type, target. */
    String SELECT_ACTIVE_BY_TYPE_AND_TARGET = "SELECT * FROM active_job WHERE type = ? AND target = ?";

    /** Selects active jobs by target only. Parameters: target. */
    String SELECT_ACTIVE_BY_TARGET = "SELECT * FROM active_job WHERE target = ?";

    /** Deletes an active job by its (type, target) key. Parameters: type, target. */
    String DELETE_ACTIVE_BY_TYPE_AND_TARGET = "DELETE FROM active_job WHERE type = ? AND target = ?";

    /** Archives a job into historical_job. Parameters: user_id, type, target, host_ip, estimated_load. */
    String INSERT_HISTORICAL = "INSERT INTO historical_job(user_id, type, target, host_ip, estimated_load) VALUE(?, ?, ?, ?, ?)";

    /** Selects pending jobs queued before the given time, oldest first. Parameters: creation_time. */
    String SELECT_FRONT_PENDING =
        "SELECT * FROM active_job WHERE state = 'PENDING' AND creation_time < ? ORDER BY creation_time ASC";

    /** Same as above, restricted to unassigned jobs or the given host. Parameters: host_ip, creation_time. */
    String SELECT_FRONT_PENDING_BY_HOST_IP =
        "SELECT * FROM active_job WHERE state = 'PENDING' AND (host_ip IS NULL or host_ip = ?) AND " +
        "creation_time < ? ORDER BY creation_time ASC";

    /** Selects in-progress, non-keep-alive jobs idle since the given time. Parameters: access_time. */
    String SELECT_TO_RETIRE = "SELECT * FROM active_job WHERE state = 'IN_PROGRESS' AND keep_alive = 0 AND access_time <= ?";

    /** Selects in-progress file-transfer jobs idle since the given time. Parameters: access_time. */
    String SELECT_TRANSFER_JOB_TO_FILLING_RESULT =
        "SELECT * FROM active_job WHERE state = 'IN_PROGRESS' AND type = 'FILE_TRANSFER' AND access_time <= ?";

    /** Marks a job in progress on a host. Parameters: host_ip, access_time, type, target. */
    String UPDATE_TO_IN_PROGRESS = "UPDATE active_job SET state = 'IN_PROGRESS', host_ip = ?, access_time = ? WHERE type = ? AND target = ?";

    /** Selects every active job, regardless of state. */
    String SELECT_ALL_ACTIVE_JOBS = "SELECT * FROM active_job";

    /** Selects a file-transfer job by its target name. Parameters: target. */
    String SELECT_TRANSFER_JOB_BY_NAME = "SELECT * FROM active_job WHERE type = 'FILE_TRANSFER' AND target = ?";
}
3,198
0
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service
Create_ds/eclipse-jifa/backend/master/src/main/java/org/eclipse/jifa/master/service/sql/GlobalLockSQL.java
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 ********************************************************************************/ package org.eclipse.jifa.master.service.sql; public interface GlobalLockSQL { String LOCK = "SELECT * FROM global_lock WHERE name = ? FOR UPDATE"; }
3,199