code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
@Override
protected CompletableFuture<ThreadDumpInfo> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request,
        @Nonnull RestfulGateway gateway)
        throws RestHandlerException {
    // Thin delegate: ask the JobManager gateway for its thread dump.
    // `timeout` is the handler's configured RPC timeout (field of the enclosing handler).
    return gateway.requestThreadDump(timeout);
}
Rest handler which serves the thread dump info from the JobManager.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/cluster/JobManagerThreadDumpHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/cluster/JobManagerThreadDumpHandler.java
Apache-2.0
@Override
protected CompletableFuture<EmptyResponseBody> handleRequest(
        @Nonnull final HandlerRequest<EmptyRequestBody> request,
        @Nonnull final RestfulGateway gateway)
        throws RestHandlerException {
    // Trigger the cluster shutdown and, once it is acknowledged, answer the
    // REST call with an empty response body.
    return gateway
            .shutDownCluster()
            .thenApply(acknowledge -> EmptyResponseBody.getInstance());
}
REST handler which allows to shut down the cluster.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/cluster/ShutdownHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/cluster/ShutdownHandler.java
Apache-2.0
@Override
protected R handleRequest(
        HandlerRequest<EmptyRequestBody> request, ExecutionGraphInfo executionGraphInfo)
        throws RestHandlerException {
    // Adapter: unwrap the archived execution graph from the ExecutionGraphInfo and
    // forward to the AccessExecutionGraph-based overload implemented by subclasses.
    return handleRequest(request, executionGraphInfo.getArchivedExecutionGraph());
}
{@code AbstractAccessExecutionGraphHandler} handles requests that require accessing the job's {@link AccessExecutionGraph}. @param <R> the response type @param <M> the message parameter type
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AbstractAccessExecutionGraphHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AbstractAccessExecutionGraphHandler.java
Apache-2.0
@Override
protected CompletableFuture<R> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway)
        throws RestHandlerException {
    // Resolve the target job from the URL path.
    JobID jobId = request.getPathParameter(JobIDPathParameter.class);

    // Fetch via the cache so repeated REST calls do not re-request the
    // execution graph from the gateway each time.
    CompletableFuture<ExecutionGraphInfo> executionGraphFuture =
            executionGraphCache.getExecutionGraphInfo(jobId, gateway);

    return executionGraphFuture
            .thenApplyAsync(
                    executionGraph -> {
                        try {
                            // Subclasses implement the synchronous, graph-based overload.
                            return handleRequest(request, executionGraph);
                        } catch (RestHandlerException rhe) {
                            // CompletableFuture lambdas cannot throw checked exceptions;
                            // tunnel the handler exception through CompletionException.
                            throw new CompletionException(rhe);
                        }
                    },
                    executor)
            .exceptionally(
                    throwable -> {
                        throwable = ExceptionUtils.stripCompletionException(throwable);
                        if (throwable instanceof FlinkJobNotFoundException) {
                            // Map "job unknown" to an HTTP 404 instead of a generic 500.
                            throw new CompletionException(
                                    new NotFoundException(
                                            String.format("Job %s not found", jobId),
                                            throwable));
                        } else {
                            throw new CompletionException(throwable);
                        }
                    });
}
Base class for all {@link ExecutionGraphInfo} based REST handlers. @param <R> response type @param <M> job message parameter type
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AbstractExecutionGraphHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AbstractExecutionGraphHandler.java
Apache-2.0
/**
 * Creates a key identifying an asynchronous operation triggered for the given job.
 *
 * @param triggerId identifier of the triggered operation
 * @param jobId job the operation belongs to
 * @return a new {@code AsynchronousJobOperationKey} for the pair
 */
public static AsynchronousJobOperationKey of(final TriggerId triggerId, final JobID jobId) {
    return new AsynchronousJobOperationKey(triggerId, jobId);
}
A pair of {@link JobID} and {@link TriggerId} used as a key to a hash based collection. @see AbstractAsynchronousOperationHandlers
of
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AsynchronousJobOperationKey.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AsynchronousJobOperationKey.java
Apache-2.0
@Override
public CompletableFuture<EmptyResponseBody> handleRequest(
        HandlerRequest<EmptyRequestBody> request, RestfulGateway gateway)
        throws RestHandlerException {
    final JobID jobId = request.getPathParameter(JobIDPathParameter.class);
    final List<TerminationModeQueryParameter.TerminationMode> terminationModes =
            request.getQueryParameter(TerminationModeQueryParameter.class);
    final TerminationModeQueryParameter.TerminationMode terminationMode;

    if (terminationModes.isEmpty()) {
        // No "mode" query parameter given: fall back to the handler's default.
        terminationMode = defaultTerminationMode;
    } else {
        // picking the first termination mode value
        terminationMode = terminationModes.get(0);
    }

    final CompletableFuture<Acknowledge> terminationFuture;

    switch (terminationMode) {
        case CANCEL:
            terminationFuture = gateway.cancelJob(jobId, timeout);
            break;
        case STOP:
            // "stop" was removed as a termination mode; point callers at the
            // dedicated stop endpoint instead.
            throw new RestHandlerException(
                    "The termination mode \"stop\" has been removed. For "
                            + "an ungraceful shutdown, please use \"cancel\" instead. For a graceful shutdown, "
                            + "please use \"jobs/:jobId/stop\" instead.",
                    HttpResponseStatus.PERMANENT_REDIRECT);
        default:
            terminationFuture =
                    FutureUtils.completedExceptionally(
                            new RestHandlerException(
                                    "Unknown termination mode " + terminationMode + '.',
                                    HttpResponseStatus.BAD_REQUEST));
    }

    // Translate the asynchronous cancellation outcome into REST semantics:
    // success -> empty body; known failure causes -> specific HTTP status codes.
    return terminationFuture.handle(
            (Acknowledge ack, Throwable throwable) -> {
                if (throwable != null) {
                    Throwable error = ExceptionUtils.stripCompletionException(throwable);
                    if (error instanceof FlinkJobTerminatedWithoutCancellationException) {
                        // Job already reached a different terminal state -> 409.
                        throw new CompletionException(
                                new RestHandlerException(
                                        String.format(
                                                "Job cancellation failed because the job has already reached another terminal state (%s).",
                                                ((FlinkJobTerminatedWithoutCancellationException)
                                                                error)
                                                        .getJobStatus()),
                                        HttpResponseStatus.CONFLICT));
                    } else if (error instanceof TimeoutException) {
                        throw new CompletionException(
                                new RestHandlerException(
                                        "Job cancellation timed out.",
                                        HttpResponseStatus.REQUEST_TIMEOUT,
                                        error));
                    } else if (error instanceof FlinkJobNotFoundException) {
                        throw new CompletionException(
                                new RestHandlerException(
                                        "Job could not be found.",
                                        HttpResponseStatus.NOT_FOUND,
                                        error));
                    } else {
                        throw new CompletionException(
                                new RestHandlerException(
                                        "Job cancellation failed: " + error.getMessage(),
                                        HttpResponseStatus.INTERNAL_SERVER_ERROR,
                                        error));
                    }
                } else {
                    return EmptyResponseBody.getInstance();
                }
            });
}
Request handler for the cancel and stop request.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobCancellationHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobCancellationHandler.java
Apache-2.0
@Override
public CompletableFuture<EmptyResponseBody> handleRequest(
        HandlerRequest<JobClientHeartbeatRequestBody> request, RestfulGateway gateway)
        throws RestHandlerException {
    // Forward the client heartbeat (with its expiration timestamp) to the gateway
    // for the job named in the URL path.
    return gateway.reportJobClientHeartbeat(
                    request.getPathParameter(JobIDPathParameter.class),
                    request.getRequestBody().getExpiredTimestamp(),
                    timeout)
            .handle(
                    (Void ack, Throwable error) -> {
                        if (error != null) {
                            String errorMessage =
                                    "Fail to report jobClient's heartbeat: "
                                            + error.getMessage();
                            LOG.error(errorMessage, error);
                            // Surface the failure to the client as an HTTP 500,
                            // keeping the original cause attached.
                            throw new CompletionException(
                                    new RestHandlerException(
                                            errorMessage,
                                            HttpResponseStatus.INTERNAL_SERVER_ERROR,
                                            error));
                        } else {
                            return EmptyResponseBody.getInstance();
                        }
                    });
}
Receive the heartbeat from the client.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobClientHeartbeatHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobClientHeartbeatHandler.java
Apache-2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
        HandlerRequest<EmptyRequestBody> request, AccessExecutionJobVertex jobVertex)
        throws RestHandlerException {
    // Convert the vertex's aggregated, stringified user accumulators into the
    // REST response representation.
    final StringifiedAccumulatorResult[] aggregatedResults =
            jobVertex.getAggregatedUserAccumulatorsStringified();
    final ArrayList<UserAccumulator> accumulators = new ArrayList<>(aggregatedResults.length);
    for (final StringifiedAccumulatorResult result : aggregatedResults) {
        accumulators.add(
                new UserAccumulator(result.getName(), result.getType(), result.getValue()));
    }
    return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), accumulators);
}
Request handler for the job vertex accumulators.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexAccumulatorsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexAccumulatorsHandler.java
Apache-2.0
/**
 * Classifies a back pressure ratio into a severity level.
 *
 * <p>Thresholds: ratio &le; 0.10 is OK, ratio &le; 0.5 is LOW, anything above is HIGH.
 */
private static JobVertexBackPressureInfo.VertexBackPressureLevel getBackPressureLevel(
        double backPressureRatio) {
    return backPressureRatio <= 0.10
            ? JobVertexBackPressureInfo.VertexBackPressureLevel.OK
            : backPressureRatio <= 0.5
                    ? JobVertexBackPressureInfo.VertexBackPressureLevel.LOW
                    : JobVertexBackPressureInfo.VertexBackPressureLevel.HIGH;
}
Returns the back pressure level for the given ratio. @param backPressureRatio Ratio of back pressures samples to total number of samples. @return Back pressure level ({@code OK}, {@code LOW}, or {@code HIGH})
getBackPressureLevel
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexBackPressureHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexBackPressureHandler.java
Apache-2.0
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
        HandlerRequest<EmptyRequestBody> request, AccessExecutionVertex executionVertex)
        throws RestHandlerException {
    // Details are reported for the subtask's current execution attempt.
    final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();
    final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
    final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

    final Collection<AccessExecution> attempts = executionVertex.getCurrentExecutions();
    // Stays null when there is only one current execution — the response then
    // carries no "other attempts" section.
    List<SubtaskExecutionAttemptDetailsInfo> otherConcurrentAttempts = null;

    if (attempts.size() > 1) {
        otherConcurrentAttempts = new ArrayList<>();
        for (AccessExecution attempt : attempts) {
            // Skip the primary attempt; it is reported as the top-level result below.
            if (attempt.getAttemptNumber() != execution.getAttemptNumber()) {
                otherConcurrentAttempts.add(
                        SubtaskExecutionAttemptDetailsInfo.create(
                                attempt, metricFetcher, jobID, jobVertexID, null));
            }
        }
    }

    return SubtaskExecutionAttemptDetailsInfo.create(
            execution, metricFetcher, jobID, jobVertexID, otherConcurrentAttempts);
}
Request handler providing details about a single task execution attempt.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/SubtaskCurrentAttemptDetailsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/SubtaskCurrentAttemptDetailsHandler.java
Apache-2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(
        HandlerRequest<EmptyRequestBody> request, AccessExecutionJobVertex jobVertex)
        throws RestHandlerException {
    JobVertexID jobVertexId = jobVertex.getJobVertexId();
    int parallelism = jobVertex.getParallelism();

    final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos =
            new ArrayList<>();

    // One entry per current execution attempt of every subtask of this vertex.
    for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
        for (AccessExecution execution : vertex.getCurrentExecutions()) {
            TaskManagerLocation location = execution.getAssignedResourceLocation();
            // The attempt may not have been deployed to a TaskManager yet.
            // (The previously computed, unused `host` local was removed; only the
            // endpoint is part of the response.)
            String endpoint = location == null ? "(unassigned)" : location.getEndpoint();

            StringifiedAccumulatorResult[] accs = execution.getUserAccumulatorsStringified();
            List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
            for (StringifiedAccumulatorResult acc : accs) {
                userAccumulators.add(
                        new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
            }

            subtaskAccumulatorsInfos.add(
                    new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
                            execution.getParallelSubtaskIndex(),
                            execution.getAttemptNumber(),
                            endpoint,
                            userAccumulators));
        }
    }

    return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
Request handler for the subtasks all accumulators.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/SubtasksAllAccumulatorsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/SubtasksAllAccumulatorsHandler.java
Apache-2.0
@Override
protected R handleCheckpointStatsRequest(
        HandlerRequest<EmptyRequestBody> request,
        CheckpointStatsSnapshot checkpointStatsSnapshot)
        throws RestHandlerException {
    JobID jobId = request.getPathParameter(JobIDPathParameter.class);
    final long checkpointId = request.getPathParameter(CheckpointIdPathParameter.class);

    // Guard: no snapshot means checkpointing was never enabled for this job.
    if (checkpointStatsSnapshot == null) {
        throw new RestHandlerException(
                "Checkpointing was not enabled for job " + jobId + '.',
                HttpResponseStatus.NOT_FOUND);
    }

    AbstractCheckpointStats checkpointStats =
            checkpointStatsSnapshot.getHistory().getCheckpointById(checkpointId);
    if (checkpointStats != null) {
        // Found in the history: remember it so it stays retrievable after it
        // rotates out of the bounded history.
        checkpointStatsCache.tryAdd(checkpointStats);
    } else {
        // Fall back to the cache for checkpoints no longer in the history.
        checkpointStats = checkpointStatsCache.tryGet(checkpointId);
    }

    if (checkpointStats == null) {
        throw new RestHandlerException(
                "Could not find checkpointing statistics for checkpoint "
                        + checkpointId
                        + '.',
                HttpResponseStatus.NOT_FOUND);
    }

    return handleCheckpointRequest(request, checkpointStats);
}
Base class for checkpoint related REST handler. @param <R> type of the response
handleCheckpointStatsRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/AbstractCheckpointHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/AbstractCheckpointHandler.java
Apache-2.0
@Override
protected CompletableFuture<R> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway)
        throws RestHandlerException {
    JobID jobId = request.getPathParameter(JobIDPathParameter.class);
    try {
        // Serve the snapshot from the cache, loading it from the gateway on a miss.
        return checkpointStatsSnapshotCache
                .get(jobId, () -> gateway.requestCheckpointStats(jobId, timeout))
                .thenApplyAsync(
                        checkpointStatsSnapshot -> {
                            try {
                                // Subclasses implement the synchronous, snapshot-based part.
                                return handleCheckpointStatsRequest(
                                        request, checkpointStatsSnapshot);
                            } catch (RestHandlerException e) {
                                // Checked exceptions cannot escape the lambda directly.
                                throw new CompletionException(e);
                            }
                        },
                        executor)
                .exceptionally(
                        throwable -> {
                            throwable = ExceptionUtils.stripCompletionException(throwable);
                            if (throwable instanceof FlinkJobNotFoundException) {
                                // Unknown job -> HTTP 404.
                                throw new CompletionException(
                                        new NotFoundException(
                                                String.format("Job %s not found", jobId),
                                                throwable));
                            } else {
                                throw new CompletionException(throwable);
                            }
                        });
    } catch (ExecutionException e) {
        // Cache loader failure: report it through the returned future rather than
        // throwing synchronously.
        CompletableFuture<R> future = new CompletableFuture<>();
        future.completeExceptionally(e);
        return future;
    }
}
Abstract class for checkpoint handlers that will cache the {@link CheckpointStatsSnapshot} object. @param <R> the response type @param <M> the message parameters
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/AbstractCheckpointStatsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/AbstractCheckpointStatsHandler.java
Apache-2.0
public void tryAdd(AbstractCheckpointStats checkpoint) { // Don't add in progress checkpoints as they will be replaced by their // completed/failed version eventually. if (cache != null && checkpoint != null && !checkpoint.getStatus().isInProgress()) { cache.put(checkpoint.getCheckpointId(), checkpoint); } }
Try to add the checkpoint to the cache. @param checkpoint Checkpoint to be added.
tryAdd
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatsCache.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatsCache.java
Apache-2.0
/**
 * Tries to look up a checkpoint by its ID in the cache.
 *
 * @param checkpointId ID of the checkpoint to look up.
 * @return The cached checkpoint, or {@code null} if it is not cached or caching is disabled.
 */
public AbstractCheckpointStats tryGet(long checkpointId) {
    return cache == null ? null : cache.getIfPresent(checkpointId);
}
Try to look up a checkpoint by its ID in the cache. @param checkpointId ID of the checkpoint to look up. @return The checkpoint or <code>null</code> if the checkpoint was not found.
tryGet
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatsCache.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/checkpoints/CheckpointStatsCache.java
Apache-2.0
@Override
protected CompletableFuture<ClientCoordinationResponseBody> handleRequest(
        @Nonnull HandlerRequest<ClientCoordinationRequestBody> request,
        @Nonnull RestfulGateway gateway)
        throws RestHandlerException {
    JobID jobId = request.getPathParameter(JobIDPathParameter.class);
    String operatorUid = request.getPathParameter(OperatorUidPathParameter.class);
    // The request payload stays in serialized form; only the target coordinator
    // deserializes it.
    SerializedValue<CoordinationRequest> serializedRequest =
            request.getRequestBody().getSerializedCoordinationRequest();
    CompletableFuture<CoordinationResponse> responseFuture =
            gateway.deliverCoordinationRequestToCoordinator(
                    jobId, operatorUid, serializedRequest, timeout);
    return responseFuture.thenApply(
            coordinationResponse -> {
                try {
                    // Re-serialize the coordinator's response for transport back to the client.
                    return new ClientCoordinationResponseBody(
                            new SerializedValue<>(coordinationResponse));
                } catch (IOException e) {
                    // Serialization failure -> HTTP 500 with the cause attached.
                    throw new CompletionException(
                            new RestHandlerException(
                                    "Failed to serialize coordination response",
                                    HttpResponseStatus.INTERNAL_SERVER_ERROR,
                                    e));
                }
            });
}
Handler that receives the coordination requests from the client and returns the response from the coordinator.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/coordination/ClientCoordinationHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/coordination/ClientCoordinationHandler.java
Apache-2.0
@Override protected CompletableFuture<AggregatedMetricsResponseBody> handleRequest( @Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway) throws RestHandlerException { return CompletableFuture.supplyAsync( () -> { try { fetcher.update(); List<String> requestedMetrics = request.getQueryParameter(MetricsFilterParameter.class); List<MetricsAggregationParameter.AggregationMode> requestedAggregations = request.getQueryParameter(MetricsAggregationParameter.class); MetricStore store = fetcher.getMetricStore(); Collection<? extends MetricStore.ComponentMetricStore> stores = getStores(store, request); if (requestedMetrics.isEmpty()) { Collection<String> list = getAvailableMetrics(stores); return new AggregatedMetricsResponseBody( list.stream() .map(AggregatedMetric::new) .collect(Collectors.toList())); } DoubleAccumulator.DoubleMinimumFactory minimumFactory = null; DoubleAccumulator.DoubleMaximumFactory maximumFactory = null; DoubleAccumulator.DoubleAverageFactory averageFactory = null; DoubleAccumulator.DoubleSumFactory sumFactory = null; DoubleAccumulator.DoubleDataSkewFactory skewFactory = null; // by default we return all aggregations if (requestedAggregations.isEmpty()) { minimumFactory = DoubleAccumulator.DoubleMinimumFactory.get(); maximumFactory = DoubleAccumulator.DoubleMaximumFactory.get(); averageFactory = DoubleAccumulator.DoubleAverageFactory.get(); sumFactory = DoubleAccumulator.DoubleSumFactory.get(); skewFactory = DoubleAccumulator.DoubleDataSkewFactory.get(); } else { for (MetricsAggregationParameter.AggregationMode aggregation : requestedAggregations) { switch (aggregation) { case MIN: minimumFactory = DoubleAccumulator.DoubleMinimumFactory.get(); break; case MAX: maximumFactory = DoubleAccumulator.DoubleMaximumFactory.get(); break; case AVG: averageFactory = DoubleAccumulator.DoubleAverageFactory.get(); break; case SUM: sumFactory = DoubleAccumulator.DoubleSumFactory.get(); break; case SKEW: skewFactory = 
DoubleAccumulator.DoubleDataSkewFactory.get(); break; default: log.warn( "Unsupported aggregation specified: {}", aggregation); } } } MetricAccumulatorFactory metricAccumulatorFactory = new MetricAccumulatorFactory( minimumFactory, maximumFactory, averageFactory, sumFactory, skewFactory); return getAggregatedMetricValues( stores, requestedMetrics, metricAccumulatorFactory); } catch (Exception e) { log.warn("Could not retrieve metrics.", e); throw new CompletionException( new RestHandlerException( "Could not retrieve metrics.", HttpResponseStatus.INTERNAL_SERVER_ERROR)); } }, executor); }
Abstract request handler for querying aggregated metrics. Subclasses return either a list of all available metrics or the aggregated values of them across all/selected entities. <p>If the query parameters do not contain a "get" parameter the list of all metrics is returned. {@code [ { "id" : "X" } ] } <p>If the query parameters do contain a "get" parameter, a comma-separated list of metric names is expected as a value. {@code /metrics?get=X,Y} The handler will then return a list containing the values of the requested metrics. {@code [ { "id" : "X", "value" : "S" }, { "id" : "Y", "value" : "T" } ] } <p>The "agg" query parameter is used to define which aggregates should be calculated. Available aggregations are "sum", "max", "min" and "avg". If the parameter is not specified, all aggregations will be returned. {@code /metrics?get=X,Y&agg=min,max} The handler will then return a list of objects containing the aggregations for the requested metrics. {@code [ { "id" : "X", "min", "1", "max", "2" }, { "id" : "Y", "min", "4", "max", "10"}]}
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AbstractAggregatingMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AbstractAggregatingMetricsHandler.java
Apache-2.0
/**
 * Returns the union of all metric names available in the given component stores.
 *
 * @param stores component metric stores to scan
 * @return de-duplicated collection of available metric names
 */
private static Collection<String> getAvailableMetrics(
        Collection<? extends MetricStore.ComponentMetricStore> stores) {
    Set<String> metricNames = CollectionUtil.newHashSetWithExpectedSize(32);
    for (MetricStore.ComponentMetricStore componentStore : stores) {
        metricNames.addAll(componentStore.metrics.keySet());
    }
    return metricNames;
}
Returns a JSON string containing a list of all available metrics in the given stores. Effectively this method maps the union of all key-sets to JSON. @param stores metrics @return JSON string containing a list of all available metrics
getAvailableMetrics
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AbstractAggregatingMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AbstractAggregatingMetricsHandler.java
Apache-2.0
@Override
protected final CompletableFuture<MetricCollectionResponseBody> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway)
        throws RestHandlerException {
    // Refresh the metric store before reading from it.
    metricFetcher.update();

    // Subclasses resolve which component (job, task, TM, ...) the request targets.
    final MetricStore.ComponentMetricStore componentMetricStore =
            getComponentMetricStore(request, metricFetcher.getMetricStore());

    if (componentMetricStore == null || componentMetricStore.metrics == null) {
        // Unknown component or no metrics reported yet: empty list, not an error.
        return CompletableFuture.completedFuture(
                new MetricCollectionResponseBody(Collections.emptyList()));
    }

    final Set<String> requestedMetrics =
            new HashSet<>(request.getQueryParameter(MetricsFilterParameter.class));

    if (requestedMetrics.isEmpty()) {
        // No "get" parameter: list the available metric names.
        return CompletableFuture.completedFuture(
                new MetricCollectionResponseBody(getAvailableMetrics(componentMetricStore)));
    } else {
        // "get" parameter present: return the values of the requested metrics.
        final List<Metric> metrics = getRequestedMetrics(componentMetricStore, requestedMetrics);
        return CompletableFuture.completedFuture(new MetricCollectionResponseBody(metrics));
    }
}
Request handler that returns for a given task a list of all available metrics or the values for a set of metrics. <p>If the query parameters do not contain a "get" parameter the list of all metrics is returned. {@code {"available": [ { "name" : "X", "id" : "X" } ] } } <p>If the query parameters do contain a "get" parameter, a comma-separated list of metric names is expected as a value. {@code /metrics?get=X,Y} The handler will then return a list containing the values of the requested metrics. {@code [ { "id" : "X", "value" : "S" }, { "id" : "Y", "value" : "T" } ] } @param <M> Type of the concrete {@link MessageParameters}
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AbstractMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AbstractMetricsHandler.java
Apache-2.0
@Nonnull
@Override
Collection<? extends MetricStore.ComponentMetricStore> getStores(
        MetricStore store, HandlerRequest<EmptyRequestBody> request) {
    List<JobID> jobs = request.getQueryParameter(JobsFilterQueryParameter.class);
    // Without a "jobs" filter, aggregate over every known job.
    if (jobs.isEmpty()) {
        return store.getJobs().values();
    }
    // Otherwise collect the stores of the requested jobs, silently skipping
    // job IDs the store does not know about.
    Collection<MetricStore.ComponentMetricStore> selectedStores = new ArrayList<>(jobs.size());
    for (JobID jobId : jobs) {
        MetricStore.ComponentMetricStore jobStore = store.getJobMetricStore(jobId.toString());
        if (jobStore != null) {
            selectedStores.add(jobStore);
        }
    }
    return selectedStores;
}
Request handler that returns, aggregated across jobs, a list of all available metrics or the values for a set of metrics. <p>Specific jobs can be selected for aggregation by specifying a comma-separated list of job IDs. {@code /metrics?get=X,Y&jobs=A,B}
getStores
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AggregatingJobsMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AggregatingJobsMetricsHandler.java
Apache-2.0
@Nonnull
@Override
Collection<? extends MetricStore.ComponentMetricStore> getStores(
        MetricStore store, HandlerRequest<EmptyRequestBody> request) {
    List<ResourceID> taskmanagers =
            request.getQueryParameter(TaskManagersFilterQueryParameter.class);
    // Without a "taskmanagers" filter, aggregate over every known TaskManager.
    if (taskmanagers.isEmpty()) {
        return store.getTaskManagers().values();
    }
    // Otherwise collect the stores of the requested TaskManagers, silently
    // skipping IDs the store does not know about.
    Collection<MetricStore.TaskManagerMetricStore> selectedStores =
            new ArrayList<>(taskmanagers.size());
    for (ResourceID taskManagerId : taskmanagers) {
        MetricStore.TaskManagerMetricStore taskManagerStore =
                store.getTaskManagerMetricStore(taskManagerId.getResourceIdString());
        if (taskManagerStore != null) {
            selectedStores.add(taskManagerStore);
        }
    }
    return selectedStores;
}
Request handler that returns, aggregated across task managers, a list of all available metrics or the values for a set of metrics. <p>Specific taskmanagers can be selected for aggregation by specifying a comma-separated list of taskmanager IDs. {@code /metrics?get=X,Y&taskmanagers=A,B}
getStores
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AggregatingTaskManagersMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/AggregatingTaskManagersMetricsHandler.java
Apache-2.0
@Nullable
@Override
protected MetricStore.ComponentMetricStore getComponentMetricStore(
        HandlerRequest<EmptyRequestBody> request, MetricStore metricStore) {
    // Look up the JobManager-side operator metric store for the job/vertex
    // identified by the URL path (null if unknown).
    return metricStore.getJobManagerOperatorMetricStore(
            request.getPathParameter(JobIDPathParameter.class).toString(),
            request.getPathParameter(JobVertexIdPathParameter.class).toString());
}
Handler that returns job manager operator metrics.
getComponentMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobManagerOperatorMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobManagerOperatorMetricsHandler.java
Apache-2.0
@Nullable
@Override
protected MetricStore.ComponentMetricStore getComponentMetricStore(
        final HandlerRequest<EmptyRequestBody> request, final MetricStore metricStore) {
    // Look up the job-level metric store for the job named in the URL path
    // (null if the job is unknown to the store).
    return metricStore.getJobMetricStore(
            request.getPathParameter(JobIDPathParameter.class).toString());
}
Request handler that returns for a given job a list of all available metrics or the values for a set of metrics.
getComponentMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobMetricsHandler.java
Apache-2.0
@Override
protected MetricStore.ComponentMetricStore getComponentMetricStore(
        HandlerRequest<EmptyRequestBody> request, MetricStore metricStore) {
    // Resolve the task-level metric store for the job/vertex named in the URL path.
    return metricStore.getTaskMetricStore(
            request.getPathParameter(JobIDPathParameter.class).toString(),
            request.getPathParameter(JobVertexIdPathParameter.class).toString());
}
Handler that returns metrics given a {@link JobID} and {@link JobVertexID}. @see MetricStore#getTaskMetricStore(String, String) @deprecated This class is subsumed by {@link SubtaskMetricsHandler} and is only kept for backwards-compatibility.
getComponentMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobVertexMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobVertexMetricsHandler.java
Apache-2.0
@Override
protected MetricCollectionResponseBody handleRequest(
        HandlerRequest<EmptyRequestBody> request, AccessExecutionJobVertex jobVertex)
        throws RestHandlerException {
    String jobID = request.getPathParameter(JobIDPathParameter.class).toString();
    String taskID = jobVertex.getJobVertexId().toString();
    // Refresh the metric store before reading from it.
    metricFetcher.update();
    MetricStore.TaskMetricStore taskMetricStore =
            metricFetcher.getMetricStore().getTaskMetricStore(jobID, taskID);
    if (taskMetricStore == null) {
        // No metrics reported for this task (yet): empty response, not an error.
        return new MetricCollectionResponseBody(Collections.emptyList());
    }

    AccessExecutionVertex[] taskVertices = jobVertex.getTaskVertices();
    List<Metric> metrics = new ArrayList<>(taskVertices.length);

    // One input-watermark metric per subtask, keyed "<subtaskIndex>.<metricName>";
    // subtasks without a reported value are skipped.
    for (AccessExecutionVertex taskVertex : taskVertices) {
        String id =
                taskVertex.getParallelSubtaskIndex()
                        + "."
                        + MetricNames.IO_CURRENT_INPUT_WATERMARK;
        String watermarkValue = taskMetricStore.getMetric(id);
        if (watermarkValue != null) {
            metrics.add(new Metric(id, watermarkValue));
        }
    }

    return new MetricCollectionResponseBody(metrics);
}
Handler that returns the watermarks given a {@link JobID} and {@link JobVertexID}.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobVertexWatermarksHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/JobVertexWatermarksHandler.java
Apache-2.0
@Nullable
@Override
protected MetricStore.ComponentMetricStore getComponentMetricStore(
        HandlerRequest<EmptyRequestBody> request, MetricStore metricStore) {
    // Resolve job, vertex and subtask index from the URL path, then look up the
    // corresponding subtask-level metric store (null if unknown).
    final JobID jobId = request.getPathParameter(JobIDPathParameter.class);
    final JobVertexID vertexId = request.getPathParameter(JobVertexIdPathParameter.class);
    final int subtaskIndex = request.getPathParameter(SubtaskIndexPathParameter.class);
    return metricStore.getSubtaskMetricStore(
            jobId.toString(), vertexId.toString(), subtaskIndex);
}
Handler that returns subtask metrics. @see MetricStore#getSubtaskMetricStore(String, String, int)
getComponentMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/SubtaskMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/SubtaskMetricsHandler.java
Apache-2.0
/**
 * Looks up the metric store of the TaskManager addressed by the request path.
 *
 * @return the TaskManager's metric store, or {@code null} if the TaskManager is unknown
 */
@Nullable
@Override
protected MetricStore.ComponentMetricStore getComponentMetricStore(
        final HandlerRequest<EmptyRequestBody> request, final MetricStore metricStore) {
    final String taskManagerId =
            request.getPathParameter(TaskManagerIdPathParameter.class).toString();
    return metricStore.getTaskManagerMetricStore(taskManagerId);
}
Handler that returns TaskManager metrics. @see MetricStore#getTaskManagerMetricStore(String)
getComponentMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/TaskManagerMetricsHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/metrics/TaskManagerMetricsHandler.java
Apache-2.0
/** Creates the exception returned while the rescaling feature is disabled (see FLINK-12312). */
private static RestHandlerException featureDisabledException() {
    final String reason = "Rescaling is temporarily disabled. See FLINK-12312.";
    return new RestHandlerException(reason, HttpResponseStatus.SERVICE_UNAVAILABLE);
}
Rest handler to trigger and poll the rescaling of a running job.
featureDisabledException
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingHandlers.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingHandlers.java
Apache-2.0
/** Returns the path parameters (job id and trigger id) used to poll a rescaling operation. */
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
    final Collection<MessagePathParameter<?>> pathParameters =
            Arrays.asList(jobPathParameter, triggerIdPathParameter);
    return pathParameters;
}
{@link MessageParameters} for polling the status of a rescaling operation.
getPathParameters
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingStatusMessageParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingStatusMessageParameters.java
Apache-2.0
/** Returns the single query parameter (the target parallelism) of a rescaling trigger. */
@Override
public Collection<MessageQueryParameter<?>> getQueryParameters() {
    final Collection<MessageQueryParameter<?>> queryParameters =
            Collections.singleton(rescalingParallelismQueryParameter);
    return queryParameters;
}
{@link MessageParameters} for triggering the rescaling of a job.
getQueryParameters
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingTriggerMessageParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingTriggerMessageParameters.java
Apache-2.0
public static void sendNotModified(ChannelHandlerContext ctx) { FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, NOT_MODIFIED); setDateHeader(response); // close the connection as soon as the error message is sent. ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE); }
Send the "304 Not Modified" response. This response can be used when the file timestamp is the same as what the browser is sending up. @param ctx The channel context to write the response to.
sendNotModified
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
Apache-2.0
/**
 * Stamps the current time into the response's "Date" header, formatted as an HTTP date in GMT.
 *
 * @param response HTTP response to decorate
 */
public static void setDateHeader(FullHttpResponse response) {
    // SimpleDateFormat is not thread-safe, hence a fresh instance per call.
    final SimpleDateFormat formatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
    formatter.setTimeZone(GMT_TIMEZONE);

    final Calendar now = new GregorianCalendar();
    response.headers().set(DATE, formatter.format(now.getTime()));
}
Sets the "date" header for the HTTP response. @param response HTTP response
setDateHeader
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
Apache-2.0
public static void setDateAndCacheHeaders(HttpResponse response, File fileToCache) { SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US); dateFormatter.setTimeZone(GMT_TIMEZONE); // date header Calendar time = new GregorianCalendar(); response.headers().set(DATE, dateFormatter.format(time.getTime())); // cache headers time.add(Calendar.SECOND, HTTP_CACHE_SECONDS); response.headers().set(EXPIRES, dateFormatter.format(time.getTime())); response.headers().set(CACHE_CONTROL, "private, max-age=" + HTTP_CACHE_SECONDS); response.headers() .set(LAST_MODIFIED, dateFormatter.format(new Date(fileToCache.lastModified()))); }
Sets the "date" and "cache" headers for the HTTP Response. @param response The HTTP response object. @param fileToCache File to extract the modification timestamp from.
setDateAndCacheHeaders
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
Apache-2.0
/**
 * Sets "Content-Type" based on the file name's extension, falling back to the default MIME
 * type when the extension is not recognized.
 *
 * @param response HTTP response to decorate
 * @param file file whose name determines the content type
 */
public static void setContentTypeHeader(HttpResponse response, File file) {
    String mimeType = MimeTypes.getMimeTypeForFileName(file.getName());
    if (mimeType == null) {
        mimeType = MimeTypes.getDefaultMimeType();
    }
    response.headers().set(CONTENT_TYPE, mimeType);
}
Sets the content type header for the HTTP Response. @param response HTTP response @param file file to extract content type
setContentTypeHeader
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/files/StaticFileServerHandler.java
Apache-2.0
/**
 * Returns the {@link MetricStore} containing all metrics fetched so far.
 *
 * <p>The store's contents are only as fresh as the last completed fetch.
 */
@Override
public MetricStore getMetricStore() {
    return metrics;
}
Returns the MetricStore containing all stored metrics. @return MetricStore containing all stored metrics.
getMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricFetcherImpl.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricFetcherImpl.java
Apache-2.0
/**
 * Resolves the metric query service gateway at the given address and then queries it.
 *
 * @param queryServiceAddress address of the metric query service to contact
 * @return future completing once the metrics have been queried
 */
private CompletableFuture<Void> retrieveAndQueryMetrics(String queryServiceAddress) {
    LOG.debug("Retrieve metric query service gateway for {}", queryServiceAddress);
    return queryServiceRetriever
            .retrieveService(queryServiceAddress)
            .thenComposeAsync(this::queryMetrics, executor);
}
Retrieves and queries the specified QueryServiceGateway. @param queryServiceAddress specifying the QueryServiceGateway
retrieveAndQueryMetrics
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricFetcherImpl.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricFetcherImpl.java
Apache-2.0
/**
 * Adds all given metric dumps to this store.
 *
 * @param metricDumps the dumps to add
 */
synchronized void addAll(List<MetricDump> metricDumps) {
    metricDumps.forEach(this::add);
}
Add metric dumps to the store. @param metricDumps to add.
addAll
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
Apache-2.0
/**
 * Returns an unmodifiable view of the {@link TaskManagerMetricStore} for the given TaskManager
 * ID.
 *
 * @param tmID taskmanager ID
 * @return the store, or {@code null} if the ID is {@code null} or no store exists for it
 */
public synchronized TaskManagerMetricStore getTaskManagerMetricStore(String tmID) {
    if (tmID == null) {
        return null;
    }
    return TaskManagerMetricStore.unmodifiable(taskManagers.get(tmID));
}
Returns the {@link TaskManagerMetricStore} for the given taskmanager ID. @param tmID taskmanager ID @return TaskManagerMetricStore for the given ID, or null if no store for the given argument exists
getTaskManagerMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
Apache-2.0
/**
 * Returns an unmodifiable view of the {@link ComponentMetricStore} for the given job ID.
 *
 * @param jobID job ID
 * @return the store, or {@code null} if the ID is {@code null} or no store exists for it
 */
public synchronized ComponentMetricStore getJobMetricStore(String jobID) {
    if (jobID == null) {
        return null;
    }
    return ComponentMetricStore.unmodifiable(jobs.get(jobID));
}
Returns the {@link ComponentMetricStore} for the given job ID. @param jobID job ID @return ComponentMetricStore for the given ID, or null if no store for the given argument exists
getJobMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
Apache-2.0
/**
 * Returns an unmodifiable view of the {@link TaskMetricStore} for the given job/task IDs.
 *
 * @param jobID job ID
 * @param taskID task ID
 * @return the store, or {@code null} if either ID is {@code null} or no such store exists
 */
public synchronized TaskMetricStore getTaskMetricStore(String jobID, String taskID) {
    if (jobID == null || taskID == null) {
        return null;
    }
    JobMetricStore job = jobs.get(jobID);
    if (job == null) {
        return null;
    }
    return TaskMetricStore.unmodifiable(job.getTaskMetricStore(taskID));
}
Returns the {@link TaskMetricStore} for the given job/task ID. @param jobID job ID @param taskID task ID @return TaskMetricStore for given IDs, or null if no store for the given arguments exists
getTaskMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
Apache-2.0
/**
 * Wraps the given store so that neither its metrics nor its garbage-collector names can be
 * modified.
 *
 * @return the unmodifiable wrapper, or {@code null} for {@code null} input
 */
private static TaskManagerMetricStore unmodifiable(TaskManagerMetricStore source) {
    return source == null
            ? null
            : new TaskManagerMetricStore(
                    unmodifiableMap(source.metrics),
                    unmodifiableSet(source.garbageCollectorNames));
}
Sub-structure containing metrics of a single TaskManager.
unmodifiable
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
Apache-2.0
/**
 * Returns the store of the given task.
 *
 * @return the store, or {@code null} if the ID is {@code null} or no store exists for it
 */
public TaskMetricStore getTaskMetricStore(String taskID) {
    if (taskID == null) {
        return null;
    }
    return tasks.get(taskID);
}
Sub-structure containing metrics of a single Job.
getTaskMetricStore
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/metrics/MetricStore.java
Apache-2.0
/** Returns an unmodifiable view of all routes in this router, keyed by {@link PathPattern}. */
public Map<PathPattern, T> routes() {
    return Collections.unmodifiableMap(routes);
}
Returns all routes in this router, an unmodifiable map of {@code PathPattern -> Target}.
routes
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MethodlessRouter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MethodlessRouter.java
Apache-2.0
/**
 * Registers the given pattern with its target. A path pattern can only point to one target;
 * this method is a no-op if an identical pattern has already been added.
 *
 * @return this router, for chaining
 */
public MethodlessRouter<T> addRoute(String pathPattern, T target) {
    final PathPattern parsed = new PathPattern(pathPattern);
    if (!routes.containsKey(parsed)) {
        routes.put(parsed, target);
    }
    return this;
}
This method does nothing if the path pattern has already been added. A path pattern can only point to one target.
addRoute
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MethodlessRouter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MethodlessRouter.java
Apache-2.0
public RouteResult<T> route( String uri, String decodedPath, Map<String, List<String>> queryParameters, String[] pathTokens) { // Optimize: reuse requestPathTokens and pathParams in the loop Map<String, String> pathParams = new HashMap<>(); for (Entry<PathPattern, T> entry : routes.entrySet()) { PathPattern pattern = entry.getKey(); if (pattern.match(pathTokens, pathParams)) { T target = entry.getValue(); return new RouteResult<T>(uri, decodedPath, pathParams, queryParameters, target); } // Reset for the next try pathParams.clear(); } return null; }
@return {@code null} if no match
route
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MethodlessRouter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MethodlessRouter.java
Apache-2.0
/**
 * Returns {@code true} if the handler at the provided {@code requestUri} endpoint accepts POST
 * requests.
 *
 * @param requestUri URI of the request
 */
public boolean isPostRoute(String requestUri) {
    return checkRoutes(requestUri, postRoutes);
}
Returns <code>true</code> if the handler at the provided <code>requestUri</code> endpoint accepts POST requests. @param requestUri URI for the request
isPostRoute
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MultipartRoutes.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/MultipartRoutes.java
Apache-2.0
/**
 * Strips leading and trailing {@code '/'} characters from the given path.
 *
 * @param path path to trim, must not be {@code null}
 * @return the trimmed path; the empty string if the path consists only of slashes
 */
public static String removeSlashesAtBothEnds(String path) {
    checkNotNull(path, "path");

    final int length = path.length();
    int start = 0;
    while (start < length && path.charAt(start) == '/') {
        start++;
    }
    if (start == length) {
        // Either the input was empty (return it unchanged) or it was nothing but slashes.
        return start == 0 ? path : "";
    }

    // Safe without a lower bound: path.charAt(start) is not '/', so the loop must stop.
    int end = length;
    while (path.charAt(end - 1) == '/') {
        end--;
    }
    return path.substring(start, end);
}
The pattern must not contain query, example: {@code constant1/constant2?foo=bar}. <p>The pattern will be stored without slashes at both ends.
removeSlashesAtBothEnds
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/PathPattern.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/PathPattern.java
Apache-2.0
/** Returns the pattern as given to the constructor, with slashes at both ends removed. */
public String pattern() {
    return pattern;
}
Returns the pattern given at the constructor, without slashes at both ends.
pattern
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/PathPattern.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/PathPattern.java
Apache-2.0
public boolean match(String[] requestPathTokens, Map<String, String> params) { if (tokens.length == requestPathTokens.length) { for (int i = 0; i < tokens.length; i++) { String key = tokens[i]; String value = requestPathTokens[i]; if (key.length() > 0 && key.charAt(0) == ':') { // This is a placeholder params.put(key.substring(1), value); } else if (!key.equals(value)) { // This is a constant return false; } } return true; } if (tokens.length > 0 && tokens[tokens.length - 1].equals(":*") && tokens.length <= requestPathTokens.length) { // The first part for (int i = 0; i < tokens.length - 2; i++) { String key = tokens[i]; String value = requestPathTokens[i]; if (key.length() > 0 && key.charAt(0) == ':') { // This is a placeholder params.put(key.substring(1), value); } else if (!key.equals(value)) { // This is a constant return false; } } // The last :* part StringBuilder b = new StringBuilder(requestPathTokens[tokens.length - 1]); for (int i = tokens.length; i < requestPathTokens.length; i++) { b.append('/'); b.append(requestPathTokens[i]); } params.put("*", b.toString()); return true; } return false; }
{@code params} will be updated with params embedded in the request path. <p>This method signature is designed so that {@code requestPathTokens} and {@code params} can be created only once then reused, to optimize for performance when a large number of path patterns need to be matched. @return {@code false} if not matched; in this case params should be reset
match
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/PathPattern.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/PathPattern.java
Apache-2.0
/**
 * Helper for {@code toString()}: renders a {@link Class} target by its plain name, e.g.
 * "io.netty.example.http.router.HttpRouterServerHandler" instead of
 * "class io.netty.example.http.router.HttpRouterServerHandler".
 */
private static String targetToString(Object target) {
    return target instanceof Class ? ((Class<?>) target).getName() : target.toString();
}
Helper for toString. <p>For example, returns "io.netty.example.http.router.HttpRouterServerHandler" instead of "class io.netty.example.http.router.HttpRouterServerHandler"
targetToString
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
Apache-2.0
/** Returns the fallback target used when {@code route(HttpMethod, String)} finds no match. */
public T notFound() {
    return notFound;
}
Returns the fallback target for use when there's no match at {@link #route(HttpMethod, String)}.
notFound
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
Apache-2.0
/** Returns the total number of routes in this router, across all HTTP methods. */
public int size() {
    int methodSpecificRoutes = 0;
    for (MethodlessRouter<T> methodRouter : routers.values()) {
        methodSpecificRoutes += methodRouter.size();
    }
    return methodSpecificRoutes + anyMethodRouter.size();
}
Returns the number of routes in this router.
size
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
Apache-2.0
/**
 * Adds a route for the given method. A path pattern can only point to one target; this method
 * does nothing if the pattern has already been added.
 *
 * @return this router, for chaining
 */
public Router<T> addRoute(HttpMethod method, String pathPattern, T target) {
    final MethodlessRouter<T> methodRouter = getMethodlessRouter(method);
    methodRouter.addRoute(pathPattern, target);
    return this;
}
Add route. <p>A path pattern can only point to one target. This method does nothing if the pattern has already been added.
addRoute
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
Apache-2.0
/**
 * Returns the HTTP methods that can serve the given URI.
 *
 * <p>For {@code OPTIONS *}, use {@link #allAllowedMethods()} instead of this method.
 */
public Set<HttpMethod> allowedMethods(String uri) {
    final QueryStringDecoder decoder = new QueryStringDecoder(uri);
    final String[] tokens = PathPattern.removeSlashesAtBothEnds(decoder.path()).split("/");

    // A route registered without a method restriction matches every method.
    if (anyMethodRouter.anyMatched(tokens)) {
        return allAllowedMethods();
    }

    final Set<HttpMethod> methods = new HashSet<HttpMethod>(routers.size());
    for (Map.Entry<HttpMethod, MethodlessRouter<T>> entry : routers.entrySet()) {
        if (entry.getValue().anyMatched(tokens)) {
            methods.add(entry.getKey());
        }
    }
    return methods;
}
Returns allowed methods for a specific URI. <p>For {@code OPTIONS *}, use {@link #allAllowedMethods()} instead of this method.
allowedMethods
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
Apache-2.0
/** Returns all methods that this router handles; intended for answering {@code OPTIONS *}. */
public Set<HttpMethod> allAllowedMethods() {
    if (anyMethodRouter.size() == 0) {
        // Only method-specific routes exist; their keys are exactly the supported methods.
        return new HashSet<HttpMethod>(routers.keySet());
    }

    // At least one route accepts any method, so every standard method is allowed.
    final Set<HttpMethod> all = new HashSet<HttpMethod>(9);
    all.add(HttpMethod.CONNECT);
    all.add(HttpMethod.DELETE);
    all.add(HttpMethod.GET);
    all.add(HttpMethod.HEAD);
    all.add(HttpMethod.OPTIONS);
    all.add(HttpMethod.PATCH);
    all.add(HttpMethod.POST);
    all.add(HttpMethod.PUT);
    all.add(HttpMethod.TRACE);
    return all;
}
Returns all methods that this router handles. For {@code OPTIONS *}.
allAllowedMethods
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/Router.java
Apache-2.0
/** Returns all params embedded in the request path (as opposed to query parameters). */
public Map<String, String> pathParams() {
    return pathParams;
}
Returns all params embedded in the request path.
pathParams
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
Apache-2.0
/**
 * Extracts the first value registered for {@code name} in the query parameters.
 *
 * @return {@code null} if there's no match
 */
public String queryParam(String name) {
    final List<String> values = queryParams.get(name);
    if (values == null) {
        return null;
    }
    return values.get(0);
}
Extracts the first matching param in {@code queryParams}. @return {@code null} if there's no match
queryParam
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
Apache-2.0
/**
 * Looks the param up in the path params first, then falls back to the first matching query
 * parameter.
 *
 * @return {@code null} if there's no match
 */
public String param(String name) {
    final String fromPath = pathParams.get(name);
    if (fromPath != null) {
        return fromPath;
    }
    return queryParam(name);
}
Extracts the param in {@code pathParams} first, then falls back to the first matching param in {@code queryParams}. @return {@code null} if there's no match
param
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
Apache-2.0
/**
 * Extracts all values registered for {@code name} in both the path and query params.
 *
 * @return an unmodifiable list, empty if there's no match; query-parameter values come first,
 *     followed by the path-parameter value (if any)
 */
public List<String> params(String name) {
    final List<String> fromQuery = queryParams.get(name);
    final String fromPath = pathParams.get(name);

    if (fromQuery == null) {
        return fromPath == null
                ? Collections.<String>emptyList()
                : Collections.singletonList(fromPath);
    }
    if (fromPath == null) {
        return Collections.unmodifiableList(fromQuery);
    }

    final List<String> combined = new ArrayList<String>(fromQuery.size() + 1);
    combined.addAll(fromQuery);
    combined.add(fromPath);
    return Collections.unmodifiableList(combined);
}
Extracts all params in {@code pathParams} and {@code queryParams} matching the name. @return Unmodifiable list; the list is empty if there's no match
params
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/router/RouteResult.java
Apache-2.0
/**
 * Asks the ResourceManager to have the addressed TaskExecutor upload its log file and returns
 * the transient BLOB key of the upload.
 */
@Override
protected CompletableFuture<TransientBlobKey> requestFileUpload(
        ResourceManagerGateway resourceManagerGateway,
        Tuple2<ResourceID, String> taskManagerIdAndFileName) {
    // Only f0 (the TaskManager's resource id) is used; the requested file type is always LOG.
    return resourceManagerGateway.requestTaskManagerFileUploadByType(
            taskManagerIdAndFileName.f0, FileType.LOG, timeout);
}
Rest handler which serves the log files from {@link TaskExecutor}.
requestFileUpload
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerLogFileHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerLogFileHandler.java
Apache-2.0
/**
 * Requests the list of log files (with sizes) from the TaskManager addressed by the request
 * path.
 *
 * <p>An unknown TaskManager ID is translated into a {@code 404 Not Found}; any other failure
 * is propagated as-is.
 */
@Override
protected CompletableFuture<LogListInfo> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class);
    final ResourceManagerGateway resourceManagerGateway =
            getResourceManagerGateway(resourceManagerGatewayRetriever);
    final CompletableFuture<Collection<LogInfo>> logsWithLengthFuture =
            resourceManagerGateway.requestTaskManagerLogList(taskManagerId, timeout);
    return logsWithLengthFuture
            .thenApply(LogListInfo::new)
            .exceptionally(
                    (throwable) -> {
                        final Throwable strippedThrowable =
                                ExceptionUtils.stripCompletionException(throwable);
                        if (strippedThrowable instanceof UnknownTaskExecutorException) {
                            // Map "unknown TaskExecutor" to a REST-level 404.
                            throw new CompletionException(
                                    new RestHandlerException(
                                            "Could not find TaskExecutor " + taskManagerId,
                                            HttpResponseStatus.NOT_FOUND,
                                            strippedThrowable));
                        } else {
                            throw new CompletionException(throwable);
                        }
                    });
}
Handler which serves detailed TaskManager log list information.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerLogListHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerLogListHandler.java
Apache-2.0
/**
 * Triggers a profiling run on the TaskExecutor addressed by the request path.
 *
 * <p>The requested duration must lie in {@code (0, maxDurationInSeconds]}; otherwise the
 * returned future completes exceptionally with an {@code IllegalArgumentException}.
 */
@Override
protected CompletableFuture<ProfilingInfo> handleRequest(
        @Nonnull HandlerRequest<ProfilingRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    ProfilingRequestBody profilingRequest = request.getRequestBody();
    int duration = profilingRequest.getDuration();
    if (duration <= 0 || duration > maxDurationInSeconds) {
        return FutureUtils.completedExceptionally(
                new IllegalArgumentException(
                        String.format(
                                "`duration` must be set between (0s, %ds].",
                                maxDurationInSeconds)));
    }
    final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class);
    return gateway.requestProfiling(
            taskManagerId, duration, profilingRequest.getMode(), getTimeout());
}
Rest handler which serves the profiling service from a {@link TaskExecutor}.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerProfilingHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerProfilingHandler.java
Apache-2.0
/**
 * Requests the list of available profiling results from the TaskManager addressed by the
 * request path.
 *
 * <p>An unknown TaskManager ID is translated into a {@code 404 Not Found}; any other failure
 * is propagated as-is.
 */
@Override
protected CompletableFuture<ProfilingInfoList> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class);
    final ResourceManagerGateway resourceManagerGateway =
            getResourceManagerGateway(resourceManagerGatewayRetriever);
    final CompletableFuture<Collection<ProfilingInfo>> profilingListFuture =
            resourceManagerGateway.requestTaskManagerProfilingList(taskManagerId, getTimeout());
    return profilingListFuture
            .thenApply(ProfilingInfoList::new)
            .exceptionally(
                    (throwable) -> {
                        final Throwable strippedThrowable =
                                ExceptionUtils.stripCompletionException(throwable);
                        if (strippedThrowable instanceof UnknownTaskExecutorException) {
                            // Map "unknown TaskExecutor" to a REST-level 404.
                            throw new CompletionException(
                                    new RestHandlerException(
                                            "Could not find TaskExecutor " + taskManagerId,
                                            HttpResponseStatus.NOT_FOUND,
                                            strippedThrowable));
                        } else {
                            throw new CompletionException(throwable);
                        }
                    });
}
Handler which serves detailed TaskManager profiling list information.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerProfilingListHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerProfilingListHandler.java
Apache-2.0
/** Returns an overview over all TaskManagers currently registered with the cluster. */
@Override
protected CompletableFuture<TaskManagersInfo> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    return gateway.requestTaskManagerInfo(timeout).thenApply(TaskManagersInfo::new);
}
Returns an overview over all registered TaskManagers of the cluster.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagersHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagersHandler.java
Apache-2.0
/** Serves the thread dump of the TaskExecutor addressed by the request path. */
@Override
protected CompletableFuture<ThreadDumpInfo> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class);
    return gateway.requestThreadDump(taskManagerId, timeout);
}
Rest handler which serves the thread dump info from a {@link TaskExecutor}.
handleRequest
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerThreadDumpHandler.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/taskmanager/TaskManagerThreadDumpHandler.java
Apache-2.0
/**
 * Resolves a setting from, in order of precedence, the request body, a (deprecated) query
 * parameter, or the given default value.
 *
 * @param requestValue value taken from the request body, may be {@code null}
 * @param queryParameterExtractor supplies the query-parameter value, may yield {@code null}
 * @param defaultValue returned when neither source provides a value
 * @param log receives a deprecation warning when the query-parameter fallback is used
 */
public static <T> T fromRequestBodyOrQueryParameter(
        T requestValue,
        SupplierWithException<T, RestHandlerException> queryParameterExtractor,
        T defaultValue,
        Logger log)
        throws RestHandlerException {
    if (requestValue != null) {
        return requestValue;
    }

    final T queryParameterValue = queryParameterExtractor.get();
    if (queryParameterValue == null) {
        return defaultValue;
    }

    log.warn(
            "Configuring the job submission via query parameters is deprecated."
                    + " Please migrate to submitting a JSON request instead.");
    return queryParameterValue;
}
Returns {@code requestValue} if it is not null, otherwise returns the query parameter value if it is not null, otherwise returns the default value.
fromRequestBodyOrQueryParameter
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerRequestUtils.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerRequestUtils.java
Apache-2.0
/**
 * Serializes the given response to JSON and sends it with the given status code.
 *
 * <p>If JSON serialization fails, a {@code 500 Internal Server Error} is sent instead.
 *
 * @param channelHandlerContext identifying the open channel
 * @param httpRequest originating http request
 * @param response which should be sent
 * @param statusCode of the message to send
 * @param headers additional header values
 * @param <P> type of the response
 */
public static <P extends ResponseBody> CompletableFuture<Void> sendResponse(
        ChannelHandlerContext channelHandlerContext,
        HttpRequest httpRequest,
        P response,
        HttpResponseStatus statusCode,
        Map<String, String> headers) {
    StringWriter sw = new StringWriter();
    try {
        mapper.writeValue(sw, response);
    } catch (IOException ioe) {
        LOG.error("Internal server error. Could not map response to JSON.", ioe);
        return sendErrorResponse(
                channelHandlerContext,
                httpRequest,
                new ErrorResponseBody("Internal server error. Could not map response to JSON."),
                HttpResponseStatus.INTERNAL_SERVER_ERROR,
                headers);
    }
    return sendResponse(channelHandlerContext, httpRequest, sw.toString(), statusCode, headers);
}
Sends the given response and status code to the given channel. @param channelHandlerContext identifying the open channel @param httpRequest originating http request @param response which should be sent @param statusCode of the message to send @param headers additional header values @param <P> type of the response
sendResponse
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
Apache-2.0
/**
 * Sends the given error response and status code to the given channel; keep-alive is derived
 * from the originating request.
 *
 * @param channelHandlerContext identifying the open channel
 * @param httpRequest originating http request
 * @param errorMessage which should be sent
 * @param statusCode of the message to send
 * @param headers additional header values
 */
public static CompletableFuture<Void> sendErrorResponse(
        ChannelHandlerContext channelHandlerContext,
        HttpRequest httpRequest,
        ErrorResponseBody errorMessage,
        HttpResponseStatus statusCode,
        Map<String, String> headers) {
    return sendErrorResponse(
            channelHandlerContext,
            HttpUtil.isKeepAlive(httpRequest),
            errorMessage,
            statusCode,
            headers);
}
Sends the given error response and status code to the given channel. @param channelHandlerContext identifying the open channel @param httpRequest originating http request @param errorMessage which should be sent @param statusCode of the message to send @param headers additional header values
sendErrorResponse
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
Apache-2.0
public static CompletableFuture<Void> sendErrorResponse( ChannelHandlerContext channelHandlerContext, boolean keepAlive, ErrorResponseBody errorMessage, HttpResponseStatus statusCode, Map<String, String> headers) { StringWriter sw = new StringWriter(); try { mapper.writeValue(sw, errorMessage); } catch (IOException e) { // this should never happen LOG.error("Internal server error. Could not map error response to JSON.", e); return sendResponse( channelHandlerContext, keepAlive, "Internal server error. Could not map error response to JSON.", HttpResponseStatus.INTERNAL_SERVER_ERROR, headers); } return sendResponse(channelHandlerContext, keepAlive, sw.toString(), statusCode, headers); }
Sends the given error response and status code to the given channel. @param channelHandlerContext identifying the open channel @param keepAlive If the connection should be kept alive. @param errorMessage which should be sent @param statusCode of the message to send @param headers additional header values
sendErrorResponse
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
Apache-2.0
/**
 * Sends the given response and status code to the given channel.
 *
 * <p>Delegates to the keep-alive aware overload, deriving the keep-alive flag from the
 * originating request.
 *
 * @param channelHandlerContext identifying the open channel
 * @param httpRequest originating http request
 * @param message which should be sent
 * @param statusCode of the message to send
 * @param headers additional header values
 * @return future that completes once the response has been sent
 */
public static CompletableFuture<Void> sendResponse(
        @Nonnull ChannelHandlerContext channelHandlerContext,
        @Nonnull HttpRequest httpRequest,
        @Nonnull String message,
        @Nonnull HttpResponseStatus statusCode,
        @Nonnull Map<String, String> headers) {
    return sendResponse(
            channelHandlerContext,
            HttpUtil.isKeepAlive(httpRequest),
            message,
            statusCode,
            headers);
}
Sends the given response and status code to the given channel. @param channelHandlerContext identifying the open channel @param httpRequest originating http request @param message which should be sent @param statusCode of the message to send @param headers additional header values
sendResponse
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/HandlerUtils.java
Apache-2.0
/**
 * Gets the MIME type for the file with the given extension. If the mime type is not
 * recognized, this method returns null.
 *
 * @param fileExtension The file extension.
 * @return The MIME type, or {@code null}, if the file extension is not recognized.
 */
public static String getMimeTypeForExtension(String fileExtension) {
    // Lower-case with a fixed locale: the JVM default locale can map characters
    // unexpectedly (e.g. Turkish 'I' -> dotless 'ı'), which would break map lookups.
    return MIME_MAP.get(fileExtension.toLowerCase(java.util.Locale.ROOT));
}
Gets the MIME type for the file with the given extension. If the mime type is not recognized, this method returns null. @param fileExtension The file extension. @return The MIME type, or {@code null}, if the file extension is not recognized.
getMimeTypeForExtension
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MimeTypes.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MimeTypes.java
Apache-2.0
/**
 * Gets the MIME type for the file with the given name, by extension. This method tries to
 * extract the file extension and then uses {@link #getMimeTypeForExtension(String)} to
 * determine the MIME type. If the extension cannot be determined, or the extension is
 * unrecognized, this method returns {@code null}.
 *
 * @param fileName The file name.
 * @return The MIME type, or {@code null}, if the file's extension is not recognized.
 */
public static String getMimeTypeForFileName(String fileName) {
    final int dotPos = fileName.lastIndexOf('.');
    // A usable extension needs at least one character both before and after the dot.
    if (dotPos < 1 || dotPos >= fileName.length() - 1) {
        return null;
    }
    return getMimeTypeForExtension(fileName.substring(dotPos + 1));
}
Gets the MIME type for the file with the given name, by extension. This method tries to extract the file extension and then use the {@link #getMimeTypeForExtension(String)} to determine the MIME type. If the extension cannot be determined, or the extension is unrecognized, this method return {@code null}. @param fileName The file name. @return The MIME type, or {@code null}, if the file's extension is not recognized.
getMimeTypeForFileName
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MimeTypes.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MimeTypes.java
Apache-2.0
/**
 * Gets the default MIME type, which is {@code "application/octet-stream"}.
 *
 * @return The default MIME type.
 */
public static String getDefaultMimeType() {
    return DEFAULT_MIME_TYPE;
}
Gets the default MIME type, which is {@code "application/octet-stream"}. @return The default MIME type.
getDefaultMimeType
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MimeTypes.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MimeTypes.java
Apache-2.0
public void addIOMetrics( AccessExecution attempt, @Nullable MetricFetcher fetcher, String jobID, String taskID) { if (attempt.getState().isTerminal()) { IOMetrics ioMetrics = attempt.getIOMetrics(); if (ioMetrics != null) { // execAttempt is already finished, use final metrics stored in // ExecutionGraph this.numBytesIn += ioMetrics.getNumBytesIn(); this.numBytesOut += ioMetrics.getNumBytesOut(); this.numRecordsIn += ioMetrics.getNumRecordsIn(); this.numRecordsOut += ioMetrics.getNumRecordsOut(); this.accumulateBackPressuredTime += ioMetrics.getAccumulateBackPressuredTime(); this.accumulateIdleTime += ioMetrics.getAccumulateIdleTime(); if (Double.isNaN(ioMetrics.getAccumulateBusyTime())) { this.accumulateBusyTime = Double.NaN; } else { this.accumulateBusyTime += ioMetrics.getAccumulateBusyTime(); } } } else { // execAttempt is still running, use MetricQueryService instead if (fetcher != null) { fetcher.update(); MetricStore.ComponentMetricStore metrics = fetcher.getMetricStore() .getSubtaskAttemptMetricStore( jobID, taskID, attempt.getParallelSubtaskIndex(), attempt.getAttemptNumber()); if (metrics != null) { /** * We want to keep track of missing metrics to be able to make a difference * between 0 as a value and a missing value. In case a metric is missing for a * parallel instance of a task, we set the complete flag as false. 
*/ if (metrics.getMetric(MetricNames.IO_NUM_BYTES_IN) == null) { this.numBytesInComplete = false; } else { this.numBytesIn += Long.valueOf(metrics.getMetric(MetricNames.IO_NUM_BYTES_IN)); } if (metrics.getMetric(MetricNames.IO_NUM_BYTES_OUT) == null) { this.numBytesOutComplete = false; } else { this.numBytesOut += Long.valueOf(metrics.getMetric(MetricNames.IO_NUM_BYTES_OUT)); } if (metrics.getMetric(MetricNames.IO_NUM_RECORDS_IN) == null) { this.numRecordsInComplete = false; } else { this.numRecordsIn += Long.valueOf(metrics.getMetric(MetricNames.IO_NUM_RECORDS_IN)); } if (metrics.getMetric(MetricNames.IO_NUM_RECORDS_OUT) == null) { this.numRecordsOutComplete = false; } else { this.numRecordsOut += Long.valueOf(metrics.getMetric(MetricNames.IO_NUM_RECORDS_OUT)); } if (metrics.getMetric(MetricNames.ACC_TASK_BACK_PRESSURED_TIME) != null) { this.accumulateBackPressuredTime += Long.parseLong( metrics.getMetric( MetricNames.ACC_TASK_BACK_PRESSURED_TIME)); } if (metrics.getMetric(MetricNames.ACC_TASK_IDLE_TIME) != null) { this.accumulateIdleTime += Long.parseLong(metrics.getMetric(MetricNames.ACC_TASK_IDLE_TIME)); } if (metrics.getMetric(MetricNames.ACC_TASK_BUSY_TIME) != null) { double busyTime = Double.parseDouble( metrics.getMetric(MetricNames.ACC_TASK_BUSY_TIME)); if (Double.isNaN(busyTime)) { this.accumulateBusyTime = Double.NaN; } else { this.accumulateBusyTime += busyTime; } } } else { this.numBytesInComplete = false; this.numBytesOutComplete = false; this.numRecordsInComplete = false; this.numRecordsOutComplete = false; } } } }
Adds the IO metrics for the given attempt to this object. If the {@link AccessExecution} is in a terminal state the contained {@link IOMetrics} object is added. Otherwise the given {@link MetricFetcher} is used to retrieve the required metrics. @param attempt Attempt whose IO metrics should be added @param fetcher MetricFetcher to retrieve metrics for running jobs @param jobID JobID to which the attempt belongs @param taskID TaskID to which the attempt belongs
addIOMetrics
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MutableIOMetrics.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/util/MutableIOMetrics.java
Apache-2.0
/**
 * Sets the custom headers for the message.
 *
 * @param customHeaders A collection of custom headers.
 */
public void setCustomHeaders(Collection<HttpHeader> customHeaders) {
    this.customHeaders = customHeaders;
}
Sets the custom headers for the message. @param customHeaders A collection of custom headers.
setCustomHeaders
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/CustomHeadersDecorator.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/CustomHeadersDecorator.java
Apache-2.0
/**
 * Returns the shared singleton instance of the empty request body.
 *
 * @return the singleton {@code EmptyRequestBody} instance
 */
public static EmptyRequestBody getInstance() {
    return INSTANCE;
}
Request which do not have a request payload.
getInstance
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/EmptyRequestBody.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/EmptyRequestBody.java
Apache-2.0
/**
 * Returns the timestamp at which the client heartbeat is considered expired.
 *
 * <p>Excluded from JSON serialization via {@code @JsonIgnore}.
 *
 * @return the expiration timestamp
 */
@JsonIgnore
public long getExpiredTimestamp() {
    return expiredTimestamp;
}
{@link RequestBody} to report heartbeat for client.
getExpiredTimestamp
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobClientHeartbeatRequestBody.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobClientHeartbeatRequestBody.java
Apache-2.0
/**
 * Serializes the given {@link JobConfigInfo} as a JSON object with the job id, job name and,
 * when present, the execution config.
 *
 * @param jobConfigInfo the value to serialize
 * @param jsonGenerator generator to write the JSON to
 * @param serializerProvider provider (unused)
 * @throws IOException if writing to the generator fails
 */
@Override
public void serialize(
        JobConfigInfo jobConfigInfo,
        JsonGenerator jsonGenerator,
        SerializerProvider serializerProvider)
        throws IOException {
    final ExecutionConfigInfo executionConfigInfo = jobConfigInfo.getExecutionConfigInfo();

    jsonGenerator.writeStartObject();
    jsonGenerator.writeStringField(FIELD_NAME_JOB_ID, jobConfigInfo.getJobId().toString());
    jsonGenerator.writeStringField(FIELD_NAME_JOB_NAME, jobConfigInfo.getJobName());
    if (executionConfigInfo != null) {
        // Optional field: only emitted when an execution config is attached.
        jsonGenerator.writeObjectField(FIELD_NAME_EXECUTION_CONFIG, executionConfigInfo);
    }
    jsonGenerator.writeEndObject();
}
Json serializer for the {@link JobConfigInfo}.
serialize
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobConfigInfo.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobConfigInfo.java
Apache-2.0
/**
 * Deserializes a {@link JobConfigInfo} from its JSON representation. The execution config
 * field is optional and read with the strict object mapper when present.
 *
 * @param jsonParser parser positioned at the JSON object
 * @param deserializationContext context (unused)
 * @return the deserialized {@code JobConfigInfo}
 * @throws IOException if reading the JSON tree fails
 */
@Override
public JobConfigInfo deserialize(
        JsonParser jsonParser, DeserializationContext deserializationContext)
        throws IOException {
    final JsonNode rootNode = jsonParser.readValueAsTree();

    final JobID jobId = JobID.fromHexString(rootNode.get(FIELD_NAME_JOB_ID).asText());
    final String jobName = rootNode.get(FIELD_NAME_JOB_NAME).asText();

    ExecutionConfigInfo executionConfigInfo = null;
    if (rootNode.has(FIELD_NAME_EXECUTION_CONFIG)) {
        executionConfigInfo =
                RestMapperUtils.getStrictObjectMapper()
                        .treeToValue(
                                rootNode.get(FIELD_NAME_EXECUTION_CONFIG),
                                ExecutionConfigInfo.class);
    }

    return new JobConfigInfo(jobId, jobName, executionConfigInfo);
}
Json deserializer for the {@link JobConfigInfo}.
deserialize
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobConfigInfo.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobConfigInfo.java
Apache-2.0
/**
 * Deserializes arbitrary JSON into a {@link RawJson} by capturing the complete subtree as
 * its textual representation.
 */
@Override
public RawJson deserialize(
        JsonParser jsonParser, DeserializationContext deserializationContext)
        throws IOException {
    // Read the whole JSON subtree and keep it verbatim as a string.
    final JsonNode rootNode = jsonParser.readValueAsTree();
    return new RawJson(rootNode.toString());
}
Json deserializer for the {@link RawJson}.
deserialize
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobPlanInfo.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobPlanInfo.java
Apache-2.0
/** Returns the path parameters: the job id and the TaskManager id. */
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
    return Arrays.asList(jobPathParameter, taskManagerIdParameter);
}
Message parameters which require a job path parameter and a TaskManager id path parameter.
getPathParameters
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobTaskManagerMessageParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobTaskManagerMessageParameters.java
Apache-2.0
/** Returns the query parameters: the Flame Graph type and the subtask index. */
@Override
public Collection<MessageQueryParameter<?>> getQueryParameters() {
    return Arrays.asList(flameGraphTypeQueryParameter, subtaskIndexQueryParameter);
}
Message parameters for job vertex Flame Graph REST handler.
getQueryParameters
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobVertexFlameGraphParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobVertexFlameGraphParameters.java
Apache-2.0
/**
 * Returns the collection of type parameters for the response type.
 *
 * <p>The default implementation assumes a non-generic response type.
 *
 * @return Collection of type parameters for the response type
 */
default Collection<Class<?>> getResponseTypeParameters() {
    return Collections.emptyList();
}
Returns the collection of type parameters for the response type. @return Collection of type parameters for the response type
getResponseTypeParameters
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
Apache-2.0
/**
 * Derives an operation id for code generation from the header class name: the lower-cased
 * HTTP method followed by the class name with its {@code "Headers"} suffix stripped.
 *
 * <p>Only supported for GET headers whose class name ends in {@code "Headers"}; other cases
 * must override this method.
 *
 * @return the derived operation id
 * @throws UnsupportedOperationException if the HTTP method is not GET
 * @throws IllegalStateException if the class name does not end in {@code "Headers"}
 */
default String operationId() {
    final String className = getClass().getSimpleName();

    if (getHttpMethod() != HttpMethodWrapper.GET) {
        throw new UnsupportedOperationException(
                "The default implementation is only supported for GET calls. Please override 'operationId()' in '"
                        + className
                        + "'.");
    }

    final int headersSuffixStart = className.lastIndexOf("Headers");
    if (headersSuffixStart == -1) {
        throw new IllegalStateException(
                "Expect name of class "
                        + getClass()
                        + " to end on 'Headers'. Please rename the class or override 'operationId()'.");
    }

    final String baseName = className.substring(0, headersSuffixStart);
    final String methodPrefix = getHttpMethod().name().toLowerCase(Locale.ROOT);
    return methodPrefix + baseName;
}
Returns a short description for this header suitable for method code generation. @return short description
operationId
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
Apache-2.0
/**
 * Returns a collection of custom HTTP headers.
 *
 * <p>This default implementation returns an empty list. Override this method to provide
 * custom headers if needed.
 *
 * @return a collection of custom {@link HttpHeader}s, empty by default
 */
default Collection<HttpHeader> getCustomHeaders() {
    return Collections.emptyList();
}
Returns a collection of custom HTTP headers. <p>This default implementation returns an empty list. Override this method to provide custom headers if needed. @return a collection of custom {@link HttpHeader}s, empty by default.
getCustomHeaders
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
Apache-2.0
/**
 * Returns whether this parameter has been resolved.
 *
 * @return true, if this parameter was resolved, false otherwise
 */
public final boolean isResolved() {
    return resolved;
}
Returns whether this parameter has been resolved. @return true, if this parameter was resolved, false otherwise
isResolved
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
Apache-2.0
/**
 * Resolves this parameter for the given value. A parameter may only be resolved once.
 *
 * @param value value to resolve this parameter with, must not be null
 * @throws IllegalStateException if this parameter was already resolved
 * @throws NullPointerException if the given value is null
 */
public final void resolve(X value) {
    Preconditions.checkState(!resolved, "This parameter was already resolved.");
    this.value = Preconditions.checkNotNull(value);
    this.resolved = true;
}
Resolves this parameter for the given value. @param value value to resolve this parameter with
resolve
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
Apache-2.0
/**
 * Returns the key of this parameter, e.g. "jobid".
 *
 * @return key of this parameter
 */
public final String getKey() {
    return key;
}
Returns the key of this parameter, e.g. "jobid". @return key of this parameter
getKey
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
Apache-2.0
/**
 * Returns the resolved value of this parameter, or {@code null} if it isn't resolved yet.
 *
 * @return resolved value, or null if it wasn't resolved yet
 */
public final X getValue() {
    return value;
}
Returns the resolved value of this parameter, or {@code null} if it isn't resolved yet. @return resolved value, or null if it wasn't resolved yet
getValue
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
Apache-2.0
/**
 * Returns the resolved value of this parameter as a string, or {@code null} if it isn't
 * resolved yet.
 *
 * @return string representation of the resolved value, or null if it wasn't resolved yet
 */
final String getValueAsString() {
    if (value == null) {
        return null;
    }
    return convertToString(value);
}
Returns the resolved value of this parameter as a string, or {@code null} if it isn't resolved yet. @return resolved value, or null if it wasn't resolved yet
getValueAsString
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
Apache-2.0
/**
 * Returns whether this parameter must be resolved for the request.
 *
 * @return true if the parameter is mandatory, false otherwise
 */
public final boolean isMandatory() {
    return requisiteness == MessageParameterRequisiteness.MANDATORY;
}
Returns whether this parameter must be resolved for the request. @return true if the parameter is mandatory, false otherwise
isMandatory
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameter.java
Apache-2.0
/**
 * Returns whether all mandatory parameters have been resolved. Optional parameters are
 * ignored.
 *
 * @return true, if all mandatory parameters have been resolved, false otherwise
 */
public final boolean isResolved() {
    for (MessageParameter<?> pathParameter : getPathParameters()) {
        if (pathParameter.isMandatory() && !pathParameter.isResolved()) {
            return false;
        }
    }
    for (MessageParameter<?> queryParameter : getQueryParameters()) {
        if (queryParameter.isMandatory() && !queryParameter.isResolved()) {
            return false;
        }
    }
    return true;
}
Returns whether all mandatory parameters have been resolved. @return true, if all mandatory parameters have been resolved, false otherwise
isResolved
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameters.java
Apache-2.0
public static String resolveUrl(String genericUrl, MessageParameters parameters) { Preconditions.checkState( parameters.isResolved(), "Not all mandatory message parameters were resolved."); StringBuilder path = new StringBuilder(genericUrl); StringBuilder queryParameters = new StringBuilder(); for (MessageParameter<?> pathParameter : parameters.getPathParameters()) { if (pathParameter.isResolved()) { int start = path.indexOf(':' + pathParameter.getKey()); final String pathValue = Preconditions.checkNotNull(pathParameter.getValueAsString()); // only replace path parameters if they are present if (start != -1) { path.replace(start, start + pathParameter.getKey().length() + 1, pathValue); } } } boolean isFirstQueryParameter = true; for (MessageQueryParameter<?> queryParameter : parameters.getQueryParameters()) { if (queryParameter.isResolved()) { if (isFirstQueryParameter) { queryParameters.append('?'); isFirstQueryParameter = false; } else { queryParameters.append('&'); } queryParameters.append(queryParameter.getKey()); queryParameters.append('='); queryParameters.append(queryParameter.getValueAsString()); } } path.append(queryParameters); return path.toString(); }
Resolves the given URL (e.g "jobs/:jobid") using the given path/query parameters. <p>This method will fail with an {@link IllegalStateException} if any mandatory parameter was not resolved. <p>Unresolved optional parameters will be ignored. @param genericUrl URL to resolve @param parameters message parameters @return resolved url, e.g "/jobs/1234?state=running" @throws IllegalStateException if any mandatory parameter was not resolved
resolveUrl
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageParameters.java
Apache-2.0
/**
 * Converts the comma-separated string representation of this query parameter into a list of
 * values, converting each element via {@code convertStringToValue}.
 *
 * @param values comma-separated string representation of the parameter values
 * @return list of converted parameter values
 * @throws ConversionException if an element cannot be converted
 */
@Override
public List<X> convertFromString(String values) throws ConversionException {
    final List<X> result = new ArrayList<>();
    for (String rawValue : values.split(",")) {
        result.add(convertStringToValue(rawValue));
    }
    return result;
}
This class represents query parameters of a request. For example, the URL "/jobs?state=running" has a "state" query parameter, with "running" being its value string representation. <p>Query parameters may both occur multiple times or be of the form "key=value1,value2,value3". If a query parameter is specified multiple times the individual values are concatenated with {@code ,} and passed as a single value to {@link #convertToString(List)}.
convertFromString
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageQueryParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageQueryParameter.java
Apache-2.0
/**
 * Converts the given list of values into a single comma-separated string, converting each
 * element via {@code convertValueToString}.
 *
 * @param values list of parameter values
 * @return comma-separated string representation of the values
 */
@Override
public String convertToString(List<X> values) {
    final StringBuilder result = new StringBuilder();
    String separator = "";
    for (X value : values) {
        result.append(separator).append(convertValueToString(value));
        separator = ",";
    }
    return result.toString();
}
Converts the given list of values into a single comma-separated string representation. @param values list of parameter values @return comma-separated string representation of the values
convertToString
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageQueryParameter.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageQueryParameter.java
Apache-2.0
/**
 * Returns the code of this profiling mode: the lower-cased name of the enum constant.
 *
 * @return lower-case code of this profiling mode
 */
public String getCode() {
    // Lower-case with a fixed locale: the JVM default locale can map characters
    // unexpectedly (e.g. Turkish 'I' -> dotless 'ı'), which would corrupt the code.
    return this.name().toLowerCase(java.util.Locale.ROOT);
}
Supported profiling mode in async-profiler.
getCode
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/ProfilingInfo.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/ProfilingInfo.java
Apache-2.0
/** Returns the supported REST API versions; by default only {@code V1}. */
@Override
default Collection<RuntimeRestAPIVersion> getSupportedAPIVersions() {
    return Collections.singleton(RuntimeRestAPIVersion.V1);
}
Message headers for a web handler request that belongs to runtime module. @param <R> type of the request @param <M> type of the message parameters
getSupportedAPIVersions
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/RuntimeUntypedResponseMessageHeaders.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/RuntimeUntypedResponseMessageHeaders.java
Apache-2.0
/**
 * Returns whether this header allows file uploads.
 *
 * @return whether this header allows file uploads, false by default
 */
default boolean acceptsFileUploads() {
    return false;
}
Returns whether this header allows file uploads. @return whether this header allows file uploads
acceptsFileUploads
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/UntypedResponseMessageHeaders.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/UntypedResponseMessageHeaders.java
Apache-2.0
/** Returns the path parameters: the job id and the checkpoint id. */
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
    return Arrays.asList(jobPathParameter, checkpointIdPathParameter);
}
Message parameters for checkpoint related messages.
getPathParameters
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointMessageParameters.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointMessageParameters.java
Apache-2.0
/**
 * Maps the internal {@link SnapshotType} (plus the unaligned flag) to the REST API
 * checkpoint type.
 *
 * @param checkpointType internal snapshot type
 * @param isUnalignedCheckpoint whether the checkpoint was unaligned
 * @return the corresponding REST API checkpoint type
 * @throws IllegalArgumentException if an unaligned savepoint is requested
 */
public static RestAPICheckpointType valueOf(
        SnapshotType checkpointType, boolean isUnalignedCheckpoint) {
    if (checkpointType.isSavepoint()) {
        // Savepoints are always aligned; unaligned savepoints are not supported.
        Preconditions.checkArgument(
                !isUnalignedCheckpoint,
                "Currently the savepoint doesn't support unaligned checkpoint.");
        final SavepointType savepointType = (SavepointType) checkpointType;
        if (savepointType.isSynchronous()) {
            return SYNC_SAVEPOINT;
        }
        return SAVEPOINT;
    }
    return isUnalignedCheckpoint ? UNALIGNED_CHECKPOINT : CHECKPOINT;
}
Backward compatibility layer between internal {@link CheckpointType} and a field used in {@link CheckpointStatistics}.
valueOf
java
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/checkpoints/CheckpointStatistics.java
Apache-2.0