This is an automated email from the ASF dual-hosted git repository.
chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/master by this push:
new 012c1d49722 [FLINK-27819][rest][docs] Use proper operationIds
012c1d49722 is described below
commit 012c1d497221c9d5348a0508aadf4c0486a21bda
Author: Chesnay Schepler <[email protected]>
AuthorDate: Wed Jun 15 08:35:08 2022 +0200
[FLINK-27819][rest][docs] Use proper operationIds
---
docs/static/generated/rest_v1_dispatcher.yml | 111 +++++++++++----------
.../flink/docs/rest/OpenApiSpecGenerator.java | 24 ++++-
.../flink/docs/rest/OpenApiSpecGeneratorTest.java | 26 +++++
.../docs/rest/data/TestEmptyMessageHeaders.java | 19 +++-
.../handlers/AbstractJarPlanHeaders.java | 5 +
.../webmonitor/handlers/JarDeleteHeaders.java | 5 +
.../webmonitor/handlers/JarPlanGetHeaders.java | 2 +
.../runtime/webmonitor/handlers/JarRunHeaders.java | 5 +
.../webmonitor/handlers/JarUploadHeaders.java | 5 +
.../job/rescaling/RescalingTriggerHeaders.java | 5 +
.../rest/messages/JobCancellationHeaders.java | 5 +
.../runtime/rest/messages/MessageHeaders.java | 27 +++++
.../rest/messages/cluster/ShutdownHeaders.java | 5 +
.../ClusterDataSetDeleteTriggerHeaders.java | 5 +
.../rest/messages/job/JobSubmitHeaders.java | 5 +
.../SavepointDisposalTriggerHeaders.java | 5 +
.../job/savepoints/SavepointTriggerHeaders.java | 5 +
.../stop/StopWithSavepointTriggerHeaders.java | 5 +
18 files changed, 213 insertions(+), 56 deletions(-)
diff --git a/docs/static/generated/rest_v1_dispatcher.yml
b/docs/static/generated/rest_v1_dispatcher.yml
index ff9e74a0150..859783719ae 100644
--- a/docs/static/generated/rest_v1_dispatcher.yml
+++ b/docs/static/generated/rest_v1_dispatcher.yml
@@ -11,12 +11,14 @@ paths:
/cluster:
delete:
description: Shuts down the cluster
+ operationId: shutdownCluster
responses:
"200":
description: The request was successful.
/config:
get:
description: Returns the configuration of the WebUI.
+ operationId: getDashboardConfiguration
responses:
"200":
description: The request was successful.
@@ -27,6 +29,7 @@ paths:
/datasets:
get:
description: Returns all cluster data sets.
+ operationId: getClusterDataSetList
responses:
"200":
description: The request was successful.
@@ -37,6 +40,7 @@ paths:
/datasets/delete/{triggerid}:
get:
description: Returns the status for the delete operation of a cluster
data set.
+ operationId: getClusterDataSetDeleteStatus
parameters:
- name: triggerid
in: path
@@ -56,6 +60,7 @@ paths:
delete:
description: Triggers the deletion of a cluster data set. This async
operation
would return a 'triggerid' for further query identifier.
+ operationId: deleteClusterDataSet
parameters:
- name: datasetid
in: path
@@ -74,6 +79,7 @@ paths:
/jars:
get:
description: Returns a list of all jars previously uploaded via
'/jars/upload'.
+ operationId: getJarList
responses:
"200":
description: The request was successful.
@@ -88,6 +94,7 @@ paths:
, as some http libraries do not add the header by default.\nUsing
'curl' you\
\ can upload a jar via 'curl -X POST -H \"Expect:\" -F
\"jarfile=@path/to/flink-job.jar\"\
\ http://hostname:port/jars/upload'."
+ operationId: uploadJar
requestBody:
content:
application/x-java-archive: {}
@@ -102,6 +109,7 @@ paths:
/jars/{jarid}:
delete:
description: Deletes a jar previously uploaded via '/jars/upload'.
+ operationId: deleteJar
parameters:
- name: jarid
in: path
@@ -115,62 +123,11 @@ paths:
"200":
description: The request was successful.
/jars/{jarid}/plan:
- get:
- description: Returns the dataflow plan of a job contained in a jar
previously
- uploaded via '/jars/upload'. Program arguments can be passed both via
the
- JSON request (recommended) or query parameters.
- parameters:
- - name: jarid
- in: path
- description: "String value that identifies a jar. When uploading the
jar a\
- \ path is returned, where the filename is the ID. This value is
equivalent\
- \ to the `id` field in the list of uploaded jars (/jars)."
- required: true
- schema:
- type: string
- - name: program-args
- in: query
- description: "Deprecated, please use 'programArg' instead. String
value that\
- \ specifies the arguments for the program or plan"
- required: false
- style: form
- schema:
- type: string
- - name: programArg
- in: query
- description: Comma-separated list of program arguments.
- required: false
- style: form
- schema:
- type: string
- - name: entry-class
- in: query
- description: String value that specifies the fully qualified name of
the entry
- point class. Overrides the class defined in the jar file manifest.
- required: false
- style: form
- schema:
- type: string
- - name: parallelism
- in: query
- description: Positive integer value that specifies the desired
parallelism
- for the job.
- required: false
- style: form
- schema:
- type: integer
- format: int32
- responses:
- "200":
- description: The request was successful.
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/JobPlanInfo'
post:
description: Returns the dataflow plan of a job contained in a jar
previously
uploaded via '/jars/upload'. Program arguments can be passed both via
the
JSON request (recommended) or query parameters.
+ operationId: generatePlanFromJar
parameters:
- name: jarid
in: path
@@ -229,6 +186,7 @@ paths:
description: Submits a job by running a jar previously uploaded via
'/jars/upload'.
Program arguments can be passed both via the JSON request
(recommended) or
query parameters.
+ operationId: submitJobFromJar
parameters:
- name: jarid
in: path
@@ -302,6 +260,7 @@ paths:
/jobmanager/config:
get:
description: Returns the cluster configuration.
+ operationId: getClusterConfigurationInfo
responses:
"200":
description: The request was successful.
@@ -317,6 +276,7 @@ paths:
/jobmanager/logs:
get:
description: Returns the list of log files on the JobManager.
+ operationId: getJobManagerLogList
responses:
"200":
description: The request was successful.
@@ -327,6 +287,7 @@ paths:
/jobmanager/metrics:
get:
description: Provides access to job manager metrics.
+ operationId: getJobManagerMetrics
parameters:
- name: get
in: query
@@ -345,6 +306,7 @@ paths:
/jobmanager/thread-dump:
get:
description: Returns the thread dump of the JobManager.
+ operationId: getJobManagerThreadDump
responses:
"200":
description: The request was successful.
@@ -355,6 +317,7 @@ paths:
/jobs:
get:
description: Returns an overview over all jobs and their current state.
+ operationId: getJobIdsWithStatusesOverview
responses:
"200":
description: The request was successful.
@@ -367,6 +330,7 @@ paths:
\ Flink client. This call expects a multipart/form-data request that
consists\
\ of file uploads for the serialized JobGraph, jars and distributed
cache\
\ artifacts and an attribute named \"request\" for the JSON payload."
+ operationId: submitJob
requestBody:
content:
multipart/form-data:
@@ -390,6 +354,7 @@ paths:
/jobs/metrics:
get:
description: Provides access to aggregated job metrics.
+ operationId: getAggregatedJobMetrics
parameters:
- name: get
in: query
@@ -424,6 +389,7 @@ paths:
/jobs/overview:
get:
description: Returns an overview over all jobs.
+ operationId: getJobsOverview
responses:
"200":
description: The request was successful.
@@ -434,6 +400,7 @@ paths:
/jobs/{jobid}:
get:
description: Returns details of a job.
+ operationId: getJobDetails
parameters:
- name: jobid
in: path
@@ -450,6 +417,7 @@ paths:
$ref: '#/components/schemas/JobDetailsInfo'
patch:
description: Terminates a job.
+ operationId: cancelJob
parameters:
- name: jobid
in: path
@@ -472,6 +440,7 @@ paths:
get:
description: "Returns the accumulators for all tasks of a job,
aggregated across\
\ the respective subtasks."
+ operationId: getJobAccumulators
parameters:
- name: jobid
in: path
@@ -497,6 +466,7 @@ paths:
/jobs/{jobid}/checkpoints:
get:
description: Returns checkpointing statistics for a job.
+ operationId: getCheckpointingStatistics
parameters:
- name: jobid
in: path
@@ -514,6 +484,7 @@ paths:
/jobs/{jobid}/checkpoints/config:
get:
description: Returns the checkpointing configuration.
+ operationId: getCheckpointConfig
parameters:
- name: jobid
in: path
@@ -531,6 +502,7 @@ paths:
/jobs/{jobid}/checkpoints/details/{checkpointid}:
get:
description: Returns details for a checkpoint.
+ operationId: getCheckpointStatisticDetails
parameters:
- name: jobid
in: path
@@ -555,6 +527,7 @@ paths:
/jobs/{jobid}/checkpoints/details/{checkpointid}/subtasks/{vertexid}:
get:
description: Returns checkpoint statistics for a task and its subtasks.
+ operationId: getTaskCheckpointStatistics
parameters:
- name: jobid
in: path
@@ -585,6 +558,7 @@ paths:
/jobs/{jobid}/config:
get:
description: Returns the configuration of a job.
+ operationId: getJobConfig
parameters:
- name: jobid
in: path
@@ -608,6 +582,7 @@ paths:
\ through web.exception-history-size in the Flink configuration. The
following\
\ first-level members are deprecated: 'root-exception', 'timestamp',
'all-exceptions',\
\ and 'truncated'. Use the data provided through 'exceptionHistory',
instead."
+ operationId: getJobExceptions
parameters:
- name: jobid
in: path
@@ -635,6 +610,7 @@ paths:
get:
description: Returns the result of a job execution. Gives access to the
execution
time of the job and to all accumulators created by this job.
+ operationId: getJobExecutionResult
parameters:
- name: jobid
in: path
@@ -652,6 +628,7 @@ paths:
/jobs/{jobid}/metrics:
get:
description: Provides access to job metrics.
+ operationId: getJobMetrics
parameters:
- name: jobid
in: path
@@ -676,6 +653,7 @@ paths:
/jobs/{jobid}/plan:
get:
description: Returns the dataflow plan of a job.
+ operationId: getJobPlan
parameters:
- name: jobid
in: path
@@ -694,6 +672,7 @@ paths:
patch:
description: Triggers the rescaling of a job. This async operation would
return
a 'triggerid' for further query identifier.
+ operationId: rescaleJob
parameters:
- name: jobid
in: path
@@ -719,6 +698,7 @@ paths:
/jobs/{jobid}/rescaling/{triggerid}:
get:
description: Returns the status of a rescaling operation.
+ operationId: getRescalingStatus
parameters:
- name: jobid
in: path
@@ -744,6 +724,7 @@ paths:
post:
description: "Triggers a savepoint, and optionally cancels the job
afterwards.\
\ This async operation would return a 'triggerid' for further query
identifier."
+ operationId: triggerSavepoint
parameters:
- name: jobid
in: path
@@ -766,6 +747,7 @@ paths:
/jobs/{jobid}/savepoints/{triggerid}:
get:
description: Returns the status of a savepoint operation.
+ operationId: getSavepointStatus
parameters:
- name: jobid
in: path
@@ -790,6 +772,7 @@ paths:
/jobs/{jobid}/status:
get:
description: Returns the current status of a job execution.
+ operationId: getJobStatusInfo
parameters:
- name: jobid
in: path
@@ -810,6 +793,7 @@ paths:
\ before taking the savepoint to flush out any state waiting for
timers to\
\ fire. This async operation would return a 'triggerid' for further
query\
\ identifier."
+ operationId: triggerStopWithSavepoint
parameters:
- name: jobid
in: path
@@ -832,6 +816,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}:
get:
description: "Returns details for a task, with a summary for each of its
subtasks."
+ operationId: getJobVertexDetails
parameters:
- name: jobid
in: path
@@ -856,6 +841,7 @@ paths:
get:
description: "Returns user-defined accumulators of a task, aggregated
across\
\ all subtasks."
+ operationId: getJobVertexAccumulators
parameters:
- name: jobid
in: path
@@ -880,6 +866,7 @@ paths:
get:
description: "Returns back-pressure information for a job, and may
initiate\
\ back-pressure sampling if necessary."
+ operationId: getJobVertexBackPressure
parameters:
- name: jobid
in: path
@@ -904,6 +891,7 @@ paths:
get:
description: "Returns flame graph information for a vertex, and may
initiate\
\ flame graph sampling if necessary."
+ operationId: getJobVertexFlameGraph
parameters:
- name: jobid
in: path
@@ -935,6 +923,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/metrics:
get:
description: Provides access to task metrics.
+ operationId: getJobVertexMetrics
parameters:
- name: jobid
in: path
@@ -965,6 +954,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/subtasks/accumulators:
get:
description: Returns all user-defined accumulators for all subtasks of a
task.
+ operationId: getSubtasksAllAccumulators
parameters:
- name: jobid
in: path
@@ -988,6 +978,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/subtasks/metrics:
get:
description: Provides access to aggregated subtask metrics.
+ operationId: getAggregatedSubtaskMetrics
parameters:
- name: jobid
in: path
@@ -1035,6 +1026,7 @@ paths:
get:
description: Returns details of the current or latest execution attempt
of a
subtask.
+ operationId: getSubtaskCurrentAttemptDetails
parameters:
- name: jobid
in: path
@@ -1066,6 +1058,7 @@ paths:
get:
description: Returns details of an execution attempt of a subtask.
Multiple
execution attempts happen in case of failure/recovery.
+ operationId: getSubtaskExecutionAttemptDetails
parameters:
- name: jobid
in: path
@@ -1104,6 +1097,7 @@ paths:
get:
description: Returns the accumulators of an execution attempt of a
subtask.
Multiple execution attempts happen in case of failure/recovery.
+ operationId: getSubtaskExecutionAttemptAccumulators
parameters:
- name: jobid
in: path
@@ -1141,6 +1135,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/subtasks/{subtaskindex}/metrics:
get:
description: Provides access to subtask metrics.
+ operationId: getSubtaskMetrics
parameters:
- name: jobid
in: path
@@ -1178,6 +1173,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/subtasktimes:
get:
description: Returns time-related information for all subtasks of a task.
+ operationId: getSubtasksTimes
parameters:
- name: jobid
in: path
@@ -1201,6 +1197,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/taskmanagers:
get:
description: Returns task information aggregated by task manager.
+ operationId: getJobVertexTaskManagers
parameters:
- name: jobid
in: path
@@ -1224,6 +1221,7 @@ paths:
/jobs/{jobid}/vertices/{vertexid}/watermarks:
get:
description: Returns the watermarks for all subtasks of a task.
+ operationId: getJobVertexWatermarks
parameters:
- name: jobid
in: path
@@ -1247,6 +1245,7 @@ paths:
/overview:
get:
description: Returns an overview over the Flink cluster.
+ operationId: getClusterOverview
responses:
"200":
description: The request was successful.
@@ -1258,6 +1257,7 @@ paths:
post:
description: Triggers the disposal of a savepoint. This async operation
would
return a 'triggerid' for further query identifier.
+ operationId: triggerSavepointDisposal
requestBody:
content:
application/json:
@@ -1273,6 +1273,7 @@ paths:
/savepoint-disposal/{triggerid}:
get:
description: Returns the status of a savepoint disposal operation.
+ operationId: getSavepointDisposalStatus
parameters:
- name: triggerid
in: path
@@ -1291,6 +1292,7 @@ paths:
/taskmanagers:
get:
description: Returns an overview over all task managers.
+ operationId: getTaskManagers
responses:
"200":
description: The request was successful.
@@ -1301,6 +1303,7 @@ paths:
/taskmanagers/metrics:
get:
description: Provides access to aggregated task manager metrics.
+ operationId: getAggregatedTaskManagerMetrics
parameters:
- name: get
in: query
@@ -1337,6 +1340,7 @@ paths:
description: Returns details for a task manager.
"metrics.memorySegmentsAvailable"
and "metrics.memorySegmentsTotal" are deprecated. Please use
"metrics.nettyShuffleMemorySegmentsAvailable"
and "metrics.nettyShuffleMemorySegmentsTotal" instead.
+ operationId: getTaskManagerDetails
parameters:
- name: taskmanagerid
in: path
@@ -1354,6 +1358,7 @@ paths:
/taskmanagers/{taskmanagerid}/logs:
get:
description: Returns the list of log files on a TaskManager.
+ operationId: getTaskManagerLogs
parameters:
- name: taskmanagerid
in: path
@@ -1371,6 +1376,7 @@ paths:
/taskmanagers/{taskmanagerid}/metrics:
get:
description: Provides access to task manager metrics.
+ operationId: getTaskManagerMetrics
parameters:
- name: taskmanagerid
in: path
@@ -1395,6 +1401,7 @@ paths:
/taskmanagers/{taskmanagerid}/thread-dump:
get:
description: Returns the thread dump of the requested TaskManager.
+ operationId: getTaskManagerThreadDump
parameters:
- name: taskmanagerid
in: path
diff --git
a/flink-docs/src/main/java/org/apache/flink/docs/rest/OpenApiSpecGenerator.java
b/flink-docs/src/main/java/org/apache/flink/docs/rest/OpenApiSpecGenerator.java
index 59bd3a673cb..4629304195a 100644
---
a/flink-docs/src/main/java/org/apache/flink/docs/rest/OpenApiSpecGenerator.java
+++
b/flink-docs/src/main/java/org/apache/flink/docs/rest/OpenApiSpecGenerator.java
@@ -82,7 +82,9 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import java.util.stream.Collectors;
/**
@@ -143,7 +145,8 @@ public class OpenApiSpecGenerator {
.filter(spec ->
spec.getSupportedAPIVersions().contains(apiVersion))
.filter(OpenApiSpecGenerator::shouldBeDocumented)
.collect(Collectors.toList());
- specs.forEach(spec -> add(spec, openApi));
+ final Set<String> usedOperationIds = new HashSet<>();
+ specs.forEach(spec -> add(spec, openApi, usedOperationIds));
final List<Schema> asyncOperationSchemas =
collectAsyncOperationResultVariants(specs);
@@ -276,7 +279,8 @@ public class OpenApiSpecGenerator {
.addSchemas(SerializedThrowable.class.getSimpleName(),
serializedThrowableSchema);
}
- private static void add(MessageHeaders<?, ?, ?> spec, OpenAPI openApi) {
+ private static void add(
+ MessageHeaders<?, ?, ?> spec, OpenAPI openApi, Set<String>
usedOperationIds) {
final PathItem pathItem =
openApi.getPaths()
.computeIfAbsent(
@@ -289,6 +293,7 @@ public class OpenApiSpecGenerator {
operation.description(spec.getDescription());
+ setOperationId(operation, spec, usedOperationIds);
setParameters(operation, spec);
setRequest(operation, spec);
setResponse(operation, spec);
@@ -296,6 +301,21 @@ public class OpenApiSpecGenerator {
pathItem.operation(convert(spec.getHttpMethod()), operation);
}
+ private static void setOperationId(
+ final Operation operation,
+ final MessageHeaders<?, ?, ?> spec,
+ Set<String> usedOperationIds) {
+ final String operationId = spec.operationId();
+
+ if (!usedOperationIds.add(operationId)) {
+ throw new IllegalStateException(
+ String.format(
+ "Duplicate OperationId '%s' for path '%s'",
+ operationId, spec.getTargetRestEndpointURL()));
+ }
+ operation.setOperationId(operationId);
+ }
+
private static void setParameters(
final Operation operation, final MessageHeaders<?, ?, ?> spec) {
List<Parameter> parameters = new ArrayList<>();
diff --git
a/flink-docs/src/test/java/org/apache/flink/docs/rest/OpenApiSpecGeneratorTest.java
b/flink-docs/src/test/java/org/apache/flink/docs/rest/OpenApiSpecGeneratorTest.java
index f3d3555b94d..e2d6912323e 100644
---
a/flink-docs/src/test/java/org/apache/flink/docs/rest/OpenApiSpecGeneratorTest.java
+++
b/flink-docs/src/test/java/org/apache/flink/docs/rest/OpenApiSpecGeneratorTest.java
@@ -36,6 +36,7 @@ import java.util.List;
import java.util.concurrent.CompletableFuture;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
/** Test class for {@link OpenApiSpecGenerator}. */
class OpenApiSpecGeneratorTest {
@@ -86,4 +87,29 @@ class OpenApiSpecGeneratorTest {
null));
}
}
+
+ @Test
+ void testDuplicateOperationIdsAreRejected() throws Exception {
+ File file = File.createTempFile("rest_v0_", ".html");
+ assertThatThrownBy(
+ () ->
+ OpenApiSpecGenerator.createDocumentationFile(
+ new
TestDuplicateOperationIdDocumentingRestEndpoint(),
+ RestAPIVersion.V0,
+ file.toPath()))
+ .isInstanceOf(IllegalStateException.class)
+ .hasMessageContaining("Duplicate OperationId");
+ }
+
+ private static class TestDuplicateOperationIdDocumentingRestEndpoint
+ implements DocumentingRestEndpoint {
+
+ @Override
+ public List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>>
initializeHandlers(
+ CompletableFuture<String> localAddressFuture) {
+ return Arrays.asList(
+ Tuple2.of(new TestEmptyMessageHeaders("operation1"), null),
+ Tuple2.of(new TestEmptyMessageHeaders("operation1"),
null));
+ }
+ }
}
diff --git
a/flink-docs/src/test/java/org/apache/flink/docs/rest/data/TestEmptyMessageHeaders.java
b/flink-docs/src/test/java/org/apache/flink/docs/rest/data/TestEmptyMessageHeaders.java
index bc7480398e1..228a36fc34e 100644
---
a/flink-docs/src/test/java/org/apache/flink/docs/rest/data/TestEmptyMessageHeaders.java
+++
b/flink-docs/src/test/java/org/apache/flink/docs/rest/data/TestEmptyMessageHeaders.java
@@ -29,6 +29,7 @@ import
org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseSt
import java.util.Collection;
import java.util.Collections;
+import java.util.UUID;
/**
* A {@link MessageHeaders} for testing purpose. Its request body, response
body and message
@@ -42,15 +43,24 @@ public class TestEmptyMessageHeaders
private final String url;
private final String description;
+ private final String operationId;
public TestEmptyMessageHeaders() {
- this.url = URL;
- this.description = DESCRIPTION;
+ this(URL, DESCRIPTION, UUID.randomUUID().toString());
}
public TestEmptyMessageHeaders(String url, String description) {
+ this(url, description, UUID.randomUUID().toString());
+ }
+
+ public TestEmptyMessageHeaders(String operationId) {
+ this(URL, DESCRIPTION, operationId);
+ }
+
+ private TestEmptyMessageHeaders(String url, String description, String
operationId) {
this.url = url;
this.description = description;
+ this.operationId = operationId;
}
@Override
@@ -73,6 +83,11 @@ public class TestEmptyMessageHeaders
return HttpResponseStatus.OK;
}
+ @Override
+ public String operationId() {
+ return operationId;
+ }
+
@Override
public String getDescription() {
return description;
diff --git
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/AbstractJarPlanHeaders.java
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/AbstractJarPlanHeaders.java
index be11524974e..324b42ec199 100644
---
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/AbstractJarPlanHeaders.java
+++
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/AbstractJarPlanHeaders.java
@@ -52,6 +52,11 @@ public abstract class AbstractJarPlanHeaders
return "/jars/:" + JarIdPathParameter.KEY + "/plan";
}
+ @Override
+ public String operationId() {
+ return "generatePlanFromJar";
+ }
+
@Override
public String getDescription() {
return "Returns the dataflow plan of a job contained in a jar
previously uploaded via '"
diff --git
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarDeleteHeaders.java
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarDeleteHeaders.java
index c747f69a79f..e9fa0a3fbe0 100644
---
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarDeleteHeaders.java
+++
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarDeleteHeaders.java
@@ -65,6 +65,11 @@ public class JarDeleteHeaders
return INSTANCE;
}
+ @Override
+ public String operationId() {
+ return "deleteJar";
+ }
+
@Override
public String getDescription() {
return "Deletes a jar previously uploaded via '" +
JarUploadHeaders.URL + "'.";
diff --git
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarPlanGetHeaders.java
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarPlanGetHeaders.java
index c2082f7b77d..5d7737bad80 100644
---
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarPlanGetHeaders.java
+++
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarPlanGetHeaders.java
@@ -18,9 +18,11 @@
package org.apache.flink.runtime.webmonitor.handlers;
+import org.apache.flink.annotation.docs.Documentation;
import org.apache.flink.runtime.rest.HttpMethodWrapper;
/** Message headers for {@link JarPlanHandler}. */
[email protected]("Subsumed by JarPlanPostHeaders")
public class JarPlanGetHeaders extends AbstractJarPlanHeaders {
private static final JarPlanGetHeaders INSTANCE = new JarPlanGetHeaders();
diff --git
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarRunHeaders.java
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarRunHeaders.java
index e0321fa71a0..011188e780f 100644
---
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarRunHeaders.java
+++
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarRunHeaders.java
@@ -72,4 +72,9 @@ public class JarRunHeaders
+ "'. "
+ "Program arguments can be passed both via the JSON request
(recommended) or query parameters.";
}
+
+ @Override
+ public String operationId() {
+ return "submitJobFromJar";
+ }
}
diff --git
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarUploadHeaders.java
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarUploadHeaders.java
index 641e3afb558..e18b15c0c92 100644
---
a/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarUploadHeaders.java
+++
b/flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarUploadHeaders.java
@@ -77,6 +77,11 @@ public final class JarUploadHeaders
+ "'.";
}
+ @Override
+ public String operationId() {
+ return "uploadJar";
+ }
+
@Override
public boolean acceptsFileUploads() {
return true;
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingTriggerHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingTriggerHeaders.java
index 89fb332483c..7b75d1b0408 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingTriggerHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingTriggerHeaders.java
@@ -69,4 +69,9 @@ public class RescalingTriggerHeaders
protected String getAsyncOperationDescription() {
return "Triggers the rescaling of a job.";
}
+
+ @Override
+ public String operationId() {
+ return "rescaleJob";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobCancellationHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobCancellationHeaders.java
index d6bd74bd6e3..260f39eb6cc 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobCancellationHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobCancellationHeaders.java
@@ -72,4 +72,9 @@ public class JobCancellationHeaders
public String getDescription() {
return "Terminates a job.";
}
+
+ @Override
+ public String operationId() {
+ return "cancelJob";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
index 12628654b9e..a6c0d11d69b 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageHeaders.java
@@ -18,10 +18,13 @@
package org.apache.flink.runtime.rest.messages;
+import org.apache.flink.runtime.rest.HttpMethodWrapper;
+
import
org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus;
import java.util.Collection;
import java.util.Collections;
+import java.util.Locale;
/**
* This class links {@link RequestBody}s to {@link ResponseBody}s types and
contains meta-data
@@ -66,4 +69,28 @@ public interface MessageHeaders<
* @return description for the header
*/
String getDescription();
+
+ /**
+ * Returns a short description for this header suitable for method code
generation.
+ *
+ * @return short description
+ */
+ default String operationId() {
+ if (getHttpMethod() != HttpMethodWrapper.GET) {
+ throw new UnsupportedOperationException(
+ "The default implementation is only supported for GET
calls. Please override 'operationId()'.");
+ }
+
+ final String className = getClass().getSimpleName();
+ final int headersSuffixStart = className.lastIndexOf("Headers");
+ if (headersSuffixStart == -1) {
+ throw new IllegalStateException(
+ "Expect name of class "
+ + getClass()
+ + " to end on 'Headers'. Please rename the class
or override 'operationId()'.");
+ }
+
+ return getHttpMethod().name().toLowerCase(Locale.ROOT)
+ + className.substring(0, headersSuffixStart);
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/cluster/ShutdownHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/cluster/ShutdownHeaders.java
index 4ebb802b47d..a6179f66678 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/cluster/ShutdownHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/cluster/ShutdownHeaders.java
@@ -70,4 +70,9 @@ public class ShutdownHeaders
public String getDescription() {
return "Shuts down the cluster";
}
+
+ @Override
+ public String operationId() {
+ return "shutdownCluster";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/dataset/ClusterDataSetDeleteTriggerHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/dataset/ClusterDataSetDeleteTriggerHeaders.java
index 5795ae91354..f788c9dcdbf 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/dataset/ClusterDataSetDeleteTriggerHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/dataset/ClusterDataSetDeleteTriggerHeaders.java
@@ -69,4 +69,9 @@ public class ClusterDataSetDeleteTriggerHeaders
public String getTargetRestEndpointURL() {
return URL;
}
+
+ @Override
+ public String operationId() {
+ return "deleteClusterDataSet";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/JobSubmitHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/JobSubmitHeaders.java
index fe4c07a2427..3cb8215d4b3 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/JobSubmitHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/JobSubmitHeaders.java
@@ -83,4 +83,9 @@ public class JobSubmitHeaders
public boolean acceptsFileUploads() {
return true;
}
+
+ @Override
+ public String operationId() {
+ return "submitJob";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointDisposalTriggerHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointDisposalTriggerHeaders.java
index 8e072a3b1ac..10699364958 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointDisposalTriggerHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointDisposalTriggerHeaders.java
@@ -73,4 +73,9 @@ public class SavepointDisposalTriggerHeaders
protected String getAsyncOperationDescription() {
return "Triggers the disposal of a savepoint.";
}
+
+ @Override
+ public String operationId() {
+ return "triggerSavepointDisposal";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointTriggerHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointTriggerHeaders.java
index 7bb237a4516..5a799fc3de2 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointTriggerHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/SavepointTriggerHeaders.java
@@ -77,4 +77,9 @@ public class SavepointTriggerHeaders
protected String getAsyncOperationDescription() {
return "Triggers a savepoint, and optionally cancels the job
afterwards.";
}
+
+ @Override
+ public String operationId() {
+ return "triggerSavepoint";
+ }
}
diff --git
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/stop/StopWithSavepointTriggerHeaders.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/stop/StopWithSavepointTriggerHeaders.java
index 3ee8fd2f6eb..5cf6b65d32d 100644
---
a/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/stop/StopWithSavepointTriggerHeaders.java
+++
b/flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/savepoints/stop/StopWithSavepointTriggerHeaders.java
@@ -80,4 +80,9 @@ public class StopWithSavepointTriggerHeaders
return "Stops a job with a savepoint. Optionally, it can also emit a
MAX_WATERMARK before taking"
+ " the savepoint to flush out any state waiting for timers to
fire.";
}
+
+ @Override
+ public String operationId() {
+ return "triggerStopWithSavepoint";
+ }
}