This is an automated email from the ASF dual-hosted git repository.
hansva pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/hop.git
The following commit(s) were added to refs/heads/main by this push:
new 162457dd03 added servlets to execute workflows and pipelines from file. fixes #6197 (#6198)
162457dd03 is described below
commit 162457dd03e458bf2b8f005cafec611eb59b15e7
Author: Bart Maertens <[email protected]>
AuthorDate: Tue Dec 23 13:29:56 2025 +0000
added servlets to execute workflows and pipelines from file. fixes #6197 (#6198)
---
.../integration-tests-hop_server.yaml | 7 +
.../modules/ROOT/pages/hop-server/rest-api.adoc | 154 ++++++++
.../org/apache/hop/www/ExecPipelineServlet.java | 280 ++++++++++++++
.../org/apache/hop/www/ExecWorkflowServlet.java | 277 ++++++++++++++
.../main-0006-test-exec-pipeline-workflow.hwf | 402 +++++++++++++++++++++
5 files changed, 1120 insertions(+)
diff --git a/docker/integration-tests/integration-tests-hop_server.yaml b/docker/integration-tests/integration-tests-hop_server.yaml
index 21999d3653..a102fe58cb 100644
--- a/docker/integration-tests/integration-tests-hop_server.yaml
+++ b/docker/integration-tests/integration-tests-hop_server.yaml
@@ -28,6 +28,10 @@ services:
hop_server:
image: apache/hop:Development
+ build:
+ context: ../../.
+ dockerfile: docker/Dockerfile
+ pull_policy: never
hostname: hop-server
ports:
- "8181"
@@ -35,6 +39,9 @@ services:
- HOP_SERVER_PORT=8181
- HOP_SERVER_HOSTNAME=hop-server
- HOP_SERVER_METADATA_FOLDER=/hop_server_volume/metadata
+ - HOP_PROJECT_NAME=samples
+ - HOP_PROJECT_FOLDER=/opt/hop/config/projects/samples
+ - HOP_PROJECT_CONFIG_FILE_NAME=project-config.json
volumes:
- ./../../integration-tests/hop_server/:/hop_server_volume
healthcheck:
diff --git a/docs/hop-user-manual/modules/ROOT/pages/hop-server/rest-api.adoc b/docs/hop-user-manual/modules/ROOT/pages/hop-server/rest-api.adoc
index 29b8d94cde..d5a7362f9a 100644
--- a/docs/hop-user-manual/modules/ROOT/pages/hop-server/rest-api.adoc
+++ b/docs/hop-user-manual/modules/ROOT/pages/hop-server/rest-api.adoc
@@ -760,6 +760,160 @@ Example result:
</HTML>
----
+== Execute Pipeline from File
+
+name::
+execPipeline
+
+description::
+Execute a pipeline directly from a file path on the server's file system.
+The pipeline is loaded, executed synchronously, and the result is returned immediately.
+This servlet supports variable resolution in file paths (e.g., `${PROJECT_HOME}`) and can pass parameters and variables to the pipeline.
+
+endPoint::
+GET `/hop/execPipeline`
+
+parameters::
+* pipeline (required): File path to the pipeline (.hpl file). Can use variables like `${PROJECT_HOME}`, which will be resolved by the server.
+* level (optional): Logging level (e.g., Debug, Basic, Detailed, Error, Minimal, Nothing, RowLevel). Defaults to the server's default log level.
+* runConfig (optional): Run configuration name to use for execution. If not specified, the default run configuration from metadata is used, or "local" if no default is found.
+* *any parameter name*: Any additional URL parameters are passed to the pipeline. If a name matches a pipeline parameter, it is set as a parameter; otherwise, it is set as a variable.
+
+example request::
+GET `+http://localhost:8081/hop/execPipeline?pipeline=/opt/hop/config/projects/samples/pipelines/pipeline-with-parameter.hpl&PRM_EXAMPLE=test_value&level=BASIC+`
+
+Using project variables:
+GET `+http://localhost:8081/hop/execPipeline?pipeline=${PROJECT_HOME}/pipelines/my-pipeline.hpl&runConfig=local&level=Detailed+`
+
+result::
+
+Success response (HTTP 200):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>OK</result>
+ <message>Pipeline executed successfully</message>
+ <id/>
+</webresult>
+----
+
+Error response - Missing parameter (HTTP 400):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>ERROR</result>
+ <message>Missing mandatory parameter: pipeline</message>
+ <id/>
+</webresult>
+----
+
+Error response - File not found (HTTP 404):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>ERROR</result>
+ <message>Unable to find pipeline: /path/to/nonexistent.hpl (resolved: /path/to/nonexistent.hpl)</message>
+ <id/>
+</webresult>
+----
+
+Error response - Execution error (HTTP 500):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>ERROR</result>
+ <message>Error executing pipeline: [log output]</message>
+ <id/>
+</webresult>
+----
+
+notes::
+* The pipeline path supports variable resolution. If the server is started with a project (using the `-j` flag), variables like `${PROJECT_HOME}` will be resolved.
+* The execution is synchronous: the servlet waits for the pipeline to complete before returning.
+* The pipeline is automatically registered in the server's pipeline map and can be queried using `getPipelineStatus`.
+* All URL parameters except `pipeline`, `level`, and `runConfig` are passed to the pipeline as parameters or variables.
+
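+A minimal client sketch using the JDK 11+ `java.net.http.HttpClient` (the host, port, and `cluster:cluster` credentials are placeholders borrowed from the integration test below; adjust them to your server). Note that a literal `${PROJECT_HOME}` in the query string typically needs to be URL-encoded so that the variable reaches the server unresolved:
+
+[source,java]
+----
+import java.net.URI;
+import java.net.URLEncoder;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+import java.util.Base64;
+
+public class ExecPipelineClient {
+  public static void main(String[] args) throws Exception {
+    // ${PROJECT_HOME} is resolved on the server, so encode it here to keep
+    // it intact in the query string.
+    String pipeline = URLEncoder.encode(
+        "${PROJECT_HOME}/pipelines/my-pipeline.hpl", StandardCharsets.UTF_8);
+    String url = "http://localhost:8081/hop/execPipeline?pipeline=" + pipeline
+        + "&level=Basic&PRM_EXAMPLE=test_value"; // extra name becomes a parameter/variable
+
+    String auth = Base64.getEncoder()
+        .encodeToString("cluster:cluster".getBytes(StandardCharsets.UTF_8));
+    HttpRequest request = HttpRequest.newBuilder(URI.create(url))
+        .header("Authorization", "Basic " + auth)
+        .GET()
+        .build();
+
+    HttpResponse<String> response = HttpClient.newHttpClient()
+        .send(request, HttpResponse.BodyHandlers.ofString());
+
+    // 200 with <result>OK</result> on success; 400/404/500 carry an ERROR
+    // webresult as documented above.
+    System.out.println(response.statusCode() + "\n" + response.body());
+  }
+}
+----
+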
+== Execute Workflow from File
+
+name::
+execWorkflow
+
+description::
+Execute a workflow directly from a file path on the server's file system.
+The workflow is loaded, executed synchronously, and the result is returned immediately.
+This servlet supports variable resolution in file paths (e.g., `${PROJECT_HOME}`) and can pass parameters and variables to the workflow.
+
+endPoint::
+GET `/hop/execWorkflow`
+
+parameters::
+* workflow (required): File path to the workflow (.hwf file). Can use variables like `${PROJECT_HOME}`, which will be resolved by the server.
+* level (optional): Logging level (e.g., Debug, Basic, Detailed, Error, Minimal, Nothing, RowLevel). Defaults to the server's default log level.
+* runConfig (optional): Run configuration name to use for execution. If not specified, the default run configuration from metadata is used, or "local" if no default is found.
+* *any parameter name*: Any additional URL parameters are passed to the workflow. If a name matches a workflow parameter, it is set as a parameter; otherwise, it is set as a variable.
+
+example request::
+GET `+http://localhost:8081/hop/execWorkflow?workflow=/opt/hop/config/projects/samples/workflows/parallel/parallel-workflow.hwf&level=BASIC+`
+
+Using project variables:
+GET `+http://localhost:8081/hop/execWorkflow?workflow=${PROJECT_HOME}/workflows/my-workflow.hwf&runConfig=local&level=Detailed+`
+
+result::
+
+Success response (HTTP 200):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>OK</result>
+ <message>Workflow executed successfully</message>
+ <id/>
+</webresult>
+----
+
+Error response - Missing parameter (HTTP 400):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>ERROR</result>
+ <message>Missing mandatory parameter: workflow</message>
+ <id/>
+</webresult>
+----
+
+Error response - File not found (HTTP 404):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>ERROR</result>
+ <message>Unable to find workflow: /path/to/nonexistent.hwf (resolved: /path/to/nonexistent.hwf)</message>
+ <id/>
+</webresult>
+----
+
+Error response - Execution error (HTTP 500):
+[source,xml]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<webresult>
+ <result>ERROR</result>
+ <message>Error executing workflow: [log output]</message>
+ <id/>
+</webresult>
+----
+
+notes::
+* The workflow path supports variable resolution. If the server is started with a project (using the `-j` flag), variables like `${PROJECT_HOME}` will be resolved.
+* The execution is synchronous: the servlet waits for the workflow to complete before returning.
+* The workflow is automatically registered in the server's workflow map and can be queried using `getWorkflowStatus`.
+* All URL parameters except `workflow`, `level`, and `runConfig` are passed to the workflow as parameters or variables.
+
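+A minimal sketch for checking a `<webresult>` response with the JDK's built-in DOM parser; the `xml` argument stands in for a response body obtained as in the client sketch above:
+
+[source,java]
+----
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import javax.xml.parsers.DocumentBuilderFactory;
+import org.w3c.dom.Document;
+
+public class WebResultCheck {
+  // Expects <webresult><result>OK|ERROR</result><message>...</message></webresult>
+  public static boolean isOk(String xml) throws Exception {
+    Document doc = DocumentBuilderFactory.newInstance()
+        .newDocumentBuilder()
+        .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
+    String result = doc.getElementsByTagName("result").item(0).getTextContent();
+    return "OK".equals(result.trim());
+  }
+}
+----
+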
== Stop Pipeline
name::
diff --git a/engine/src/main/java/org/apache/hop/www/ExecPipelineServlet.java b/engine/src/main/java/org/apache/hop/www/ExecPipelineServlet.java
new file mode 100644
index 0000000000..bdb68ee9d9
--- /dev/null
+++ b/engine/src/main/java/org/apache/hop/www/ExecPipelineServlet.java
@@ -0,0 +1,280 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hop.www;
+
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.Enumeration;
+import java.util.UUID;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hop.core.Const;
+import org.apache.hop.core.annotations.HopServerServlet;
+import org.apache.hop.core.exception.HopException;
+import org.apache.hop.core.logging.HopLogStore;
+import org.apache.hop.core.logging.LogLevel;
+import org.apache.hop.core.logging.LoggingObjectType;
+import org.apache.hop.core.logging.SimpleLoggingObject;
+import org.apache.hop.core.metadata.SerializableMetadataProvider;
+import org.apache.hop.core.util.Utils;
+import org.apache.hop.metadata.api.IHopMetadataProvider;
+import org.apache.hop.pipeline.PipelineConfiguration;
+import org.apache.hop.pipeline.PipelineExecutionConfiguration;
+import org.apache.hop.pipeline.PipelineMeta;
+import org.apache.hop.pipeline.config.PipelineRunConfiguration;
+import org.apache.hop.pipeline.engine.IPipelineEngine;
+import org.apache.hop.pipeline.engine.PipelineEngineFactory;
+
+@HopServerServlet(id = "execPipeline", name = "Execute pipeline from file path")
+public class ExecPipelineServlet extends BaseHttpServlet implements IHopServerPlugin {
+
+ private static final long serialVersionUID = -5879219287669847357L;
+
+ private static final String UNABLE_TO_FIND_PIPELINE = "Unable to find pipeline";
+
+ private static final String PIPELINE = "pipeline";
+ private static final String LEVEL = "level";
+ private static final String RUN_CONFIG = "runConfig";
+
+ public static final String CONTEXT_PATH = "/hop/execPipeline";
+
+ public ExecPipelineServlet() {}
+
+ public ExecPipelineServlet(PipelineMap pipelineMap) {
+ super(pipelineMap);
+ }
+
+ /**
+ * Executes a pipeline from the specified file path.
+ *
+ * <p>Example Request:<br>
+ * GET /hop/execPipeline?pipeline=/path/to/pipeline.hpl&level=Basic
+ *
+ * <h3>Parameters</h3>
+ *
+ * <table>
+ * <tr><th>name</th><th>description</th><th>type</th></tr>
+ * <tr><td>pipeline</td><td>File path to pipeline (.hpl file)</td><td>query (required)</td></tr>
+ * <tr><td>level</td><td>Logging level (e.g. Debug, Basic, Detailed)</td><td>query (optional)</td></tr>
+ * <tr><td>runConfig</td><td>Run configuration name (optional, defaults to the default run configuration from metadata, or "local")</td><td>query (optional)</td></tr>
+ * <tr><td>*any name*</td><td>All other parameters will be set as variables/parameters in the pipeline</td><td>query</td></tr>
+ * </table>
+ *
+ * <h3>Response</h3>
+ *
+ * Returns XML with webresult containing OK or ERROR status and message.
+ */
+ @Override
+ public void doGet(HttpServletRequest request, HttpServletResponse response)
+ throws ServletException, IOException {
+ if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
+ return;
+ }
+
+ if (log.isDebug()) {
+ logDebug("Execute pipeline requested");
+ }
+
+ String[] knownOptions = new String[] {PIPELINE, LEVEL, RUN_CONFIG};
+
+ String pipelineOption = request.getParameter(PIPELINE);
+ String levelOption = request.getParameter(LEVEL);
+ String runConfigOption = request.getParameter(RUN_CONFIG);
+
+ response.setStatus(HttpServletResponse.SC_OK);
+
+ String encoding = System.getProperty("HOP_DEFAULT_SERVLET_ENCODING", null);
+ if (encoding != null && !Utils.isEmpty(encoding.trim())) {
+ response.setCharacterEncoding(encoding);
+ response.setContentType("text/html; charset=" + encoding);
+ } else {
+ response.setContentType("text/xml");
+ response.setCharacterEncoding(Const.XML_ENCODING);
+ }
+
+ PrintWriter out = response.getWriter();
+
+ if (pipelineOption == null) {
+ response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+ out.println(
+ new WebResult(WebResult.STRING_ERROR, "Missing mandatory parameter: " + PIPELINE));
+ return;
+ }
+
+ try {
+ // Get metadata provider from server config
+ IHopMetadataProvider metadataProvider = getServerConfig().getMetadataProvider();
+ if (metadataProvider == null) {
+ throw new HopException("Metadata provider is not available");
+ }
+
+ // Resolve variables in the pipeline path (e.g., ${PROJECT_HOME})
+ String resolvedPipelinePath = variables.resolve(pipelineOption);
+
+ // Load pipeline from file
+ PipelineMeta pipelineMeta =
+ new PipelineMeta(resolvedPipelinePath, metadataProvider, variables);
+
+ // Set the servlet parameters as variables/parameters in the pipeline
+ String[] parameters = pipelineMeta.listParameters();
+ Enumeration<String> parameterNames = request.getParameterNames();
+ while (parameterNames.hasMoreElements()) {
+ String parameter = parameterNames.nextElement();
+ String[] values = request.getParameterValues(parameter);
+
+ // Ignore the known options, set the rest as variables/parameters
+ if (Const.indexOfString(parameter, knownOptions) < 0) {
+ // If it's a pipeline parameter, it will be set later via setParameterValue
+ // Otherwise, set as variable
+ if (Const.indexOfString(parameter, parameters) < 0) {
+ variables.setVariable(parameter, values[0]);
+ }
+ }
+ }
+
+ // Create execution configuration
+ PipelineExecutionConfiguration pipelineExecutionConfiguration =
+ new PipelineExecutionConfiguration();
+
+ // Set logging level
+ LogLevel logLevel = null;
+ if (!Utils.isEmpty(levelOption)) {
+ logLevel = LogLevel.lookupCode(levelOption);
+ }
+ if (logLevel != null) {
+ pipelineExecutionConfiguration.setLogLevel(logLevel);
+ }
+
+ // Determine run configuration
+ String runConfigurationName;
+ if (!StringUtils.isEmpty(runConfigOption)) {
+ // Use specified run configuration
+ runConfigurationName = runConfigOption;
+ } else {
+ // Try to find a default run configuration
+ PipelineRunConfiguration defaultRunConfiguration =
+ PipelineRunConfiguration.findDefault(metadataProvider);
+ if (defaultRunConfiguration != null) {
+ runConfigurationName = defaultRunConfiguration.getName();
+ } else {
+ // Fallback to "local" if no default is found
+ runConfigurationName = "local";
+ }
+ }
+ pipelineExecutionConfiguration.setRunConfiguration(runConfigurationName);
+
+ String serverObjectId = UUID.randomUUID().toString();
+ SimpleLoggingObject servletLoggingObject =
+ new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.HOP_SERVER, null);
+ servletLoggingObject.setContainerObjectId(serverObjectId);
+ if (logLevel != null) {
+ servletLoggingObject.setLogLevel(logLevel);
+ }
+
+ // Create the pipeline engine using the run configuration from the execution configuration
+ IPipelineEngine<PipelineMeta> pipeline =
+ PipelineEngineFactory.createPipelineEngine(
+ variables,
+ variables.resolve(pipelineExecutionConfiguration.getRunConfiguration()),
+ metadataProvider,
+ pipelineMeta);
+ pipeline.setParent(servletLoggingObject);
+ pipeline.setMetadataProvider(metadataProvider);
+
+ // Set parameters from request
+ pipeline.copyParametersFromDefinitions(pipelineMeta);
+ for (String parameter : parameters) {
+ String value = request.getParameter(parameter);
+ if (value != null) {
+ pipeline.setParameterValue(parameter, value);
+ }
+ }
+ pipeline.activateParameters(pipeline);
+
+ // Create pipeline configuration for registration
+ SerializableMetadataProvider serializableMetadataProvider =
+ new SerializableMetadataProvider(metadataProvider);
+ PipelineConfiguration pipelineConfiguration =
+ new PipelineConfiguration(
+ pipelineMeta, pipelineExecutionConfiguration, serializableMetadataProvider);
+
+ // Register pipeline in the map
+ getPipelineMap()
+ .addPipeline(pipelineMeta.getName(), serverObjectId, pipeline, pipelineConfiguration);
+ pipeline.setContainerId(serverObjectId);
+
+ // Execute the pipeline synchronously
+ executePipeline(pipeline);
+
+ // Get logging output
+ String logging =
+ HopLogStore.getAppender().getBuffer(pipeline.getLogChannelId(), false).toString();
+
+ // Check for errors
+ if (pipeline.isFinished() && pipeline.getErrors() > 0) {
+ response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ out.println(new WebResult(WebResult.STRING_ERROR, "Error executing pipeline: " + logging));
+ } else {
+ out.println(new WebResult(WebResult.STRING_OK, "Pipeline executed successfully"));
+ }
+ out.flush();
+
+ } catch (Exception ex) {
+ if (ex.getMessage() != null && ex.getMessage().contains(UNABLE_TO_FIND_PIPELINE)) {
+ response.setStatus(HttpServletResponse.SC_NOT_FOUND);
+ out.println(
+ new WebResult(
+ WebResult.STRING_ERROR,
+ "Unable to find pipeline: "
+ + pipelineOption
+ + " (resolved: "
+ + (pipelineOption != null ? variables.resolve(pipelineOption) : "null")
+ + ")"));
+ } else {
+ String logging = Const.getStackTracker(ex);
+ response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ out.println(
+ new WebResult(
+ WebResult.STRING_ERROR,
+ "Unexpected error executing pipeline: " + Const.CR + logging));
+ }
+ }
+ }
+
+ protected void executePipeline(IPipelineEngine<PipelineMeta> pipeline) throws HopException {
+ pipeline.prepareExecution();
+ pipeline.startThreads();
+ pipeline.waitUntilFinished();
+ }
+
+ @Override
+ public String getContextPath() {
+ return CONTEXT_PATH;
+ }
+
+ @Override
+ public String getService() {
+ return CONTEXT_PATH + " (" + toString() + ")";
+ }
+
+ public String toString() {
+ return "Execute pipeline from file";
+ }
+}
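Note that the synchronous run is isolated in the protected executePipeline() hook rather than inlined in doGet(), which keeps the servlet testable. A minimal sketch of overriding the hook in a test (the pipelineMap fixture is an assumption, not part of this commit):

----
// Override the protected hook so no real engine runs during the test.
ExecPipelineServlet servlet =
    new ExecPipelineServlet(pipelineMap) {
      @Override
      protected void executePipeline(IPipelineEngine<PipelineMeta> pipeline) {
        // no-op: skip prepareExecution()/startThreads()/waitUntilFinished()
      }
    };
----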
diff --git a/engine/src/main/java/org/apache/hop/www/ExecWorkflowServlet.java b/engine/src/main/java/org/apache/hop/www/ExecWorkflowServlet.java
new file mode 100644
index 0000000000..b617c70df1
--- /dev/null
+++ b/engine/src/main/java/org/apache/hop/www/ExecWorkflowServlet.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hop.www;
+
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.Enumeration;
+import java.util.UUID;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hop.core.Const;
+import org.apache.hop.core.Result;
+import org.apache.hop.core.annotations.HopServerServlet;
+import org.apache.hop.core.exception.HopException;
+import org.apache.hop.core.logging.HopLogStore;
+import org.apache.hop.core.logging.LogLevel;
+import org.apache.hop.core.logging.LoggingObjectType;
+import org.apache.hop.core.logging.SimpleLoggingObject;
+import org.apache.hop.core.util.Utils;
+import org.apache.hop.metadata.api.IHopMetadataProvider;
+import org.apache.hop.workflow.WorkflowConfiguration;
+import org.apache.hop.workflow.WorkflowExecutionConfiguration;
+import org.apache.hop.workflow.WorkflowMeta;
+import org.apache.hop.workflow.config.WorkflowRunConfiguration;
+import org.apache.hop.workflow.engine.IWorkflowEngine;
+import org.apache.hop.workflow.engine.WorkflowEngineFactory;
+
+@HopServerServlet(id = "execWorkflow", name = "Execute workflow from file path")
+public class ExecWorkflowServlet extends BaseHttpServlet implements IHopServerPlugin {
+
+ private static final long serialVersionUID = -5879219287669847357L;
+
+ private static final String UNABLE_TO_FIND_WORKFLOW = "Unable to find workflow";
+
+ private static final String WORKFLOW = "workflow";
+ private static final String LEVEL = "level";
+ private static final String RUN_CONFIG = "runConfig";
+
+ public static final String CONTEXT_PATH = "/hop/execWorkflow";
+
+ public ExecWorkflowServlet() {}
+
+ public ExecWorkflowServlet(WorkflowMap workflowMap) {
+ super(workflowMap);
+ }
+
+ /**
+ * Executes a workflow from the specified file path.
+ *
+ * <p>Example Request:<br>
+ * GET /hop/execWorkflow?workflow=/path/to/workflow.hwf&level=Basic
+ *
+ * <h3>Parameters</h3>
+ *
+ * <table>
+ * <tr><th>name</th><th>description</th><th>type</th></tr>
+ * <tr><td>workflow</td><td>File path to workflow (.hwf file)</td><td>query (required)</td></tr>
+ * <tr><td>level</td><td>Logging level (e.g. Debug, Basic, Detailed)</td><td>query (optional)</td></tr>
+ * <tr><td>runConfig</td><td>Run configuration name (optional, defaults to the default workflow run configuration from metadata, or "local")</td><td>query (optional)</td></tr>
+ * <tr><td>*any name*</td><td>All other parameters will be set as variables/parameters in the workflow</td><td>query</td></tr>
+ * </table>
+ *
+ * <h3>Response</h3>
+ *
+ * Returns XML with webresult containing OK or ERROR status and message.
+ */
+ @Override
+ public void doGet(HttpServletRequest request, HttpServletResponse response)
+ throws ServletException, IOException {
+ if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
+ return;
+ }
+
+ if (log.isDebug()) {
+ logDebug("Execute workflow requested");
+ }
+
+ String[] knownOptions = new String[] {WORKFLOW, LEVEL, RUN_CONFIG};
+
+ String workflowOption = request.getParameter(WORKFLOW);
+ String levelOption = request.getParameter(LEVEL);
+ String runConfigOption = request.getParameter(RUN_CONFIG);
+
+ response.setStatus(HttpServletResponse.SC_OK);
+
+ String encoding = System.getProperty("HOP_DEFAULT_SERVLET_ENCODING", null);
+ if (encoding != null && !Utils.isEmpty(encoding.trim())) {
+ response.setCharacterEncoding(encoding);
+ response.setContentType("text/html; charset=" + encoding);
+ } else {
+ response.setContentType("text/xml");
+ response.setCharacterEncoding(Const.XML_ENCODING);
+ }
+
+ PrintWriter out = response.getWriter();
+
+ if (workflowOption == null) {
+ response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+ out.println(
+ new WebResult(WebResult.STRING_ERROR, "Missing mandatory parameter: " + WORKFLOW));
+ return;
+ }
+
+ try {
+ // Get metadata provider from server config
+ IHopMetadataProvider metadataProvider = getServerConfig().getMetadataProvider();
+ if (metadataProvider == null) {
+ throw new HopException("Metadata provider is not available");
+ }
+
+ // Resolve variables in the workflow path (e.g., ${PROJECT_HOME})
+ String resolvedWorkflowPath = variables.resolve(workflowOption);
+
+ // Load workflow from file
+ WorkflowMeta workflowMeta =
+ new WorkflowMeta(variables, resolvedWorkflowPath, metadataProvider);
+
+ // Set the servlet parameters as variables/parameters in the workflow
+ String[] parameters = workflowMeta.listParameters();
+ Enumeration<String> parameterNames = request.getParameterNames();
+ while (parameterNames.hasMoreElements()) {
+ String parameter = parameterNames.nextElement();
+ String[] values = request.getParameterValues(parameter);
+
+ // Ignore the known options, set the rest as variables/parameters
+ if (Const.indexOfString(parameter, knownOptions) < 0) {
+ // If it's a workflow parameter, it will be set later via setParameterValue
+ // Otherwise, set as variable
+ if (Const.indexOfString(parameter, parameters) < 0) {
+ variables.setVariable(parameter, values[0]);
+ }
+ }
+ }
+
+ // Create execution configuration
+ WorkflowExecutionConfiguration workflowExecutionConfiguration =
+ new WorkflowExecutionConfiguration();
+
+ // Set logging level
+ LogLevel logLevel = null;
+ if (!Utils.isEmpty(levelOption)) {
+ logLevel = LogLevel.lookupCode(levelOption);
+ }
+ if (logLevel != null) {
+ workflowExecutionConfiguration.setLogLevel(logLevel);
+ }
+
+ // Determine run configuration
+ String runConfigurationName;
+ if (!StringUtils.isEmpty(runConfigOption)) {
+ // Use specified run configuration
+ runConfigurationName = runConfigOption;
+ } else {
+ // Try to find a default run configuration
+ WorkflowRunConfiguration defaultRunConfiguration =
+ WorkflowRunConfiguration.findDefault(metadataProvider);
+ if (defaultRunConfiguration != null) {
+ runConfigurationName = defaultRunConfiguration.getName();
+ } else {
+ // Fallback to "local" if no default is found
+ runConfigurationName = "local";
+ }
+ }
+ workflowExecutionConfiguration.setRunConfiguration(runConfigurationName);
+
+ String serverObjectId = UUID.randomUUID().toString();
+ SimpleLoggingObject servletLoggingObject =
+ new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.HOP_SERVER, null);
+ servletLoggingObject.setContainerObjectId(serverObjectId);
+ if (logLevel != null) {
+ servletLoggingObject.setLogLevel(logLevel);
+ }
+
+ // Create the workflow engine using the run configuration from the execution configuration
+ IWorkflowEngine<WorkflowMeta> workflow =
+ WorkflowEngineFactory.createWorkflowEngine(
+ variables,
+ variables.resolve(workflowExecutionConfiguration.getRunConfiguration()),
+ metadataProvider,
+ workflowMeta,
+ servletLoggingObject);
+ workflow.setMetadataProvider(metadataProvider);
+
+ // Set parameters from request
+ workflow.copyParametersFromDefinitions(workflowMeta);
+ for (String parameter : parameters) {
+ String value = request.getParameter(parameter);
+ if (value != null) {
+ workflow.setParameterValue(parameter, value);
+ }
+ }
+ workflow.activateParameters(workflow);
+
+ // Create workflow configuration for registration
+ WorkflowConfiguration workflowConfiguration =
+ new WorkflowConfiguration(workflowMeta, workflowExecutionConfiguration, metadataProvider);
+
+ // Register workflow in the map
+ getWorkflowMap()
+ .addWorkflow(workflowMeta.getName(), serverObjectId, workflow, workflowConfiguration);
+ workflow.setContainerId(serverObjectId);
+
+ // Execute the workflow synchronously
+ executeWorkflow(workflow);
+
+ // Get logging output
+ String logging =
+ HopLogStore.getAppender().getBuffer(workflow.getLogChannelId(), false).toString();
+
+ // Check for errors
+ Result result = workflow.getResult();
+ if (workflow.isFinished() && (result == null || result.getNrErrors() > 0)) {
+ response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ out.println(new WebResult(WebResult.STRING_ERROR, "Error executing workflow: " + logging));
+ } else {
+ out.println(new WebResult(WebResult.STRING_OK, "Workflow executed successfully"));
+ }
+ out.flush();
+
+ } catch (Exception ex) {
+ if (ex.getMessage() != null && ex.getMessage().contains(UNABLE_TO_FIND_WORKFLOW)) {
+ response.setStatus(HttpServletResponse.SC_NOT_FOUND);
+ out.println(
+ new WebResult(
+ WebResult.STRING_ERROR,
+ "Unable to find workflow: "
+ + workflowOption
+ + " (resolved: "
+ + (workflowOption != null ? variables.resolve(workflowOption) : "null")
+ + ")"));
+ } else {
+ String logging = Const.getStackTracker(ex);
+ response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ out.println(
+ new WebResult(
+ WebResult.STRING_ERROR,
+ "Unexpected error executing workflow: " + Const.CR + logging));
+ }
+ }
+ }
+
+ protected void executeWorkflow(IWorkflowEngine<WorkflowMeta> workflow) throws HopException {
+ // startExecution() is synchronous and returns the Result when finished
+ workflow.startExecution();
+ }
+
+ @Override
+ public String getContextPath() {
+ return CONTEXT_PATH;
+ }
+
+ @Override
+ public String getService() {
+ return CONTEXT_PATH + " (" + toString() + ")";
+ }
+
+ public String toString() {
+ return "Execute workflow from file";
+ }
+}
diff --git a/integration-tests/hop_server/main-0006-test-exec-pipeline-workflow.hwf b/integration-tests/hop_server/main-0006-test-exec-pipeline-workflow.hwf
new file mode 100644
index 0000000000..cc440da8c2
--- /dev/null
+++ b/integration-tests/hop_server/main-0006-test-exec-pipeline-workflow.hwf
@@ -0,0 +1,402 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-->
+<workflow>
+ <name>main-0006-test-exec-pipeline-workflow</name>
+ <name_sync_with_filename>Y</name_sync_with_filename>
+ <description>Integration tests for execPipeline and execWorkflow servlets using the samples project</description>
+ <extended_description/>
+ <workflow_version/>
+ <created_user>-</created_user>
+ <created_date>2024/01/01 00:00:00.000</created_date>
+ <modified_user>-</modified_user>
+ <modified_date>2024/01/01 00:00:00.000</modified_date>
+ <parameters>
+ </parameters>
+ <actions>
+ <action>
+ <name>Start</name>
+ <description/>
+ <type>SPECIAL</type>
+ <attributes/>
+ <DayOfMonth>1</DayOfMonth>
+ <doNotWaitOnFirstExecution>N</doNotWaitOnFirstExecution>
+ <hour>12</hour>
+ <intervalMinutes>60</intervalMinutes>
+ <intervalSeconds>0</intervalSeconds>
+ <minutes>0</minutes>
+ <repeat>N</repeat>
+ <schedulerType>0</schedulerType>
+ <weekDay>1</weekDay>
+ <parallel>N</parallel>
+ <xloc>50</xloc>
+ <yloc>50</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 1: ExecPipeline with absolute path to samples</name>
+ <description>Test execPipeline servlet with absolute file path to samples pipeline</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster --fail -L -s "http://hop-server:8181/hop/execPipeline?pipeline=/opt/hop/config/projects/samples/pipelines/pipeline-with-parameter.hpl&amp;PRM_EXAMPLE=test_value&amp;level=BASIC" | grep -q "&lt;result&gt;OK&lt;/result&gt;" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>50</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 2: ExecPipeline with PROJECT_HOME path to samples</name>
+ <description>Test execPipeline servlet with ${PROJECT_HOME} variable in path to samples</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster --fail -L -s "http://hop-server:8181/hop/execPipeline?pipeline=%24%7BPROJECT_HOME%7D/pipelines/pipeline-with-parameter.hpl&amp;PRM_EXAMPLE=test_from_var&amp;level=BASIC" | grep -q "&lt;result&gt;OK&lt;/result&gt;" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>128</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 3: ExecPipeline with absolute path to read-transactions</name>
+ <description>Test execPipeline servlet with absolute path to read-transactions sample</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster --fail -L -s "http://hop-server:8181/hop/execPipeline?pipeline=/opt/hop/config/projects/samples/pipelines/read-transactions.hpl&amp;level=BASIC" | grep -q "&lt;result&gt;OK&lt;/result&gt;" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>208</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 4: ExecWorkflow with absolute path to samples</name>
+ <description>Test execWorkflow servlet with absolute file path to samples workflow</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster --fail -L -s "http://hop-server:8181/hop/execWorkflow?workflow=/opt/hop/config/projects/samples/workflows/parallel/parallel-workflow.hwf&amp;level=BASIC" | grep -q "&lt;result&gt;OK&lt;/result&gt;" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>288</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 5: ExecWorkflow with PROJECT_HOME path to samples</name>
+ <description>Test execWorkflow servlet with ${PROJECT_HOME} variable in path to samples</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster --fail -L -s "http://hop-server:8181/hop/execWorkflow?workflow=%24%7BPROJECT_HOME%7D/workflows/parallel/parallel-workflow.hwf&amp;level=BASIC" | grep -q "&lt;result&gt;OK&lt;/result&gt;" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>368</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 6: ExecPipeline error - missing parameter</name>
+ <description>Test execPipeline servlet error handling - missing pipeline parameter</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster -L -s -w "%{http_code}" "http://hop-server:8181/hop/execPipeline?level=BASIC" | grep -q "400" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>448</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 8: ExecPipeline error - file not found</name>
+ <description>Test execPipeline servlet error handling - file not found</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <filename/>
+ <insertScript>Y</insertScript>
+ <logext/>
+ <logfile/>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster -L -s -w "%{http_code}" "http://hop-server:8181/hop/execPipeline?pipeline=/opt/hop/config/projects/samples/pipelines/nonexistent-pipeline.hpl" | grep -q "404" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <work_directory/>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>608</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 7: ExecWorkflow error - missing parameter</name>
+ <description>Test execWorkflow servlet error handling - missing workflow parameter</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <filename/>
+ <insertScript>Y</insertScript>
+ <logext/>
+ <logfile/>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster -L -s -w "%{http_code}" "http://hop-server:8181/hop/execWorkflow?level=BASIC" | grep -q "400" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <work_directory/>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>528</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Test 9: ExecWorkflow error - file not found</name>
+ <description>Test execWorkflow servlet error handling - file not found</description>
+ <type>SHELL</type>
+ <attributes/>
+ <add_date>N</add_date>
+ <add_time>N</add_time>
+ <arg_from_previous>N</arg_from_previous>
+ <arguments>
+</arguments>
+ <exec_per_row>N</exec_per_row>
+ <insertScript>Y</insertScript>
+ <loglevel>Basic</loglevel>
+ <script>curl -u cluster:cluster -L -s -w "%{http_code}" "http://hop-server:8181/hop/execWorkflow?workflow=/opt/hop/config/projects/samples/workflows/nonexistent-workflow.hwf" | grep -q "404" || exit 1</script>
+ <set_append_logfile>N</set_append_logfile>
+ <set_logfile>N</set_logfile>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>688</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Abort workflow</name>
+ <description/>
+ <type>ABORT</type>
+ <attributes/>
+ <always_log_rows>N</always_log_rows>
+ <parallel>N</parallel>
+ <xloc>560</xloc>
+ <yloc>432</yloc>
+ <attributes_hac/>
+ </action>
+ <action>
+ <name>Success</name>
+ <description/>
+ <type>SUCCESS</type>
+ <attributes/>
+ <parallel>N</parallel>
+ <xloc>192</xloc>
+ <yloc>784</yloc>
+ <attributes_hac/>
+ </action>
+ </actions>
+ <hops>
+ <hop>
+ <from>Start</from>
+ <to>Test 1: ExecPipeline with absolute path to samples</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>Y</unconditional>
+ </hop>
+ <hop>
+ <from>Test 1: ExecPipeline with absolute path to samples</from>
+ <to>Test 2: ExecPipeline with PROJECT_HOME path to samples</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 2: ExecPipeline with PROJECT_HOME path to samples</from>
+ <to>Test 3: ExecPipeline with absolute path to read-transactions</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 3: ExecPipeline with absolute path to read-transactions</from>
+ <to>Test 4: ExecWorkflow with absolute path to samples</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 4: ExecWorkflow with absolute path to samples</from>
+ <to>Test 5: ExecWorkflow with PROJECT_HOME path to samples</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 5: ExecWorkflow with PROJECT_HOME path to samples</from>
+ <to>Test 6: ExecPipeline error - missing parameter</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 1: ExecPipeline with absolute path to samples</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 2: ExecPipeline with PROJECT_HOME path to samples</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 3: ExecPipeline with absolute path to read-transactions</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 4: ExecWorkflow with absolute path to samples</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 5: ExecWorkflow with PROJECT_HOME path to samples</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 6: ExecPipeline error - missing parameter</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 8: ExecPipeline error - file not found</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 9: ExecWorkflow error - file not found</from>
+ <to>Abort workflow</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 9: ExecWorkflow error - file not found</from>
+ <to>Success</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 8: ExecPipeline error - file not found</from>
+ <to>Test 9: ExecWorkflow error - file not found</to>
+ <enabled>Y</enabled>
+ <evaluation>N</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 6: ExecPipeline error - missing parameter</from>
+ <to>Test 7: ExecWorkflow error - missing parameter</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ <hop>
+ <from>Test 7: ExecWorkflow error - missing parameter</from>
+ <to>Test 8: ExecPipeline error - file not found</to>
+ <enabled>Y</enabled>
+ <evaluation>Y</evaluation>
+ <unconditional>N</unconditional>
+ </hop>
+ </hops>
+ <notepads>
+ </notepads>
+ <attributes/>
+</workflow>