http://git-wip-us.apache.org/repos/asf/airavata/blob/e13d90da/modules/registry/airavata-mongo-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java ---------------------------------------------------------------------- diff --git a/modules/registry/airavata-mongo-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java b/modules/registry/airavata-mongo-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java new file mode 100644 index 0000000..ef3152f --- /dev/null +++ b/modules/registry/airavata-mongo-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java @@ -0,0 +1,714 @@ +/* +* +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, +* software distributed under the License is distributed on an +* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +* KIND, either express or implied. See the License for the +* specific language governing permissions and limitations +* under the License. 
+* +*/ + +package org.apache.airavata.persistance.registry.jpa.utils; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.airavata.common.utils.StringUtil; +import org.apache.airavata.model.appcatalog.appinterface.*; +import org.apache.airavata.model.workspace.Gateway; +import org.apache.airavata.model.workspace.Project; +import org.apache.airavata.model.workspace.experiment.ActionableGroup; +import org.apache.airavata.model.workspace.experiment.AdvancedInputDataHandling; +import org.apache.airavata.model.workspace.experiment.AdvancedOutputDataHandling; +import org.apache.airavata.model.workspace.experiment.ApplicationStatus; +import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling; +import org.apache.airavata.model.workspace.experiment.CorrectiveAction; +import org.apache.airavata.model.workspace.experiment.DataTransferDetails; +import org.apache.airavata.model.workspace.experiment.ErrorCategory; +import org.apache.airavata.model.workspace.experiment.ErrorDetails; +import org.apache.airavata.model.workspace.experiment.ExecutionUnit; +import org.apache.airavata.model.workspace.experiment.Experiment; +import org.apache.airavata.model.workspace.experiment.ExperimentState; +import org.apache.airavata.model.workspace.experiment.ExperimentStatus; +import org.apache.airavata.model.workspace.experiment.ExperimentSummary; +import org.apache.airavata.model.workspace.experiment.JobDetails; +import org.apache.airavata.model.workspace.experiment.JobState; +import org.apache.airavata.model.workspace.experiment.JobStatus; +import org.apache.airavata.model.workspace.experiment.QualityOfServiceParams; +import org.apache.airavata.model.workspace.experiment.TaskDetails; +import org.apache.airavata.model.workspace.experiment.TaskState; +import org.apache.airavata.model.workspace.experiment.TaskStatus; +import org.apache.airavata.model.workspace.experiment.TransferState; +import 
org.apache.airavata.model.workspace.experiment.TransferStatus;
import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;
import org.apache.airavata.model.workspace.experiment.WorkflowNodeState;
import org.apache.airavata.model.workspace.experiment.WorkflowNodeStatus;
import org.apache.airavata.persistance.registry.jpa.Resource;
import org.apache.airavata.persistance.registry.jpa.ResourceType;
import org.apache.airavata.persistance.registry.jpa.resources.*;
import org.apache.airavata.registry.cpi.RegistryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Static conversion helpers that translate registry persistence resources
 * into the corresponding Thrift data-model objects (Project, Experiment,
 * TaskDetails, statuses, etc.).
 *
 * <p>All converters are null-tolerant: a {@code null} input resource yields a
 * {@code null} result, and list converters return an empty list for a
 * {@code null}/empty input rather than {@code null}.
 */
public class ThriftDataModelConversion {
    private final static Logger logger = LoggerFactory.getLogger(ThriftDataModelConversion.class);

    /** State string recorded when a persisted status has no state set. */
    private static final String UNKNOWN_STATE = "UNKNOWN";

    /** Noninstantiable utility class. */
    private ThriftDataModelConversion() {
    }

    /**
     * Defaults a missing/blank state on the given status resource to
     * {@code "UNKNOWN"} and returns the (now non-blank) state string.
     * Note: this mutates the passed-in resource, matching the behavior the
     * individual status converters previously duplicated inline.
     */
    private static String defaultedState(StatusResource status) {
        if (status.getState() == null || status.getState().isEmpty()) {
            status.setState(UNKNOWN_STATE);
        }
        return status.getState();
    }

    /**
     * Converts a project resource into a Thrift {@link Project}, including the
     * list of users the project is shared with.
     *
     * @param pr persisted project resource, may be null
     * @return the Thrift project, or null if {@code pr} is null
     * @throws RegistryException if the underlying resource lookups fail
     */
    public static Project getProject(ProjectResource pr) throws RegistryException {
        if (pr == null) {
            return null;
        }
        Project project = new Project();
        project.setProjectId(pr.getId());
        project.setName(pr.getName());
        if (pr.getCreationTime() != null) {
            project.setCreationTime(pr.getCreationTime().getTime());
        }
        project.setDescription(pr.getDescription());
        project.setOwner(pr.getWorker().getUser());
        List<String> sharedUsers = new ArrayList<String>();
        List<ProjectUserResource> projectUserList = pr.getProjectUserList();
        if (projectUserList != null && !projectUserList.isEmpty()) {
            for (ProjectUserResource resource : projectUserList) {
                sharedUsers.add(resource.getUserName());
            }
        }
        project.setSharedUsers(sharedUsers);
        return project;
    }

    /**
     * Converts a gateway resource into a Thrift {@link Gateway}.
     */
    public static Gateway getGateway(GatewayResource resource) {
        Gateway gateway = new Gateway();
        gateway.setGatewayId(resource.getGatewayId());
        gateway.setGatewayName(resource.getGatewayName());
        gateway.setDomain(resource.getDomain());
        gateway.setEmailAddress(resource.getEmailAddress());
        return gateway;
    }

    /**
     * Converts a list of gateway resources; every element must be a
     * {@link GatewayResource} or a ClassCastException is thrown.
     */
    public static List<Gateway> getAllGateways(List<Resource> gatewayList) {
        List<Gateway> gateways = new ArrayList<Gateway>();
        for (Resource resource : gatewayList) {
            gateways.add(getGateway((GatewayResource) resource));
        }
        return gateways;
    }

    /**
     * Converts an experiment resource into a fully populated Thrift
     * {@link Experiment}, including inputs/outputs, statuses, workflow nodes,
     * errors and (when present) the user configuration data.
     *
     * @param experimentResource persisted experiment, may be null
     * @return the Thrift experiment, or null if the input is null
     * @throws RegistryException if nested resource lookups fail
     */
    public static Experiment getExperiment(ExperimentResource experimentResource) throws RegistryException {
        if (experimentResource == null) {
            return null;
        }
        Experiment experiment = new Experiment();
        if (experimentResource.getProject() != null) {
            experiment.setProjectId(experimentResource.getProject().getId());
        }
        experiment.setExperimentId(experimentResource.getExpID());
        // Guard like getProject(): older rows may carry no creation timestamp.
        if (experimentResource.getCreationTime() != null) {
            experiment.setCreationTime(experimentResource.getCreationTime().getTime());
        }
        experiment.setUserName(experimentResource.getExecutionUser());
        experiment.setName(experimentResource.getExpName());
        experiment.setDescription(experimentResource.getDescription());
        experiment.setApplicationId(experimentResource.getApplicationId());
        experiment.setApplicationVersion(experimentResource.getApplicationVersion());
        experiment.setWorkflowTemplateId(experimentResource.getWorkflowTemplateId());
        experiment.setEnableEmailNotification(experimentResource.isEnableEmailNotifications());
        experiment.setGatewayExecutionId(experimentResource.getGatewayExecutionId());
        if (experiment.isEnableEmailNotification()) {
            List<NotificationEmailResource> notificationEmails = experimentResource.getNotificationEmails();
            experiment.setEmailAddresses(getEmailAddresses(notificationEmails));
        }
        experiment.setWorkflowTemplateVersion(experimentResource.getWorkflowTemplateVersion());
        experiment.setWorkflowExecutionInstanceId(experimentResource.getWorkflowExecutionId());
        experiment.setExperimentInputs(getExpInputs(experimentResource.getExperimentInputs()));
        experiment.setExperimentOutputs(getExpOutputs(experimentResource.getExperimentOutputs()));
        StatusResource experimentStatus = experimentResource.getExperimentStatus();
        if (experimentStatus != null) {
            experiment.setExperimentStatus(getExperimentStatus(experimentStatus));
        }
        // NOTE(review): the experiment state-change list is built from
        // workflow-node statuses here; confirm that is intentional.
        List<StatusResource> changeList = experimentResource.getWorkflowNodeStatuses();
        if (changeList != null && !changeList.isEmpty()) {
            experiment.setStateChangeList(getWorkflowNodeStatusList(changeList));
        }
        List<WorkflowNodeDetailResource> workflowNodeDetails = experimentResource.getWorkflowNodeDetails();
        if (workflowNodeDetails != null && !workflowNodeDetails.isEmpty()) {
            experiment.setWorkflowNodeDetailsList(getWfNodeList(workflowNodeDetails));
        }
        List<ErrorDetailResource> errorDetails = experimentResource.getErrorDetails();
        if (errorDetails != null && !errorDetails.isEmpty()) {
            experiment.setErrors(getErrorDetailList(errorDetails));
        }
        String expID = experimentResource.getExpID();
        if (experimentResource.isExists(ResourceType.CONFIG_DATA, expID)) {
            ConfigDataResource userConfigData = experimentResource.getUserConfigData(expID);
            experiment.setUserConfigurationData(getUserConfigData(userConfigData));
        }
        return experiment;
    }

    /**
     * Converts an experiment resource into a lightweight
     * {@link ExperimentSummary} (id, owner, name, status, errors).
     */
    public static ExperimentSummary getExperimentSummary(ExperimentResource experimentResource) throws RegistryException {
        if (experimentResource == null) {
            return null;
        }
        ExperimentSummary experimentSummary = new ExperimentSummary();
        if (experimentResource.getProject() != null) {
            experimentSummary.setProjectId(experimentResource.getProject().getId());
        }
        experimentSummary.setExperimentId(experimentResource.getExpID());
        // Guard like getProject(): older rows may carry no creation timestamp.
        if (experimentResource.getCreationTime() != null) {
            experimentSummary.setCreationTime(experimentResource.getCreationTime().getTime());
        }
        experimentSummary.setUserName(experimentResource.getExecutionUser());
        experimentSummary.setName(experimentResource.getExpName());
        experimentSummary.setDescription(experimentResource.getDescription());
        experimentSummary.setApplicationId(experimentResource.getApplicationId());
        StatusResource experimentStatus = experimentResource.getExperimentStatus();
        if (experimentStatus != null) {
            experimentSummary.setExperimentStatus(getExperimentStatus(experimentStatus));
        }
        List<ErrorDetailResource> errorDetails = experimentResource.getErrorDetails();
        if (errorDetails != null && !errorDetails.isEmpty()) {
            experimentSummary.setErrors(getErrorDetailList(errorDetails));
        }
        return experimentSummary;
    }

    /**
     * Converts an experiment-, node- or application-level input resource into
     * an {@link InputDataObjectType}. Returns null for null input or for an
     * unrecognized resource type.
     */
    public static InputDataObjectType getInput(Object object) {
        if (object == null) {
            return null;
        }
        InputDataObjectType dataObjectType = new InputDataObjectType();
        if (object instanceof ExperimentInputResource) {
            ExperimentInputResource expInput = (ExperimentInputResource) object;
            dataObjectType.setName(expInput.getExperimentKey());
            dataObjectType.setValue(expInput.getValue());
            if (expInput.getDataType() != null) {
                dataObjectType.setType(DataType.valueOf(expInput.getDataType()));
            }
            dataObjectType.setMetaData(expInput.getMetadata());
            dataObjectType.setApplicationArgument(expInput.getAppArgument());
            dataObjectType.setStandardInput(expInput.isStandardInput());
            dataObjectType.setUserFriendlyDescription(expInput.getUserFriendlyDesc());
            dataObjectType.setInputOrder(expInput.getInputOrder());
            dataObjectType.setIsRequired(expInput.getRequired());
            dataObjectType.setRequiredToAddedToCommandLine(expInput.getRequiredToCMD());
            dataObjectType.setDataStaged(expInput.isDataStaged());
            return dataObjectType;
        } else if (object instanceof NodeInputResource) {
            NodeInputResource nodeInputResource = (NodeInputResource) object;
            dataObjectType.setName(nodeInputResource.getInputKey());
            dataObjectType.setValue(nodeInputResource.getValue());
            if (nodeInputResource.getDataType() != null) {
                dataObjectType.setType(DataType.valueOf(nodeInputResource.getDataType()));
            }
            dataObjectType.setMetaData(nodeInputResource.getMetadata());
            dataObjectType.setApplicationArgument(nodeInputResource.getAppArgument());
            dataObjectType.setStandardInput(nodeInputResource.isStandardInput());
            dataObjectType.setUserFriendlyDescription(nodeInputResource.getUserFriendlyDesc());
            dataObjectType.setInputOrder(nodeInputResource.getInputOrder());
            dataObjectType.setIsRequired(nodeInputResource.getRequired());
            dataObjectType.setRequiredToAddedToCommandLine(nodeInputResource.getRequiredToCMD());
            dataObjectType.setDataStaged(nodeInputResource.isDataStaged());
            return dataObjectType;
        } else if (object instanceof ApplicationInputResource) {
            ApplicationInputResource inputResource = (ApplicationInputResource) object;
            dataObjectType.setName(inputResource.getInputKey());
            dataObjectType.setValue(inputResource.getValue());
            if (inputResource.getDataType() != null) {
                dataObjectType.setType(DataType.valueOf(inputResource.getDataType()));
            }
            dataObjectType.setMetaData(inputResource.getMetadata());
            dataObjectType.setApplicationArgument(inputResource.getAppArgument());
            dataObjectType.setStandardInput(inputResource.isStandardInput());
            dataObjectType.setUserFriendlyDescription(inputResource.getUserFriendlyDesc());
            dataObjectType.setInputOrder(inputResource.getInputOrder());
            dataObjectType.setIsRequired(inputResource.isRequired());
            dataObjectType.setRequiredToAddedToCommandLine(inputResource.isRequiredToCMD());
            dataObjectType.setDataStaged(inputResource.isDataStaged());
            return dataObjectType;
        } else {
            return null;
        }
    }

    /**
     * Converts an experiment-, node- or application-level output resource into
     * an {@link OutputDataObjectType}. Returns null for null input or for an
     * unrecognized resource type.
     */
    public static OutputDataObjectType getOutput(Object object) {
        if (object == null) {
            return null;
        }
        OutputDataObjectType dataObjectType = new OutputDataObjectType();
        if (object instanceof ExperimentOutputResource) {
            ExperimentOutputResource expOutput = (ExperimentOutputResource) object;
            dataObjectType.setName(expOutput.getExperimentKey());
            dataObjectType.setValue(expOutput.getValue());
            if (expOutput.getDataType() != null) {
                dataObjectType.setType(DataType.valueOf(expOutput.getDataType()));
            }
            dataObjectType.setIsRequired(expOutput.getRequired());
            dataObjectType.setRequiredToAddedToCommandLine(expOutput.getRequiredToCMD());
            dataObjectType.setDataMovement(expOutput.isDataMovement());
            dataObjectType.setLocation(expOutput.getDataNameLocation());
            dataObjectType.setSearchQuery(expOutput.getSearchQuery());
            dataObjectType.setApplicationArgument(expOutput.getAppArgument());
            return dataObjectType;
        } else if (object instanceof NodeOutputResource) {
            NodeOutputResource nodeOutputResource = (NodeOutputResource) object;
            dataObjectType.setName(nodeOutputResource.getOutputKey());
            dataObjectType.setValue(nodeOutputResource.getValue());
            if (nodeOutputResource.getDataType() != null) {
                dataObjectType.setType(DataType.valueOf(nodeOutputResource.getDataType()));
            }
            dataObjectType.setIsRequired(nodeOutputResource.getRequired());
            dataObjectType.setRequiredToAddedToCommandLine(nodeOutputResource.getRequiredToCMD());
            dataObjectType.setDataMovement(nodeOutputResource.isDataMovement());
            dataObjectType.setLocation(nodeOutputResource.getDataNameLocation());
            dataObjectType.setSearchQuery(nodeOutputResource.getSearchQuery());
            dataObjectType.setApplicationArgument(nodeOutputResource.getAppArgument());
            return dataObjectType;
        } else if (object instanceof ApplicationOutputResource) {
            ApplicationOutputResource outputResource = (ApplicationOutputResource) object;
            dataObjectType.setName(outputResource.getOutputKey());
            dataObjectType.setValue(outputResource.getValue());
            dataObjectType.setIsRequired(outputResource.isRequired());
            dataObjectType.setRequiredToAddedToCommandLine(outputResource.isRequiredToCMD());
            if (outputResource.getDataType() != null) {
                dataObjectType.setType(DataType.valueOf(outputResource.getDataType()));
            }
            dataObjectType.setDataMovement(outputResource.isDataMovement());
            dataObjectType.setLocation(outputResource.getDataNameLocation());
            dataObjectType.setSearchQuery(outputResource.getSearchQuery());
            dataObjectType.setApplicationArgument(outputResource.getAppArgument());
            return dataObjectType;
        } else {
            return null;
        }
    }

    /** Extracts the email addresses from a list of notification resources. */
    public static List<String> getEmailAddresses(List<NotificationEmailResource> resourceList) {
        List<String> emailAddresses = new ArrayList<String>();
        if (resourceList != null && !resourceList.isEmpty()) {
            for (NotificationEmailResource emailResource : resourceList) {
                emailAddresses.add(emailResource.getEmailAddress());
            }
        }
        return emailAddresses;
    }

    /** Converts experiment input resources; returns an empty list for null/empty input. */
    public static List<InputDataObjectType> getExpInputs(List<ExperimentInputResource> exInputList) {
        List<InputDataObjectType> expInputs = new ArrayList<InputDataObjectType>();
        if (exInputList != null && !exInputList.isEmpty()) {
            for (ExperimentInputResource inputResource : exInputList) {
                expInputs.add(getInput(inputResource));
            }
        }
        return expInputs;
    }

    /** Converts experiment output resources; returns an empty list for null/empty input. */
    public static List<OutputDataObjectType> getExpOutputs(List<ExperimentOutputResource> experimentOutputResourceList) {
        List<OutputDataObjectType> exOutputs = new ArrayList<OutputDataObjectType>();
        if (experimentOutputResourceList != null && !experimentOutputResourceList.isEmpty()) {
            for (ExperimentOutputResource outputResource : experimentOutputResourceList) {
                exOutputs.add(getOutput(outputResource));
            }
        }
        return exOutputs;
    }

    /** Converts workflow-node input resources; returns an empty list for null/empty input. */
    public static List<InputDataObjectType> getNodeInputs(List<NodeInputResource> nodeInputResources) {
        List<InputDataObjectType> nodeInputs = new ArrayList<InputDataObjectType>();
        if (nodeInputResources != null && !nodeInputResources.isEmpty()) {
            for (NodeInputResource inputResource : nodeInputResources) {
                nodeInputs.add(getInput(inputResource));
            }
        }
        return nodeInputs;
    }

    /** Converts workflow-node output resources; returns an empty list for null/empty input. */
    public static List<OutputDataObjectType> getNodeOutputs(List<NodeOutputResource> nodeOutputResourceList) {
        List<OutputDataObjectType> nodeOutputs = new ArrayList<OutputDataObjectType>();
        if (nodeOutputResourceList != null && !nodeOutputResourceList.isEmpty()) {
            for (NodeOutputResource outputResource : nodeOutputResourceList) {
                nodeOutputs.add(getOutput(outputResource));
            }
        }
        return nodeOutputs;
    }

    /** Converts application input resources; returns an empty list for null/empty input. */
    public static List<InputDataObjectType> getApplicationInputs(List<ApplicationInputResource> applicationInputResources) {
        List<InputDataObjectType> appInputs = new ArrayList<InputDataObjectType>();
        if (applicationInputResources != null && !applicationInputResources.isEmpty()) {
            for (ApplicationInputResource inputResource : applicationInputResources) {
                appInputs.add(getInput(inputResource));
            }
        }
        return appInputs;
    }

    /** Converts application output resources; returns an empty list for null/empty input. */
    public static List<OutputDataObjectType> getApplicationOutputs(List<ApplicationOutputResource> outputResources) {
        List<OutputDataObjectType> appOutputs = new ArrayList<OutputDataObjectType>();
        if (outputResources != null && !outputResources.isEmpty()) {
            for (ApplicationOutputResource outputResource : outputResources) {
                appOutputs.add(getOutput(outputResource));
            }
        }
        return appOutputs;
    }

    /**
     * Converts a status resource into an {@link ExperimentStatus}; a blank
     * persisted state is defaulted to UNKNOWN.
     */
    public static ExperimentStatus getExperimentStatus(StatusResource status) {
        if (status == null) {
            return null;
        }
        ExperimentStatus experimentStatus = new ExperimentStatus();
        experimentStatus.setExperimentState(ExperimentState.valueOf(defaultedState(status)));
        experimentStatus.setTimeOfStateChange(status.getStatusUpdateTime().getTime());
        return experimentStatus;
    }

    /**
     * Converts a status resource into a {@link WorkflowNodeStatus}; a blank
     * persisted state is defaulted to UNKNOWN.
     */
    public static WorkflowNodeStatus getWorkflowNodeStatus(StatusResource status) {
        if (status == null) {
            return null;
        }
        WorkflowNodeStatus workflowNodeStatus = new WorkflowNodeStatus();
        workflowNodeStatus.setWorkflowNodeState(WorkflowNodeState.valueOf(defaultedState(status)));
        workflowNodeStatus.setTimeOfStateChange(status.getStatusUpdateTime().getTime());
        return workflowNodeStatus;
    }

    /**
     * Converts a status resource into a {@link TaskStatus}; a blank persisted
     * state is defaulted to UNKNOWN.
     */
    public static TaskStatus getTaskStatus(StatusResource status) {
        if (status == null) {
            return null;
        }
        TaskStatus taskStatus = new TaskStatus();
        taskStatus.setExecutionState(TaskState.valueOf(defaultedState(status)));
        taskStatus.setTimeOfStateChange(status.getStatusUpdateTime().getTime());
        return taskStatus;
    }

    /**
     * Converts a status resource into a {@link JobStatus}; a blank persisted
     * state is defaulted to UNKNOWN.
     */
    public static JobStatus getJobStatus(StatusResource status) {
        if (status == null) {
            return null;
        }
        JobStatus jobStatus = new JobStatus();
        jobStatus.setJobState(JobState.valueOf(defaultedState(status)));
        jobStatus.setTimeOfStateChange(status.getStatusUpdateTime().getTime());
        return jobStatus;
    }

    /**
     * Converts a status resource into a {@link TransferStatus}; a blank
     * persisted state is defaulted to UNKNOWN.
     */
    public static TransferStatus getTransferStatus(StatusResource status) {
        if (status == null) {
            return null;
        }
        TransferStatus transferStatus = new TransferStatus();
        transferStatus.setTransferState(TransferState.valueOf(defaultedState(status)));
        transferStatus.setTimeOfStateChange(status.getStatusUpdateTime().getTime());
        return transferStatus;
    }

    /**
     * Converts a status resource into an {@link ApplicationStatus}; the
     * application state is a free-form string, so no enum lookup is done.
     */
    public static ApplicationStatus getApplicationStatus(StatusResource status) {
        if (status == null) {
            return null;
        }
        ApplicationStatus applicationStatus = new ApplicationStatus();
        applicationStatus.setApplicationState(defaultedState(status));
        applicationStatus.setTimeOfStateChange(status.getStatusUpdateTime().getTime());
        return applicationStatus;
    }

    /** Converts a list of status resources into workflow-node statuses. */
    public static List<WorkflowNodeStatus> getWorkflowNodeStatusList(List<StatusResource> statuses) {
        List<WorkflowNodeStatus> wfNodeStatuses = new ArrayList<WorkflowNodeStatus>();
        if (statuses != null && !statuses.isEmpty()) {
            for (StatusResource statusResource : statuses) {
                wfNodeStatuses.add(getWorkflowNodeStatus(statusResource));
            }
        }
        return wfNodeStatuses;
    }

    /**
     * Converts a workflow-node resource into a {@link WorkflowNodeDetails},
     * including its inputs/outputs, tasks, status and errors.
     */
    public static WorkflowNodeDetails getWorkflowNodeDetails(WorkflowNodeDetailResource nodeDetailResource) throws RegistryException {
        if (nodeDetailResource == null) {
            return null;
        }
        WorkflowNodeDetails wfNode = new WorkflowNodeDetails();
        wfNode.setNodeInstanceId(nodeDetailResource.getNodeInstanceId());
        // Guard like getProject(): older rows may carry no creation timestamp.
        if (nodeDetailResource.getCreationTime() != null) {
            wfNode.setCreationTime(nodeDetailResource.getCreationTime().getTime());
        }
        wfNode.setNodeName(nodeDetailResource.getNodeName());
        wfNode.setNodeInputs(getNodeInputs(nodeDetailResource.getNodeInputs()));
        wfNode.setNodeOutputs(getNodeOutputs(nodeDetailResource.getNodeOutputs()));
        wfNode.setTaskDetailsList(getTaskDetailsList(nodeDetailResource.getTaskDetails()));
        wfNode.setWorkflowNodeStatus(getWorkflowNodeStatus(nodeDetailResource.getWorkflowNodeStatus()));
        wfNode.setErrors(getErrorDetailList(nodeDetailResource.getErrorDetails()));
        wfNode.setExecutionUnit(ExecutionUnit.valueOf(nodeDetailResource.getExecutionUnit()));
        wfNode.setExecutionUnitData(nodeDetailResource.getExecutionUnitData());
        return wfNode;
    }

    /** Converts a list of workflow-node resources. */
    public static List<WorkflowNodeDetails> getWfNodeList(List<WorkflowNodeDetailResource> resources) throws RegistryException {
        List<WorkflowNodeDetails> workflowNodeDetailsList = new ArrayList<WorkflowNodeDetails>();
        if (resources != null && !resources.isEmpty()) {
            for (WorkflowNodeDetailResource resource : resources) {
                workflowNodeDetailsList.add(getWorkflowNodeDetails(resource));
            }
        }
        return workflowNodeDetailsList;
    }

    /**
     * Converts a task resource into a {@link TaskDetails}, pulling in
     * scheduling and data-handling sub-records only when they exist.
     */
    public static TaskDetails getTaskDetail(TaskDetailResource taskDetailResource) throws RegistryException {
        if (taskDetailResource == null) {
            return null;
        }
        TaskDetails taskDetails = new TaskDetails();
        String taskId = taskDetailResource.getTaskId();
        taskDetails.setTaskId(taskId);
        taskDetails.setApplicationId(taskDetailResource.getApplicationId());
        taskDetails.setApplicationVersion(taskDetailResource.getApplicationVersion());
        taskDetails.setApplicationInputs(getApplicationInputs(taskDetailResource.getApplicationInputs()));
        taskDetails.setApplicationOutputs(getApplicationOutputs(taskDetailResource.getApplicationOutputs()));
        taskDetails.setEnableEmailNotification(taskDetailResource.isEnableEmailNotifications());
        if (taskDetails.isEnableEmailNotification()) {
            List<NotificationEmailResource> notificationEmails = taskDetailResource.getNotificationEmails();
            taskDetails.setEmailAddresses(getEmailAddresses(notificationEmails));
        }
        taskDetails.setApplicationDeploymentId(taskDetailResource.getApplicationDeploymentId());
        if (taskDetailResource.isExists(ResourceType.COMPUTATIONAL_RESOURCE_SCHEDULING, taskId)) {
            ComputationSchedulingResource computationScheduling = taskDetailResource.getComputationScheduling(taskId);
            taskDetails.setTaskScheduling(getComputationalResourceScheduling(computationScheduling));
        }
        if (taskDetailResource.isExists(ResourceType.ADVANCE_INPUT_DATA_HANDLING, taskId)) {
            AdvanceInputDataHandlingResource inputDataHandling = taskDetailResource.getInputDataHandling(taskId);
            taskDetails.setAdvancedInputDataHandling(getAdvanceInputDataHandling(inputDataHandling));
        }
        if (taskDetailResource.isExists(ResourceType.ADVANCE_OUTPUT_DATA_HANDLING, taskId)) {
            AdvancedOutputDataHandlingResource outputDataHandling = taskDetailResource.getOutputDataHandling(taskId);
            taskDetails.setAdvancedOutputDataHandling(getAdvanceOutputDataHandling(outputDataHandling));
        }
        taskDetails.setTaskStatus(getTaskStatus(taskDetailResource.getTaskStatus()));
        taskDetails.setJobDetailsList(getJobDetailsList(taskDetailResource.getJobDetailList()));
        taskDetails.setErrors(getErrorDetailList(taskDetailResource.getErrorDetailList()));
        taskDetails.setDataTransferDetailsList(getDataTransferlList(taskDetailResource.getDataTransferDetailList()));
        return taskDetails;
    }

    /** Converts a list of task resources. */
    public static List<TaskDetails> getTaskDetailsList(List<TaskDetailResource> resources) throws RegistryException {
        List<TaskDetails> taskDetailsList = new ArrayList<TaskDetails>();
        if (resources != null && !resources.isEmpty()) {
            for (TaskDetailResource resource : resources) {
                taskDetailsList.add(getTaskDetail(resource));
            }
        }
        return taskDetailsList;
    }

    /** Converts a list of job resources. */
    public static List<JobDetails> getJobDetailsList(List<JobDetailResource> jobs) throws RegistryException {
        List<JobDetails> jobDetailsList = new ArrayList<JobDetails>();
        if (jobs != null && !jobs.isEmpty()) {
            for (JobDetailResource resource : jobs) {
                jobDetailsList.add(getJobDetail(resource));
            }
        }
        return jobDetailsList;
    }

    /**
     * Converts a job resource into a {@link JobDetails}, including job and
     * application statuses and any recorded errors.
     */
    public static JobDetails getJobDetail(JobDetailResource jobDetailResource) throws RegistryException {
        if (jobDetailResource == null) {
            return null;
        }
        JobDetails jobDetails = new JobDetails();
        jobDetails.setJobId(jobDetailResource.getJobId());
        jobDetails.setJobDescription(jobDetailResource.getJobDescription());
        // Guard like getProject(): older rows may carry no creation timestamp.
        if (jobDetailResource.getCreationTime() != null) {
            jobDetails.setCreationTime(jobDetailResource.getCreationTime().getTime());
        }
        jobDetails.setJobStatus(getJobStatus(jobDetailResource.getJobStatus()));
        jobDetails.setJobName(jobDetailResource.getJobName());
        jobDetails.setWorkingDir(jobDetailResource.getWorkingDir());
        jobDetails.setApplicationStatus(getApplicationStatus(jobDetailResource.getApplicationStatus()));
        jobDetails.setErrors(getErrorDetailList(jobDetailResource.getErrorDetails()));
        jobDetails.setComputeResourceConsumed(jobDetailResource.getComputeResourceConsumed());
        return jobDetails;
    }

    /**
     * Converts an error resource into an {@link ErrorDetails}. The category,
     * corrective action and actionable group are resolved via enum lookup and
     * will throw if the persisted strings do not match the enum names.
     */
    public static ErrorDetails getErrorDetails(ErrorDetailResource resource) {
        if (resource == null) {
            return null;
        }
        ErrorDetails errorDetails = new ErrorDetails();
        errorDetails.setErrorId(String.valueOf(resource.getErrorId()));
        // Guard like getProject(): older rows may carry no creation timestamp.
        if (resource.getCreationTime() != null) {
            errorDetails.setCreationTime(resource.getCreationTime().getTime());
        }
        errorDetails.setActualErrorMessage(resource.getActualErrorMsg());
        errorDetails.setUserFriendlyMessage(resource.getUserFriendlyErrorMsg());
        errorDetails.setErrorCategory(ErrorCategory.valueOf(resource.getErrorCategory()));
        errorDetails.setTransientOrPersistent(resource.isTransientPersistent());
        errorDetails.setCorrectiveAction(CorrectiveAction.valueOf(resource.getCorrectiveAction()));
        errorDetails.setActionableGroup(ActionableGroup.valueOf(resource.getActionableGroup()));
        return errorDetails;
    }

    /** Converts a list of error resources. */
    public static List<ErrorDetails> getErrorDetailList(List<ErrorDetailResource> errorDetailResources) {
        List<ErrorDetails> errorDetailsList = new ArrayList<ErrorDetails>();
        if (errorDetailResources != null && !errorDetailResources.isEmpty()) {
            for (ErrorDetailResource errorDetailResource : errorDetailResources) {
                errorDetailsList.add(getErrorDetails(errorDetailResource));
            }
        }
        return errorDetailsList;
    }

    /** Converts a data-transfer resource into a {@link DataTransferDetails}. */
    public static DataTransferDetails getDataTransferDetail(DataTransferDetailResource resource) throws RegistryException {
        if (resource == null) {
            return null;
        }
        DataTransferDetails details = new DataTransferDetails();
        details.setTransferId(resource.getTransferId());
        // Guard like getProject(): older rows may carry no creation timestamp.
        if (resource.getCreationTime() != null) {
            details.setCreationTime(resource.getCreationTime().getTime());
        }
        details.setTransferDescription(resource.getTransferDescription());
        details.setTransferStatus(getTransferStatus(resource.getDataTransferStatus()));
        return details;
    }

    /**
     * Converts a list of data-transfer resources.
     * (Name kept as-is for source compatibility, despite the typo.)
     */
    public static List<DataTransferDetails> getDataTransferlList(List<DataTransferDetailResource> resources) throws RegistryException {
        List<DataTransferDetails> transferDetailsList = new ArrayList<DataTransferDetails>();
        if (resources != null && !resources.isEmpty()) {
            for (DataTransferDetailResource resource : resources) {
                transferDetailsList.add(getDataTransferDetail(resource));
            }
        }
        return transferDetailsList;
    }

    /**
     * Converts a configuration-data resource into a
     * {@link UserConfigurationData}, pulling scheduling, data handling and QoS
     * sub-records from the owning experiment when they exist.
     */
    public static UserConfigurationData getUserConfigData(ConfigDataResource resource) throws RegistryException {
        if (resource == null) {
            return null;
        }
        UserConfigurationData data = new UserConfigurationData();
        data.setAiravataAutoSchedule(resource.isAiravataAutoSchedule());
        data.setOverrideManualScheduledParams(resource.isOverrideManualParams());
        data.setShareExperimentPublicly(resource.isShareExp());
        data.setUserDn(resource.getUserDn());
        data.setGenerateCert(resource.isGenerateCert());
        ExperimentResource experimentResource = resource.getExperimentResource();
        String expID = experimentResource.getExpID();
        if (experimentResource.isExists(ResourceType.COMPUTATIONAL_RESOURCE_SCHEDULING, expID)) {
            ComputationSchedulingResource computationScheduling = experimentResource.getComputationScheduling(expID);
            data.setComputationalResourceScheduling(getComputationalResourceScheduling(computationScheduling));
        }
        if (experimentResource.isExists(ResourceType.ADVANCE_INPUT_DATA_HANDLING, expID)) {
            AdvanceInputDataHandlingResource inputDataHandling = experimentResource.getInputDataHandling(expID);
            data.setAdvanceInputDataHandling(getAdvanceInputDataHandling(inputDataHandling));
        }
        if (experimentResource.isExists(ResourceType.ADVANCE_OUTPUT_DATA_HANDLING, expID)) {
            AdvancedOutputDataHandlingResource outputDataHandling = experimentResource.getOutputDataHandling(expID);
            data.setAdvanceOutputDataHandling(getAdvanceOutputDataHandling(outputDataHandling));
        }
        if (experimentResource.isExists(ResourceType.QOS_PARAM, expID)) {
            QosParamResource qoSparams = experimentResource.getQOSparams(expID);
            data.setQosParams(getQOSParams(qoSparams));
        }
        return data;
    }

    /**
     * Converts a scheduling resource into a
     * {@link ComputationalResourceScheduling}.
     */
    public static ComputationalResourceScheduling getComputationalResourceScheduling(ComputationSchedulingResource csr) {
        if (csr == null) {
            return null;
        }
        ComputationalResourceScheduling scheduling = new ComputationalResourceScheduling();
        scheduling.setResourceHostId(csr.getResourceHostId());
        scheduling.setTotalCpuCount(csr.getCpuCount());
        scheduling.setNodeCount(csr.getNodeCount());
        scheduling.setNumberOfThreads(csr.getNumberOfThreads());
        scheduling.setQueueName(csr.getQueueName());
        scheduling.setWallTimeLimit(csr.getWalltimeLimit());
        if (csr.getJobStartTime() != null) {
            // NOTE(review): the Thrift field is a 32-bit int, so epoch
            // milliseconds are truncated here — confirm whether a
            // seconds-based value was intended.
            scheduling.setJobStartTime((int) csr.getJobStartTime().getTime());
        }
        scheduling.setTotalPhysicalMemory(csr.getPhysicalMemory());
        scheduling.setComputationalProjectAccount(csr.getProjectName());
        scheduling.setChassisName(csr.getChessisName());
        return scheduling;
    }

    /** Converts an advanced input-data-handling resource. */
    public static AdvancedInputDataHandling getAdvanceInputDataHandling(AdvanceInputDataHandlingResource adhr) {
        if (adhr == null) {
            return null;
        }
        AdvancedInputDataHandling adih = new AdvancedInputDataHandling();
        adih.setStageInputFilesToWorkingDir(adhr.isStageInputFiles());
        adih.setParentWorkingDirectory(adhr.getWorkingDirParent());
        adih.setUniqueWorkingDirectory(adhr.getWorkingDir());
        adih.setCleanUpWorkingDirAfterJob(adhr.isCleanAfterJob());
        return adih;
    }

    /** Converts an advanced output-data-handling resource. */
    public static AdvancedOutputDataHandling getAdvanceOutputDataHandling(AdvancedOutputDataHandlingResource adodh) {
        if (adodh == null) {
            return null;
        }
        AdvancedOutputDataHandling outputDataHandling = new AdvancedOutputDataHandling();
        outputDataHandling.setOutputDataDir(adodh.getOutputDataDir());
        outputDataHandling.setDataRegistryURL(adodh.getDataRegUrl());
        outputDataHandling.setPersistOutputData(adodh.isPersistOutputData());
        return outputDataHandling;
    }

    /** Converts a QoS-parameters resource into a {@link QualityOfServiceParams}. */
    public static QualityOfServiceParams getQOSParams(QosParamResource qos) {
        if (qos == null) {
            return null;
        }
        QualityOfServiceParams qosParams = new QualityOfServiceParams();
        qosParams.setStartExecutionAt(qos.getStartExecutionAt());
        qosParams.setExecuteBefore(qos.getExecuteBefore());
        qosParams.setNumberofRetries(qos.getNoOfRetries());
        return qosParams;
    }
}
http://git-wip-us.apache.org/repos/asf/airavata/blob/e13d90da/modules/registry/airavata-mongo-registry/src/main/resources/META-INF/persistence.xml ---------------------------------------------------------------------- diff --git a/modules/registry/airavata-mongo-registry/src/main/resources/META-INF/persistence.xml b/modules/registry/airavata-mongo-registry/src/main/resources/META-INF/persistence.xml new file mode 100644 index 0000000..2ba8ce4 --- /dev/null +++ b/modules/registry/airavata-mongo-registry/src/main/resources/META-INF/persistence.xml @@ -0,0 +1,65 @@ +<?xml version="1.0"?> +<!--* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * +* --> +<persistence xmlns="http://java.sun.com/xml/ns/persistence" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.0"> + <persistence-unit name="airavata_data"> + <provider>org.apache.openjpa.persistence.PersistenceProviderImpl</provider> + <class>org.apache.airavata.persistance.registry.jpa.model.Gateway</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Configuration</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Users</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Gateway_Worker</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Project</class> + <class>org.apache.airavata.persistance.registry.jpa.model.ProjectUser</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Experiment</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Notification_Email</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Input</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Output</class> + <class>org.apache.airavata.persistance.registry.jpa.model.WorkflowNodeDetail</class> + <class>org.apache.airavata.persistance.registry.jpa.model.TaskDetail</class> + <class>org.apache.airavata.persistance.registry.jpa.model.ErrorDetail</class> + <class>org.apache.airavata.persistance.registry.jpa.model.ApplicationInput</class> + <class>org.apache.airavata.persistance.registry.jpa.model.ApplicationOutput</class> + <class>org.apache.airavata.persistance.registry.jpa.model.NodeInput</class> + <class>org.apache.airavata.persistance.registry.jpa.model.NodeOutput</class> + <class>org.apache.airavata.persistance.registry.jpa.model.JobDetail</class> + <class>org.apache.airavata.persistance.registry.jpa.model.DataTransferDetail</class> + <class>org.apache.airavata.persistance.registry.jpa.model.Status</class> + <class>org.apache.airavata.persistance.registry.jpa.model.ExperimentConfigData</class> + 
<class>org.apache.airavata.persistance.registry.jpa.model.Computational_Resource_Scheduling</class> + <class>org.apache.airavata.persistance.registry.jpa.model.AdvancedInputDataHandling</class> + <class>org.apache.airavata.persistance.registry.jpa.model.AdvancedOutputDataHandling</class> + <class>org.apache.airavata.persistance.registry.jpa.model.QosParam</class> + <exclude-unlisted-classes>true</exclude-unlisted-classes> + <properties> + <property name="openjpa.ConnectionURL" + value="jdbc:mysql://localhost:3306/persitant_data" /> + <property name="openjpa.ConnectionDriverName" value="com.mysql.jdbc.Driver" /> + <property name="openjpa.ConnectionUserName" value="airavata" /> + <property name="openjpa.ConnectionPassword" value="airavata" /> + <property name="openjpa.DynamicEnhancementAgent" value="true" /> + <property name="openjpa.RuntimeUnenhancedClasses" value="supported" /> + <property name="openjpa.Log" value="SQL=TRACE" /> + <property name="openjpa.ConnectionFactoryProperties" + value="PrettyPrint=true, PrettyPrintLineLength=72, PrintParameters=true, MaxActive=10, MaxIdle=5, MinIdle=2, MaxWait=60000" /> + </properties> + </persistence-unit> +</persistence> http://git-wip-us.apache.org/repos/asf/airavata/blob/e13d90da/modules/registry/airavata-mongo-registry/src/main/resources/airavata-server.properties ---------------------------------------------------------------------- diff --git a/modules/registry/airavata-mongo-registry/src/main/resources/airavata-server.properties b/modules/registry/airavata-mongo-registry/src/main/resources/airavata-server.properties new file mode 100644 index 0000000..0bdb522 --- /dev/null +++ b/modules/registry/airavata-mongo-registry/src/main/resources/airavata-server.properties @@ -0,0 +1,280 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +########################################################################### +# +# This properties file provides configuration for all Airavata Services: +# API Server, Registry, Workflow Interpreter, GFac, Orchestrator +# +########################################################################### + +########################################################################### +# API Server Registry Configuration +########################################################################### + +#for derby [AiravataJPARegistry] +#registry.jdbc.driver=org.apache.derby.jdbc.ClientDriver +#registry.jdbc.url=jdbc:derby://localhost:1527/experiment_catalog;create=true;user=airavata;password=airavata +# MySql database configuration +registry.jdbc.driver=com.mysql.jdbc.Driver +registry.jdbc.url=jdbc:mysql://localhost:3306/airavata-registry +registry.jdbc.user=root +registry.jdbc.password= +start.derby.server.mode=true +validationQuery=SELECT 1 from CONFIGURATION +cache.enable=true +jpa.cache.size=5000 +#jpa.connection.properties=MaxActive=10,MaxIdle=5,MinIdle=2,MaxWait=60000,testWhileIdle=true,testOnBorrow=true + +# Properties for default user mode +default.registry.user=admin +default.registry.password=admin +default.registry.password.hash.method=SHA +default.registry.gateway=php_reference_gateway + +#ip=127.0.0.1 + +########################################################################### +# 
Application Catalog DB Configuration +########################################################################### +#for derby [AiravataJPARegistry] +appcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver +appcatalog.jdbc.url=jdbc:derby://localhost:1527/app_catalog;create=true;user=airavata;password=airavata +# MySql database configuration +#appcatalog.jdbc.driver=com.mysql.jdbc.Driver +#appcatalog.jdbc.url=jdbc:mysql://localhost:3306/app_catalog +appcatalog.jdbc.user=airavata +appcatalog.jdbc.password=airavata +appcatalog.validationQuery=SELECT 1 from CONFIGURATION + +########################################################################### +# Server module Configuration +########################################################################### + +servers=apiserver,orchestrator,gfac,credentialstore +#shutdown.trategy=NONE +shutdown.trategy=SELF_TERMINATE + + +apiserver.server.host=localhost +apiserver.server.port=8930 +apiserver.server.min.threads=50 +orchestrator.server.host=localhost +orchestrator.server.port=8940 +gfac.server.host=localhost +gfac.server.port=8950 +orchestrator.server.min.threads=50 + +########################################################################### +# Job Scheduler can send informative email messages to you about the status of your job. +# Specify a string which consists of either the single character "n" (no mail), or one or more +# of the characters "a" (send mail when job is aborted), "b" (send mail when job begins), +# and "e" (send mail when job terminates). The default is "a" if not specified. 
+########################################################################### + +job.notification.enable=true +#Provide comma separated email ids as a string if more than one +job.notification.emailids= +job.notification.flags=abe + +########################################################################### +# Credential Store module Configuration +########################################################################### +start.credential.store=false +credential.store.keystore.url=/Users/chathuri/dev/airavata/credential-store/oa4mp/airavata_sym.jks +credential.store.keystore.alias=airavata +credential.store.keystore.password=airavata +credential.store.jdbc.url=jdbc:derby://localhost:1527/experiment_catalog;create=true;user=airavata;password=airavata +credential.store.jdbc.user=airavata +credential.store.jdbc.password=airavata +credential.store.jdbc.driver=org.apache.derby.jdbc.ClientDriver +credential.store.server.host=localhost +credential.store.server.port=8960 +credentialstore=org.apache.airavata.credential.store.server.CredentialStoreServer +credential.store.thrift.server.keystore=/Users/chathuri/dev/airavata/credential-store/oa4mp/airavata.jks +credential.store.thrift.server.keystore.password=airavata + +notifier.enabled=false +#period in milliseconds +notifier.duration=5000 + +email.server=smtp.googlemail.com +email.server.port=465 +email.user=airavata +email.password=xxx +email.ssl=true [email protected] + +########################################################################### +# Airavata GFac MyProxy GSI credentials to access Grid Resources. +########################################################################### +# +# Security Configuration used by Airavata Generic Factory Service +# to interact with Computational Resources. 
+# +gfac.thread.pool.size=50 +airavata.server.thread.pool.size=50 +gfac=org.apache.airavata.gfac.server.GfacServer +myproxy.server=myproxy.teragrid.org +myproxy.username=ogce +myproxy.password= +myproxy.life=3600 +# XSEDE Trusted certificates can be downloaded from https://software.xsede.org/security/xsede-certs.tar.gz +trusted.cert.location=/Users/lahirugunathilake/Downloads/certificates +gfac.passive=true +# SSH PKI key pair or ssh password can be used SSH based authentication is used. +# if user specify both password authentication gets the higher preference + +################# ---------- For ssh key pair authentication ------------------- ################ +#public.ssh.key=/path to public key for ssh +#private.ssh.key=/path to private key file for ssh +#ssh.keypass=passphrase for the private key +#ssh.username=username for ssh connection +### Incase of password authentication. +#ssh.password=Password for ssh connection + +################ ---------- BES Properties ------------------- ############### +#bes.ca.cert.path=<location>/certificates/cacert.pem +#bes.ca.key.path=<location>/certificates/cakey.pem +#bes.ca.key.pass=passphrase + + +########################################################################### +# Airavata Workflow Interpreter Configurations +########################################################################### + +#runInThread=true +#provenance=true +#provenanceWriterThreadPoolSize=20 +#gfac.embedded=true +#workflowserver=org.apache.airavata.api.server.WorkflowServer +enactment.thread.pool.size=10 + +#to define custom workflow parser user following property +#workflow.parser=org.apache.airavata.workflow.core.parser.AiravataWorkflowParser + + +########################################################################### +# API Server module Configuration +########################################################################### +apiserver=org.apache.airavata.api.server.AiravataAPIServer + 
+########################################################################### +# Workflow Server module Configuration +########################################################################### + +workflowserver=org.apache.airavata.api.server.WorkflowServer + +########################################################################### +# Advance configuration to change service implementations +########################################################################### +# If false, disables two phase commit when submitting jobs +TwoPhase=true +# +# Class which implemented HostScheduler interface. It will determine the which host to submit the request +# +host.scheduler=org.apache.airavata.gfac.core.scheduler.impl.SimpleHostScheduler + +########################################################################### +# Monitoring module Configuration +########################################################################### + +#This will be the primary monitoring tool which runs in airavata, in future there will be multiple monitoring +#mechanisms and one would be able to start a monitor +monitors=org.apache.airavata.gfac.monitor.impl.pull.qstat.QstatMonitor,org.apache.airavata.gfac.monitor.impl.LocalJobMonitor + +#These properties will used to enable email base monitoring +email.based.monitor.host=imap.gmail.com [email protected] +email.based.monitor.password=changeme +email.based.monitor.folder.name=INBOX +# either imaps or pop3 +email.based.monitor.store.protocol=imaps +#These property will be used to query the email server periodically. value in milliseconds(ms). 
+email.based.monitoring.period=10000 +########################################################################### +# AMQP Notification Configuration +########################################################################### + + +amqp.notification.enable=1 + +amqp.broker.host=localhost +amqp.broker.port=5672 +amqp.broker.username=guest +amqp.broker.password=guest + +amqp.sender=org.apache.airavata.wsmg.client.amqp.rabbitmq.AMQPSenderImpl +amqp.topic.sender=org.apache.airavata.wsmg.client.amqp.rabbitmq.AMQPTopicSenderImpl +amqp.broadcast.sender=org.apache.airavata.wsmg.client.amqp.rabbitmq.AMQPBroadcastSenderImpl + +#,org.apache.airavata.gfac.monitor.impl.push.amqp.AMQPMonitor +#This is the amqp related configuration and this lists down the Rabbitmq host, this is an xsede specific configuration +amqp.hosts=info1.dyn.teragrid.org,info2.dyn.teragrid.org +proxy.file.path=/Users/lahirugunathilake/Downloads/x509up_u503876 +connection.name=xsede +#publisher +#for simple scenarios we can use the guest user +rabbitmq.broker.url=amqp://localhost:5672 +#for production scenarios, give url as amqp://userName:password@hostName:portNumber/virtualHost, create user, virtualhost +# and give permissions, refer: http://blog.dtzq.com/2012/06/rabbitmq-users-and-virtual-hosts.html +#rabbitmq.broker.url=amqp://airavata:airavata@localhost:5672/messaging + +activity.listeners=org.apache.airavata.gfac.core.monitor.AiravataJobStatusUpdator,org.apache.airavata.gfac.core.monitor.AiravataTaskStatusUpdator,org.apache.airavata.gfac.core.monitor.AiravataWorkflowNodeStatusUpdator,org.apache.airavata.api.server.listener.AiravataExperimentStatusUpdator,org.apache.airavata.gfac.core.monitor.GfacInternalStatusUpdator +status.publisher=org.apache.airavata.messaging.core.impl.RabbitMQStatusPublisher +task.launch.publisher=org.apache.airavata.messaging.core.impl.RabbitMQTaskLaunchPublisher +rabbitmq.status.exchange.name=airavata_rabbitmq_exchange 
+rabbitmq.task.launch.exchange.name=airavata_task_launch_rabbitmq_exchange +durable.queue=false +launch.queue.name=launch.queue +cancel.queue.name=cancel.queue +activity.publisher=org.apache.airavata.messaging.core.impl.RabbitMQPublisher +rabbitmq.exchange.name=airavata_rabbitmq_exchange + +########################################################################### +# Orchestrator module Configuration +########################################################################### + +#job.submitter=org.apache.airavata.orchestrator.core.impl.GFACEmbeddedJobSubmitter +#job.submitter=org.apache.airavata.orchestrator.core.impl.GFACPassiveJobSubmitter +#job.submitter=org.apache.airavata.orchestrator.core.impl.GFACRPCJobSubmitter +job.validators=org.apache.airavata.orchestrator.core.validator.impl.BatchQueueValidator,org.apache.airavata.orchestrator.core.validator.impl.ExperimentStatusValidator +submitter.interval=10000 +threadpool.size=10 +start.submitter=true +embedded.mode=true +enable.validation=true +orchestrator=org.apache.airavata.orchestrator.server.OrchestratorServer + +########################################################################### +# Zookeeper Server Configuration +########################################################################### + +embedded.zk=true +zookeeper.server.host=localhost +zookeeper.server.port=2181 +airavata-server=/api-server +zookeeper.timeout=30000 +orchestrator-server=/orchestrator-server +gfac-server=/gfac-server +gfac-experiments=/gfac-experiments +gfac-server-name=gfac-node0 +orchestrator-server-name=orch-node0 +airavata-server-name=api-node0 http://git-wip-us.apache.org/repos/asf/airavata/blob/e13d90da/modules/registry/airavata-mongo-registry/src/main/resources/registry-derby.sql ---------------------------------------------------------------------- diff --git a/modules/registry/airavata-mongo-registry/src/main/resources/registry-derby.sql b/modules/registry/airavata-mongo-registry/src/main/resources/registry-derby.sql new 
file mode 100644 index 0000000..7ab3755 --- /dev/null +++ b/modules/registry/airavata-mongo-registry/src/main/resources/registry-derby.sql @@ -0,0 +1,391 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ +CREATE TABLE GATEWAY +( + GATEWAY_ID VARCHAR (255), + GATEWAY_NAME VARCHAR(255), + DOMAIN VARCHAR(255), + EMAIL_ADDRESS VARCHAR(255), + PRIMARY KEY (GATEWAY_ID) +); + +CREATE TABLE CONFIGURATION +( + CONFIG_KEY VARCHAR(255), + CONFIG_VAL VARCHAR(255), + EXPIRE_DATE TIMESTAMP DEFAULT '0000-00-00 00:00:00', + CATEGORY_ID VARCHAR (255), + PRIMARY KEY(CONFIG_KEY, CONFIG_VAL, CATEGORY_ID) +); + +INSERT INTO CONFIGURATION (CONFIG_KEY, CONFIG_VAL, EXPIRE_DATE, CATEGORY_ID) VALUES('registry.version', '0.15', CURRENT_TIMESTAMP ,'SYSTEM'); + +CREATE TABLE USERS +( + USER_NAME VARCHAR(255), + PASSWORD VARCHAR(255), + PRIMARY KEY(USER_NAME) +); + +CREATE TABLE GATEWAY_WORKER +( + GATEWAY_ID VARCHAR(255), + USER_NAME VARCHAR(255), + PRIMARY KEY (GATEWAY_ID, USER_NAME), + FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE, + FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE +); + +CREATE TABLE PROJECT +( + GATEWAY_ID VARCHAR(255), + USER_NAME VARCHAR(255) NOT NULL, + 
PROJECT_ID VARCHAR(255), + PROJECT_NAME VARCHAR(255) NOT NULL, + DESCRIPTION VARCHAR(255), + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (PROJECT_ID), + FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE, + FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE +); + +CREATE TABLE PROJECT_USER +( + PROJECT_ID VARCHAR(255), + USER_NAME VARCHAR(255), + PRIMARY KEY (PROJECT_ID,USER_NAME), + FOREIGN KEY (PROJECT_ID) REFERENCES PROJECT(PROJECT_ID) ON DELETE CASCADE, + FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE +); + +CREATE TABLE EXPERIMENT +( + EXPERIMENT_ID VARCHAR(255), + GATEWAY_ID VARCHAR(255), + EXECUTION_USER VARCHAR(255) NOT NULL, + PROJECT_ID VARCHAR(255) NOT NULL, + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + EXPERIMENT_NAME VARCHAR(255) NOT NULL, + EXPERIMENT_DESCRIPTION VARCHAR(255), + APPLICATION_ID VARCHAR(255), + APPLICATION_VERSION VARCHAR(255), + WORKFLOW_TEMPLATE_ID VARCHAR(255), + WORKFLOW_TEMPLATE_VERSION VARCHAR(255), + WORKFLOW_EXECUTION_ID VARCHAR(255), + ALLOW_NOTIFICATION SMALLINT, + GATEWAY_EXECUTION_ID VARCHAR(255), + PRIMARY KEY(EXPERIMENT_ID), + FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE, + FOREIGN KEY (EXECUTION_USER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE, + FOREIGN KEY (PROJECT_ID) REFERENCES PROJECT(PROJECT_ID) ON DELETE CASCADE +); + +CREATE TABLE EXPERIMENT_INPUT +( + EXPERIMENT_ID VARCHAR(255), + INPUT_KEY VARCHAR(255) NOT NULL, + DATA_TYPE VARCHAR(255), + METADATA VARCHAR(255), + APP_ARGUMENT VARCHAR(255), + STANDARD_INPUT SMALLINT, + USER_FRIENDLY_DESC VARCHAR(255), + VALUE CLOB, + INPUT_ORDER INTEGER, + IS_REQUIRED SMALLINT, + REQUIRED_TO_COMMANDLINE SMALLINT, + DATA_STAGED SMALLINT, + PRIMARY KEY(EXPERIMENT_ID,INPUT_KEY), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE +); + +CREATE TABLE EXPERIMENT_OUTPUT +( + EXPERIMENT_ID VARCHAR(255), + OUTPUT_KEY VARCHAR(255) 
NOT NULL, + DATA_TYPE VARCHAR(255), + VALUE CLOB, + IS_REQUIRED SMALLINT, + REQUIRED_TO_COMMANDLINE SMALLINT, + DATA_MOVEMENT SMALLINT, + DATA_NAME_LOCATION VARCHAR(255), + SEARCH_QUERY VARCHAR(255), + APP_ARGUMENT VARCHAR(255), + PRIMARY KEY(EXPERIMENT_ID,OUTPUT_KEY), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE +); + + +CREATE TABLE WORKFLOW_NODE_DETAIL +( + EXPERIMENT_ID VARCHAR(255) NOT NULL, + NODE_INSTANCE_ID VARCHAR(255), + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + NODE_NAME VARCHAR(255) NOT NULL, + EXECUTION_UNIT VARCHAR(255) NOT NULL, + EXECUTION_UNIT_DATA VARCHAR(255), + PRIMARY KEY(NODE_INSTANCE_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE +); + +CREATE TABLE TASK_DETAIL +( + TASK_ID VARCHAR(255), + NODE_INSTANCE_ID VARCHAR(255), + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + APPLICATION_ID VARCHAR(255), + APPLICATION_VERSION VARCHAR(255), + APPLICATION_DEPLOYMENT_ID VARCHAR(255), + ALLOW_NOTIFICATION SMALLINT, + PRIMARY KEY(TASK_ID), + FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE +); + +CREATE TABLE NOTIFICATION_EMAIL +( + EMAIL_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + EXPERIMENT_ID VARCHAR(255), + TASK_ID VARCHAR(255), + EMAIL_ADDRESS VARCHAR(255), + PRIMARY KEY(EMAIL_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE ERROR_DETAIL +( + ERROR_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + EXPERIMENT_ID VARCHAR(255), + TASK_ID VARCHAR(255), + NODE_INSTANCE_ID VARCHAR(255), + JOB_ID VARCHAR(255), + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + ACTUAL_ERROR_MESSAGE CLOB, + USER_FRIEDNLY_ERROR_MSG VARCHAR(255), + TRANSIENT_OR_PERSISTENT SMALLINT, + ERROR_CATEGORY VARCHAR(255), + CORRECTIVE_ACTION VARCHAR(255), + ACTIONABLE_GROUP 
VARCHAR(255), + PRIMARY KEY(ERROR_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE, + FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE +); + +CREATE TABLE APPLICATION_INPUT +( + TASK_ID VARCHAR(255), + INPUT_KEY VARCHAR(255) NOT NULL, + DATA_TYPE VARCHAR(255), + METADATA VARCHAR(255), + APP_ARGUMENT VARCHAR(255), + STANDARD_INPUT SMALLINT, + USER_FRIENDLY_DESC VARCHAR(255), + VALUE CLOB, + INPUT_ORDER INTEGER, + IS_REQUIRED SMALLINT, + REQUIRED_TO_COMMANDLINE SMALLINT, + DATA_STAGED SMALLINT, + PRIMARY KEY(TASK_ID,INPUT_KEY), + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE APPLICATION_OUTPUT +( + TASK_ID VARCHAR(255), + OUTPUT_KEY VARCHAR(255) NOT NULL, + DATA_TYPE VARCHAR(255), + VALUE CLOB, + IS_REQUIRED SMALLINT, + REQUIRED_TO_COMMANDLINE SMALLINT, + DATA_MOVEMENT SMALLINT, + DATA_NAME_LOCATION VARCHAR(255), + SEARCH_QUERY VARCHAR(255), + APP_ARGUMENT VARCHAR(255), + PRIMARY KEY(TASK_ID,OUTPUT_KEY), + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE NODE_INPUT +( + NODE_INSTANCE_ID VARCHAR(255), + INPUT_KEY VARCHAR(255) NOT NULL, + DATA_TYPE VARCHAR(255), + METADATA VARCHAR(255), + APP_ARGUMENT VARCHAR(255), + STANDARD_INPUT SMALLINT, + USER_FRIENDLY_DESC VARCHAR(255), + VALUE VARCHAR(255), + INPUT_ORDER INTEGER, + IS_REQUIRED SMALLINT, + REQUIRED_TO_COMMANDLINE SMALLINT, + DATA_STAGED SMALLINT, + PRIMARY KEY(NODE_INSTANCE_ID,INPUT_KEY), + FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE +); + +CREATE TABLE NODE_OUTPUT +( + NODE_INSTANCE_ID VARCHAR(255), + OUTPUT_KEY VARCHAR(255) NOT NULL, + DATA_TYPE VARCHAR(255), + VALUE VARCHAR(255), + IS_REQUIRED SMALLINT, + REQUIRED_TO_COMMANDLINE SMALLINT, + DATA_MOVEMENT SMALLINT, + DATA_NAME_LOCATION VARCHAR(255), + 
SEARCH_QUERY VARCHAR(255), + APP_ARGUMENT VARCHAR(255), + PRIMARY KEY(NODE_INSTANCE_ID,OUTPUT_KEY), + FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE +); + +CREATE TABLE JOB_DETAIL +( + JOB_ID VARCHAR(255), + TASK_ID VARCHAR(255), + JOB_DESCRIPTION CLOB NOT NULL, + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + COMPUTE_RESOURCE_CONSUMED VARCHAR(255), + JOBNAME VARCHAR (255), + WORKING_DIR VARCHAR(255), + PRIMARY KEY (TASK_ID, JOB_ID), + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE DATA_TRANSFER_DETAIL +( + TRANSFER_ID VARCHAR(255), + TASK_ID VARCHAR(255), + CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + TRANSFER_DESC VARCHAR(255) NOT NULL, + PRIMARY KEY(TRANSFER_ID), + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE STATUS +( + STATUS_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + EXPERIMENT_ID VARCHAR(255), + NODE_INSTANCE_ID VARCHAR(255), + TRANSFER_ID VARCHAR(255), + TASK_ID VARCHAR(255), + JOB_ID VARCHAR(255), + STATE VARCHAR(255), + STATUS_UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00', + STATUS_TYPE VARCHAR(255), + PRIMARY KEY(STATUS_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE, + FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE, + FOREIGN KEY (TRANSFER_ID) REFERENCES DATA_TRANSFER_DETAIL(TRANSFER_ID) ON DELETE CASCADE +); + +CREATE TABLE CONFIG_DATA +( + EXPERIMENT_ID VARCHAR(255), + AIRAVATA_AUTO_SCHEDULE SMALLINT NOT NULL, + OVERRIDE_MANUAL_SCHEDULE_PARAMS SMALLINT NOT NULL, + SHARE_EXPERIMENT SMALLINT, + USER_DN VARCHAR(255), + GENERATE_CERT SMALLINT, + PRIMARY KEY(EXPERIMENT_ID) +); + +CREATE TABLE COMPUTATIONAL_RESOURCE_SCHEDULING +( + RESOURCE_SCHEDULING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + 
EXPERIMENT_ID VARCHAR(255), + TASK_ID VARCHAR(255), + RESOURCE_HOST_ID VARCHAR(255), + CPU_COUNT INTEGER, + NODE_COUNT INTEGER, + NO_OF_THREADS INTEGER, + QUEUE_NAME VARCHAR(255), + WALLTIME_LIMIT INTEGER, + JOB_START_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00', + TOTAL_PHYSICAL_MEMORY INTEGER, + COMPUTATIONAL_PROJECT_ACCOUNT VARCHAR(255), + CHESSIS_NAME VARCHAR(255), + PRIMARY KEY(RESOURCE_SCHEDULING_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE ADVANCE_INPUT_DATA_HANDLING +( + INPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + EXPERIMENT_ID VARCHAR(255), + TASK_ID VARCHAR(255), + WORKING_DIR_PARENT VARCHAR(255), + UNIQUE_WORKING_DIR VARCHAR(255), + STAGE_INPUT_FILES_TO_WORKING_DIR SMALLINT, + CLEAN_AFTER_JOB SMALLINT, + PRIMARY KEY(INPUT_DATA_HANDLING_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE ADVANCE_OUTPUT_DATA_HANDLING +( + OUTPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + EXPERIMENT_ID VARCHAR(255), + TASK_ID VARCHAR(255), + OUTPUT_DATA_DIR VARCHAR(255), + DATA_REG_URL VARCHAR (255), + PERSIST_OUTPUT_DATA SMALLINT, + PRIMARY KEY(OUTPUT_DATA_HANDLING_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + +CREATE TABLE QOS_PARAM +( + QOS_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY, + EXPERIMENT_ID VARCHAR(255), + TASK_ID VARCHAR(255), + START_EXECUTION_AT VARCHAR(255), + EXECUTE_BEFORE VARCHAR(255), + NO_OF_RETRIES INTEGER, + PRIMARY KEY(QOS_ID), + FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE, + FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE +); + 
+CREATE TABLE COMMUNITY_USER +( + GATEWAY_ID VARCHAR(256) NOT NULL, + COMMUNITY_USER_NAME VARCHAR(256) NOT NULL, + TOKEN_ID VARCHAR(256) NOT NULL, + COMMUNITY_USER_EMAIL VARCHAR(256) NOT NULL, + PRIMARY KEY (GATEWAY_ID, COMMUNITY_USER_NAME, TOKEN_ID) +); + +CREATE TABLE CREDENTIALS +( + GATEWAY_ID VARCHAR(256) NOT NULL, + TOKEN_ID VARCHAR(256) NOT NULL, + CREDENTIAL BLOB NOT NULL, + PORTAL_USER_ID VARCHAR(256) NOT NULL, + TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (GATEWAY_ID, TOKEN_ID) +); + + http://git-wip-us.apache.org/repos/asf/airavata/blob/e13d90da/modules/registry/airavata-mongo-registry/src/main/resources/registry-mysql.sql ---------------------------------------------------------------------- diff --git a/modules/registry/airavata-mongo-registry/src/main/resources/registry-mysql.sql b/modules/registry/airavata-mongo-registry/src/main/resources/registry-mysql.sql new file mode 100644 index 0000000..14d7fc8 --- /dev/null +++ b/modules/registry/airavata-mongo-registry/src/main/resources/registry-mysql.sql @@ -0,0 +1,392 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
*/

-- Airavata registry schema (MySQL dialect).
-- FIX: three TIMESTAMP columns originally declared
-- DEFAULT '0000-00-00 00:00:00'. That zero-date literal is rejected by
-- MySQL 5.7+ under the default NO_ZERO_DATE/strict SQL mode, so those
-- columns are now nullable with DEFAULT NULL. NOW() defaults are written
-- as the standard synonym CURRENT_TIMESTAMP (identical behavior).

CREATE TABLE GATEWAY
(
        GATEWAY_ID VARCHAR(255),
        GATEWAY_NAME VARCHAR(255),
        DOMAIN VARCHAR(255),
        EMAIL_ADDRESS VARCHAR(255),
        PRIMARY KEY (GATEWAY_ID)
);

CREATE TABLE CONFIGURATION
(
        CONFIG_KEY VARCHAR(255),
        CONFIG_VAL VARCHAR(255),
        -- FIX: was DEFAULT '0000-00-00 00:00:00' (invalid under NO_ZERO_DATE).
        EXPIRE_DATE TIMESTAMP NULL DEFAULT NULL,
        CATEGORY_ID VARCHAR(255),
        PRIMARY KEY (CONFIG_KEY, CONFIG_VAL, CATEGORY_ID)
);

-- Seed row recording the registry schema version.
INSERT INTO CONFIGURATION (CONFIG_KEY, CONFIG_VAL, EXPIRE_DATE, CATEGORY_ID)
        VALUES ('registry.version', '0.15', CURRENT_TIMESTAMP, 'SYSTEM');

CREATE TABLE USERS
(
        USER_NAME VARCHAR(255),
        PASSWORD VARCHAR(255),
        PRIMARY KEY (USER_NAME)
);

-- Maps gateway-authorized workers (users) to gateways.
CREATE TABLE GATEWAY_WORKER
(
        GATEWAY_ID VARCHAR(255),
        USER_NAME VARCHAR(255),
        PRIMARY KEY (GATEWAY_ID, USER_NAME),
        FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE,
        FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
);

CREATE TABLE PROJECT
(
        GATEWAY_ID VARCHAR(255),
        USER_NAME VARCHAR(255),
        PROJECT_NAME VARCHAR(255) NOT NULL,
        PROJECT_ID VARCHAR(255),
        DESCRIPTION VARCHAR(255),
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (PROJECT_ID),
        FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE,
        FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
);

-- Users who share a project in addition to its owner.
CREATE TABLE PROJECT_USER
(
        PROJECT_ID VARCHAR(255),
        USER_NAME VARCHAR(255),
        PRIMARY KEY (PROJECT_ID, USER_NAME),
        FOREIGN KEY (PROJECT_ID) REFERENCES PROJECT(PROJECT_ID) ON DELETE CASCADE,
        FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
);

CREATE TABLE EXPERIMENT
(
        EXPERIMENT_ID VARCHAR(255),
        GATEWAY_ID VARCHAR(255),
        EXECUTION_USER VARCHAR(255) NOT NULL,
        PROJECT_ID VARCHAR(255) NOT NULL,
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        EXPERIMENT_NAME VARCHAR(255) NOT NULL,
        EXPERIMENT_DESCRIPTION VARCHAR(255),
        APPLICATION_ID VARCHAR(255),
        APPLICATION_VERSION VARCHAR(255),
        WORKFLOW_TEMPLATE_ID VARCHAR(255),
        WORKFLOW_TEMPLATE_VERSION VARCHAR(255),
        WORKFLOW_EXECUTION_ID VARCHAR(255),
        ALLOW_NOTIFICATION SMALLINT,
        GATEWAY_EXECUTION_ID VARCHAR(255),
        PRIMARY KEY(EXPERIMENT_ID),
        FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE,
        FOREIGN KEY (EXECUTION_USER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE,
        FOREIGN KEY (PROJECT_ID) REFERENCES PROJECT(PROJECT_ID) ON DELETE CASCADE
);

CREATE TABLE EXPERIMENT_INPUT
(
        EXPERIMENT_ID VARCHAR(255),
        INPUT_KEY VARCHAR(255) NOT NULL,
        DATA_TYPE VARCHAR(255),
        APP_ARGUMENT VARCHAR(255),
        STANDARD_INPUT SMALLINT,
        USER_FRIENDLY_DESC VARCHAR(255),
        METADATA VARCHAR(255),
        VALUE LONGTEXT,
        INPUT_ORDER INTEGER,
        IS_REQUIRED SMALLINT,
        REQUIRED_TO_COMMANDLINE SMALLINT,
        DATA_STAGED SMALLINT,
        PRIMARY KEY(EXPERIMENT_ID, INPUT_KEY),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
);

CREATE TABLE EXPERIMENT_OUTPUT
(
        EXPERIMENT_ID VARCHAR(255),
        OUTPUT_KEY VARCHAR(255) NOT NULL,
        DATA_TYPE VARCHAR(255),
        VALUE LONGTEXT,
        IS_REQUIRED SMALLINT,
        REQUIRED_TO_COMMANDLINE SMALLINT,
        DATA_MOVEMENT SMALLINT,
        DATA_NAME_LOCATION VARCHAR(255),
        SEARCH_QUERY VARCHAR(255),
        APP_ARGUMENT VARCHAR(255),
        PRIMARY KEY(EXPERIMENT_ID, OUTPUT_KEY),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
);

CREATE TABLE WORKFLOW_NODE_DETAIL
(
        EXPERIMENT_ID VARCHAR(255) NOT NULL,
        NODE_INSTANCE_ID VARCHAR(255),
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        NODE_NAME VARCHAR(255) NOT NULL,
        EXECUTION_UNIT VARCHAR(255) NOT NULL,
        EXECUTION_UNIT_DATA VARCHAR(255),
        PRIMARY KEY(NODE_INSTANCE_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
);

CREATE TABLE TASK_DETAIL
(
        TASK_ID VARCHAR(255),
        NODE_INSTANCE_ID VARCHAR(255),
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        APPLICATION_ID VARCHAR(255),
        APPLICATION_VERSION VARCHAR(255),
        APPLICATION_DEPLOYMENT_ID VARCHAR(255),
        ALLOW_NOTIFICATION SMALLINT,
        PRIMARY KEY(TASK_ID),
        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
);

CREATE TABLE NOTIFICATION_EMAIL
(
        EMAIL_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        EMAIL_ADDRESS VARCHAR(255),
        PRIMARY KEY(EMAIL_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE APPLICATION_INPUT
(
        TASK_ID VARCHAR(255),
        INPUT_KEY VARCHAR(255) NOT NULL,
        DATA_TYPE VARCHAR(255),
        APP_ARGUMENT VARCHAR(255),
        STANDARD_INPUT SMALLINT,
        USER_FRIENDLY_DESC VARCHAR(255),
        METADATA VARCHAR(255),
        VALUE LONGTEXT,
        INPUT_ORDER INTEGER,
        IS_REQUIRED SMALLINT,
        REQUIRED_TO_COMMANDLINE SMALLINT,
        DATA_STAGED SMALLINT,
        PRIMARY KEY(TASK_ID, INPUT_KEY),
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE APPLICATION_OUTPUT
(
        TASK_ID VARCHAR(255),
        OUTPUT_KEY VARCHAR(255) NOT NULL,
        DATA_TYPE VARCHAR(255),
        VALUE LONGTEXT,
        DATA_MOVEMENT SMALLINT,
        IS_REQUIRED SMALLINT,
        REQUIRED_TO_COMMANDLINE SMALLINT,
        DATA_NAME_LOCATION VARCHAR(255),
        SEARCH_QUERY VARCHAR(255),
        APP_ARGUMENT VARCHAR(255),
        PRIMARY KEY(TASK_ID, OUTPUT_KEY),
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE NODE_INPUT
(
        NODE_INSTANCE_ID VARCHAR(255),
        INPUT_KEY VARCHAR(255) NOT NULL,
        DATA_TYPE VARCHAR(255),
        APP_ARGUMENT VARCHAR(255),
        STANDARD_INPUT SMALLINT,
        USER_FRIENDLY_DESC VARCHAR(255),
        METADATA VARCHAR(255),
        VALUE VARCHAR(255),
        INPUT_ORDER INTEGER,
        IS_REQUIRED SMALLINT,
        REQUIRED_TO_COMMANDLINE SMALLINT,
        DATA_STAGED SMALLINT,
        PRIMARY KEY(NODE_INSTANCE_ID, INPUT_KEY),
        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
);

CREATE TABLE NODE_OUTPUT
(
        NODE_INSTANCE_ID VARCHAR(255),
        OUTPUT_KEY VARCHAR(255) NOT NULL,
        DATA_TYPE VARCHAR(255),
        VALUE VARCHAR(255),
        IS_REQUIRED SMALLINT,
        REQUIRED_TO_COMMANDLINE SMALLINT,
        DATA_MOVEMENT SMALLINT,
        DATA_NAME_LOCATION VARCHAR(255),
        SEARCH_QUERY VARCHAR(255),
        APP_ARGUMENT VARCHAR(255),
        PRIMARY KEY(NODE_INSTANCE_ID, OUTPUT_KEY),
        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
);

CREATE TABLE JOB_DETAIL
(
        JOB_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        JOB_DESCRIPTION LONGTEXT NOT NULL,
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        COMPUTE_RESOURCE_CONSUMED VARCHAR(255),
        JOBNAME VARCHAR(255),
        WORKING_DIR VARCHAR(255),
        PRIMARY KEY (TASK_ID, JOB_ID),
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE DATA_TRANSFER_DETAIL
(
        TRANSFER_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        TRANSFER_DESC VARCHAR(255) NOT NULL,
        PRIMARY KEY(TRANSFER_ID),
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE ERROR_DETAIL
(
        ERROR_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        NODE_INSTANCE_ID VARCHAR(255),
        JOB_ID VARCHAR(255),
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        ACTUAL_ERROR_MESSAGE LONGTEXT,
        -- NOTE(review): "FRIEDNLY" (sic) is kept as-is; it must match the
        -- column name mapped by the JPA entities -- confirm before renaming.
        USER_FRIEDNLY_ERROR_MSG VARCHAR(255),
        TRANSIENT_OR_PERSISTENT SMALLINT,
        ERROR_CATEGORY VARCHAR(255),
        CORRECTIVE_ACTION VARCHAR(255),
        ACTIONABLE_GROUP VARCHAR(255),
        PRIMARY KEY(ERROR_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE,
        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
);

-- Shared status table for experiments, nodes, transfers, tasks and jobs;
-- STATUS_TYPE discriminates which entity a row belongs to.
CREATE TABLE STATUS
(
        STATUS_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        NODE_INSTANCE_ID VARCHAR(255),
        TRANSFER_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        JOB_ID VARCHAR(255),
        STATE VARCHAR(255),
        -- FIX: was DEFAULT '0000-00-00 00:00:00' ON UPDATE now() (zero-date
        -- default is invalid under NO_ZERO_DATE); the auto-update-on-write
        -- behavior is preserved.
        STATUS_UPDATE_TIME TIMESTAMP NULL DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
        STATUS_TYPE VARCHAR(255),
        PRIMARY KEY(STATUS_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE,
        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE,
        FOREIGN KEY (TRANSFER_ID) REFERENCES DATA_TRANSFER_DETAIL(TRANSFER_ID) ON DELETE CASCADE
);

CREATE TABLE CONFIG_DATA
(
        EXPERIMENT_ID VARCHAR(255),
        AIRAVATA_AUTO_SCHEDULE SMALLINT NOT NULL,
        OVERRIDE_MANUAL_SCHEDULE_PARAMS SMALLINT NOT NULL,
        SHARE_EXPERIMENT SMALLINT,
        USER_DN VARCHAR(255),
        GENERATE_CERT SMALLINT,
        PRIMARY KEY(EXPERIMENT_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
);

CREATE TABLE COMPUTATIONAL_RESOURCE_SCHEDULING
(
        RESOURCE_SCHEDULING_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        RESOURCE_HOST_ID VARCHAR(255),
        CPU_COUNT INTEGER,
        NODE_COUNT INTEGER,
        NO_OF_THREADS INTEGER,
        QUEUE_NAME VARCHAR(255),
        WALLTIME_LIMIT INTEGER,
        -- FIX: was DEFAULT '0000-00-00 00:00:00' (invalid under NO_ZERO_DATE).
        JOB_START_TIME TIMESTAMP NULL DEFAULT NULL,
        TOTAL_PHYSICAL_MEMORY INTEGER,
        COMPUTATIONAL_PROJECT_ACCOUNT VARCHAR(255),
        -- NOTE(review): "CHESSIS" (sic, chassis) is kept as-is; it must match
        -- the column name mapped by the JPA entities -- confirm before renaming.
        CHESSIS_NAME VARCHAR(255),
        PRIMARY KEY(RESOURCE_SCHEDULING_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE ADVANCE_INPUT_DATA_HANDLING
(
        INPUT_DATA_HANDLING_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        WORKING_DIR_PARENT VARCHAR(255),
        UNIQUE_WORKING_DIR VARCHAR(255),
        STAGE_INPUT_FILES_TO_WORKING_DIR SMALLINT,
        CLEAN_AFTER_JOB SMALLINT,
        PRIMARY KEY(INPUT_DATA_HANDLING_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE ADVANCE_OUTPUT_DATA_HANDLING
(
        OUTPUT_DATA_HANDLING_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        OUTPUT_DATA_DIR VARCHAR(255),
        DATA_REG_URL VARCHAR(255),
        PERSIST_OUTPUT_DATA SMALLINT,
        PRIMARY KEY(OUTPUT_DATA_HANDLING_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

CREATE TABLE QOS_PARAM
(
        QOS_ID INTEGER NOT NULL AUTO_INCREMENT,
        EXPERIMENT_ID VARCHAR(255),
        TASK_ID VARCHAR(255),
        START_EXECUTION_AT VARCHAR(255),
        EXECUTE_BEFORE VARCHAR(255),
        NO_OF_RETRIES INTEGER,
        PRIMARY KEY(QOS_ID),
        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
);

-- Credential-store tables.
CREATE TABLE COMMUNITY_USER
(
        GATEWAY_ID VARCHAR(256) NOT NULL,
        COMMUNITY_USER_NAME VARCHAR(256) NOT NULL,
        TOKEN_ID VARCHAR(256) NOT NULL,
        COMMUNITY_USER_EMAIL VARCHAR(256) NOT NULL,
        PRIMARY KEY (GATEWAY_ID, COMMUNITY_USER_NAME, TOKEN_ID)
);

CREATE TABLE CREDENTIALS
(
        GATEWAY_ID VARCHAR(256) NOT NULL,
        TOKEN_ID VARCHAR(256) NOT NULL,
        CREDENTIAL BLOB NOT NULL,
        PORTAL_USER_ID VARCHAR(256) NOT NULL,
        TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
        PRIMARY KEY (GATEWAY_ID, TOKEN_ID)
);
